From 66d6b63ae5448330e7e822c5684340b3124cfd9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tor=20Hvals=C3=B8e?= Date: Tue, 22 Jul 2025 09:50:55 +0200 Subject: [PATCH 01/66] Adding ignore_missing_component_templates (#1206) * Adding ignore_missing_component_templates https://github.com/elastic/terraform-provider-elasticstack/issues/631 * Trigger workflow * Reverting trigger change * Adding acceptance tests on index template for parameter ignore_missing_component_templates * Updating changelog * Handling inconsistent tabs vs spaces * Handling inconsistent tabs vs spaces * es version awareness ignore_missing_component_templates * Formatting * Updating ressource ref from data source test * Update Changelog --------- Co-authored-by: Toby Brain Co-authored-by: Toby Brain --- CHANGELOG.md | 2 + .../elasticsearch_index_template.md | 1 + .../resources/elasticsearch_index_template.md | 1 + internal/elasticsearch/index/template.go | 35 +++++++++++ .../index/template_data_source.go | 8 +++ .../index/template_data_source_test.go | 32 ++++++++++ internal/elasticsearch/index/template_test.go | 58 +++++++++++++++++++ internal/models/models.go | 21 +++---- 8 files changed, 148 insertions(+), 10 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 2bcd4541b..88d7acee1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,7 @@ ## [Unreleased] +- Add `ignore_missing_component_templates` to `elasticstack_elasticsearch_index_template` ([#1206](https://github.com/elastic/terraform-provider-elasticstack/pull/1206)) + ## [0.11.17] - 2025-07-21 - Add `elasticstack_apm_agent_configuration` resource ([#1196](https://github.com/elastic/terraform-provider-elasticstack/pull/1196)) diff --git a/docs/data-sources/elasticsearch_index_template.md b/docs/data-sources/elasticsearch_index_template.md index c1363c3cd..9beebbb90 100644 --- a/docs/data-sources/elasticsearch_index_template.md +++ b/docs/data-sources/elasticsearch_index_template.md @@ -42,6 +42,7 @@ output "template" { - `composed_of` (List of String) An ordered list of component template names. - `data_stream` (List of Object) If this object is included, the template is used to create data streams and their backing indices. Supports an empty object. (see [below for nested schema](#nestedatt--data_stream)) - `id` (String) Internal identifier of the resource +- `ignore_missing_component_templates` (List of String) A list of component template names that are ignored if missing. - `index_patterns` (Set of String) Array of wildcard (*) expressions used to match the names of data streams and indices during creation. - `metadata` (String) Optional user metadata about the index template. - `priority` (Number) Priority to determine index template precedence when a new data stream or index is created. diff --git a/docs/resources/elasticsearch_index_template.md b/docs/resources/elasticsearch_index_template.md index 17cc732de..08715c32e 100644 --- a/docs/resources/elasticsearch_index_template.md +++ b/docs/resources/elasticsearch_index_template.md @@ -58,6 +58,7 @@ resource "elasticstack_elasticsearch_index_template" "my_data_stream" { - `composed_of` (List of String) An ordered list of component template names. - `data_stream` (Block List, Max: 1) If this object is included, the template is used to create data streams and their backing indices. Supports an empty object. (see [below for nested schema](#nestedblock--data_stream)) - `elasticsearch_connection` (Block List, Max: 1, Deprecated) Elasticsearch connection configuration block. 
This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. (see [below for nested schema](#nestedblock--elasticsearch_connection)) +- `ignore_missing_component_templates` (List of String) A list of component template names that are ignored if missing. - `metadata` (String) Optional user metadata about the index template. - `priority` (Number) Priority to determine index template precedence when a new data stream or index is created. - `template` (Block List, Max: 1) Template to be applied. It may optionally include an aliases, mappings, lifecycle, or settings configuration. (see [below for nested schema](#nestedblock--template)) diff --git a/internal/elasticsearch/index/template.go b/internal/elasticsearch/index/template.go index d5a70cfc9..30fecfa24 100644 --- a/internal/elasticsearch/index/template.go +++ b/internal/elasticsearch/index/template.go @@ -10,12 +10,17 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/go-version" "github.com/hashicorp/terraform-plugin-log/tflog" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +var ( + MinSupportedIgnoreMissingComponentTemplateVersion = version.Must(version.NewVersion("8.7.0")) +) + func ResourceTemplate() *schema.Resource { templateSchema := map[string]*schema.Schema{ "id": { @@ -38,6 +43,15 @@ func ResourceTemplate() *schema.Resource { Type: schema.TypeString, }, }, + "ignore_missing_component_templates": { + Description: "A list of component template names that are ignored if missing.", + Type: schema.TypeList, + Optional: true, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, "data_stream": { Description: "If this object is included, the template is used to create data streams and their backing indices. 
Supports an empty object.", Type: schema.TypeList, @@ -210,6 +224,12 @@ func resourceIndexTemplatePut(ctx context.Context, d *schema.ResourceData, meta if diags.HasError() { return diags } + + serverVersion, diags := client.ServerVersion(ctx) + if diags.HasError() { + return diags + } + var indexTemplate models.IndexTemplate indexTemplate.Name = templateId @@ -221,6 +241,18 @@ func resourceIndexTemplatePut(ctx context.Context, d *schema.ResourceData, meta } indexTemplate.ComposedOf = compsOf + if v, ok := d.GetOk("ignore_missing_component_templates"); ok { + compsOfIgnore := make([]string, 0) + for _, c := range v.([]interface{}) { + compsOfIgnore = append(compsOfIgnore, c.(string)) + } + + if len(compsOfIgnore) > 0 && serverVersion.LessThan(MinSupportedIgnoreMissingComponentTemplateVersion) { + return diag.FromErr(fmt.Errorf("'ignore_missing_component_templates' is supported only for Elasticsearch v%s and above", MinSupportedIgnoreMissingComponentTemplateVersion.String())) + } + indexTemplate.IgnoreMissingComponentTemplates = compsOfIgnore + } + if v, ok := d.GetOk("data_stream"); ok { // 8.x workaround hasAllowCustomRouting := false @@ -371,6 +403,9 @@ func resourceIndexTemplateRead(ctx context.Context, d *schema.ResourceData, meta if err := d.Set("composed_of", tpl.IndexTemplate.ComposedOf); err != nil { return diag.FromErr(err) } + if err := d.Set("ignore_missing_component_templates", tpl.IndexTemplate.IgnoreMissingComponentTemplates); err != nil { + return diag.FromErr(err) + } if stream := tpl.IndexTemplate.DataStream; stream != nil { ds := make([]interface{}, 1) dSettings := make(map[string]interface{}) diff --git a/internal/elasticsearch/index/template_data_source.go b/internal/elasticsearch/index/template_data_source.go index 8d1630268..14bc02c37 100644 --- a/internal/elasticsearch/index/template_data_source.go +++ b/internal/elasticsearch/index/template_data_source.go @@ -29,6 +29,14 @@ func DataSourceTemplate() *schema.Resource { Type: schema.TypeString, }, }, + "ignore_missing_component_templates": { + Description: "A list of component template names that are ignored if missing.", + Type: schema.TypeList, + Computed: true, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, "data_stream": { Description: "If this object is included, the template is used to create data streams and their backing indices. 
Supports an empty object.", Type: schema.TypeList, diff --git a/internal/elasticsearch/index/template_data_source_test.go b/internal/elasticsearch/index/template_data_source_test.go index c37ee5797..6cc4aa0cf 100644 --- a/internal/elasticsearch/index/template_data_source_test.go +++ b/internal/elasticsearch/index/template_data_source_test.go @@ -5,6 +5,8 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" + "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index" + "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) @@ -12,6 +14,7 @@ import ( func TestAccIndexTemplateDataSource(t *testing.T) { // generate a random role name templateName := "test-template-" + sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) + templateNameComponent := "test-template-" + sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) resource.Test(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(t) }, @@ -25,6 +28,16 @@ func TestAccIndexTemplateDataSource(t *testing.T) { resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_index_template.test", "priority", "100"), ), }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(index.MinSupportedIgnoreMissingComponentTemplateVersion), + Config: testAccIndexTemplateDataSourceWithIgnoreComponentConfig(templateNameComponent), + Check: resource.ComposeAggregateTestCheckFunc( + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_index_template.test_component", "name", templateNameComponent), + resource.TestCheckTypeSetElemAttr("data.elasticstack_elasticsearch_index_template.test_component", "index_patterns.*", fmt.Sprintf("tf-acc-component-%s-*", templateNameComponent)), + resource.TestCheckTypeSetElemAttr("data.elasticstack_elasticsearch_index_template.test_component", "composed_of.*", fmt.Sprintf("%s-logscomponent@custom", templateNameComponent)), + resource.TestCheckTypeSetElemAttr("data.elasticstack_elasticsearch_index_template.test_component", "ignore_missing_component_templates.*", fmt.Sprintf("%s-logscomponent@custom", templateNameComponent)), + ), + }, }, }) } @@ -47,3 +60,22 @@ data "elasticstack_elasticsearch_index_template" "test" { } `, templateName, templateName) } + +func testAccIndexTemplateDataSourceWithIgnoreComponentConfig(templateName string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} +} + +resource "elasticstack_elasticsearch_index_template" "test_component" { + name = "%s" + index_patterns = ["tf-acc-component-%s-*"] + composed_of = ["%s-logscomponent@custom"] + ignore_missing_component_templates = ["%s-logscomponent@custom"] +} + +data "elasticstack_elasticsearch_index_template" "test_component" { + name = elasticstack_elasticsearch_index_template.test_component.name +} + `, templateName, templateName, templateName, templateName) +} diff --git a/internal/elasticsearch/index/template_test.go b/internal/elasticsearch/index/template_test.go index e0d3ce7a4..7e0ffd8f2 100644 --- a/internal/elasticsearch/index/template_test.go +++ b/internal/elasticsearch/index/template_test.go @@ -6,6 +6,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index" + 
"github.com/elastic/terraform-provider-elasticstack/internal/versionutils" sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -14,6 +16,7 @@ import ( func TestAccResourceIndexTemplate(t *testing.T) { // generate random template name templateName := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) + templateNameComponent := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) resource.Test(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(t) }, @@ -41,6 +44,26 @@ func TestAccResourceIndexTemplate(t *testing.T) { resource.TestCheckResourceAttr("elasticstack_elasticsearch_index_template.test2", "data_stream.0.hidden", "false"), ), }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(index.MinSupportedIgnoreMissingComponentTemplateVersion), + Config: testAccResourceIndexTemplateCreateWithIgnoreComponent(templateNameComponent), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_elasticsearch_index_template.test_component", "name", templateNameComponent), + resource.TestCheckTypeSetElemAttr("elasticstack_elasticsearch_index_template.test_component", "index_patterns.*", fmt.Sprintf("%s-logscomponent-*", templateNameComponent)), + resource.TestCheckTypeSetElemAttr("elasticstack_elasticsearch_index_template.test_component", "composed_of.*", fmt.Sprintf("%s-logscomponent@custom", templateNameComponent)), + resource.TestCheckTypeSetElemAttr("elasticstack_elasticsearch_index_template.test_component", "ignore_missing_component_templates.*", fmt.Sprintf("%s-logscomponent@custom", templateNameComponent)), + ), + }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(index.MinSupportedIgnoreMissingComponentTemplateVersion), + Config: testAccResourceIndexTemplateUpdateWithIgnoreComponent(templateNameComponent), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_elasticsearch_index_template.test_component", "name", templateNameComponent), + resource.TestCheckTypeSetElemAttr("elasticstack_elasticsearch_index_template.test_component", "index_patterns.*", fmt.Sprintf("%s-logscomponent-*", templateNameComponent)), + resource.TestCheckTypeSetElemAttr("elasticstack_elasticsearch_index_template.test_component", "composed_of.*", fmt.Sprintf("%s-logscomponent-updated@custom", templateNameComponent)), + resource.TestCheckTypeSetElemAttr("elasticstack_elasticsearch_index_template.test_component", "ignore_missing_component_templates.*", fmt.Sprintf("%s-logscomponent-updated@custom", templateNameComponent)), + ), + }, }, }) } @@ -117,6 +140,41 @@ resource "elasticstack_elasticsearch_index_template" "test2" { `, name, name, name) } +func testAccResourceIndexTemplateCreateWithIgnoreComponent(name string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} +} + +resource "elasticstack_elasticsearch_index_template" "test_component" { + name = "%s" + index_patterns = ["%s-logscomponent-*"] + + composed_of = ["%s-logscomponent@custom"] + ignore_missing_component_templates = ["%s-logscomponent@custom"] +} + `, name, name, name, name) +} + +func testAccResourceIndexTemplateUpdateWithIgnoreComponent(name string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} +} + +resource "elasticstack_elasticsearch_index_template" "test_component" { + name = "%s" + index_patterns = ["%s-logscomponent-*"] + + composed_of = 
["%s-logscomponent-updated@custom"] + ignore_missing_component_templates = ["%s-logscomponent-updated@custom"] + + template { + } +} + `, name, name, name, name) +} + func checkResourceIndexTemplateDestroy(s *terraform.State) error { client, err := clients.NewAcceptanceTestingClient() if err != nil { diff --git a/internal/models/models.go b/internal/models/models.go index 3cc9c1e18..5608df749 100644 --- a/internal/models/models.go +++ b/internal/models/models.go @@ -163,16 +163,17 @@ type Application struct { } type IndexTemplate struct { - Name string `json:"-"` - Create bool `json:"-"` - Timeout string `json:"-"` - ComposedOf []string `json:"composed_of"` - DataStream *DataStreamSettings `json:"data_stream,omitempty"` - IndexPatterns []string `json:"index_patterns"` - Meta map[string]interface{} `json:"_meta,omitempty"` - Priority *int `json:"priority,omitempty"` - Template *Template `json:"template,omitempty"` - Version *int `json:"version,omitempty"` + Name string `json:"-"` + Create bool `json:"-"` + Timeout string `json:"-"` + ComposedOf []string `json:"composed_of"` + IgnoreMissingComponentTemplates []string `json:"ignore_missing_component_templates,omitempty"` + DataStream *DataStreamSettings `json:"data_stream,omitempty"` + IndexPatterns []string `json:"index_patterns"` + Meta map[string]interface{} `json:"_meta,omitempty"` + Priority *int `json:"priority,omitempty"` + Template *Template `json:"template,omitempty"` + Version *int `json:"version,omitempty"` } type DataStreamSettings struct { From 7427c20183bbd469b7192118702bcfa97efa9141 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sat, 26 Jul 2025 07:27:50 +1000 Subject: [PATCH 02/66] chore(deps): pin dependencies (#1216) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index d9b9ccac6..e4a77caa1 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -33,7 +33,7 @@ jobs: TF_ACC: "1" services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:9.0.3 + image: docker.elastic.co/elasticsearch/elasticsearch:9.0.3@sha256:b21843a4a2efafcb0475ead137ce0a669fca412739694de833a2697f121a87b9 env: discovery.type: single-node xpack.security.enabled: true @@ -48,7 +48,7 @@ jobs: - 9200:9200 options: --health-cmd="curl http://localhost:9200/_cluster/health" --health-interval=10s --health-timeout=5s --health-retries=10 kibana: - image: docker.elastic.co/kibana/kibana:9.0.3 + image: docker.elastic.co/kibana/kibana:9.0.3@sha256:c4c00a485fbc3619d8373f3bc74e9dd5b5a34380ef50442be4366e8fb57cd50a env: SERVER_NAME: kibana ELASTICSEARCH_HOSTS: http://elasticsearch:9200 @@ -60,7 +60,7 @@ jobs: - 5601:5601 options: --health-cmd="curl http://localhost:5601/api/status" --health-interval=10s --health-timeout=5s --health-retries=10 fleet: - image: docker.elastic.co/elastic-agent/elastic-agent:9.0.3 + image: docker.elastic.co/elastic-agent/elastic-agent:9.0.3@sha256:c143c196d75078d1633c436fc8ab1e8c0d387d7131e3ace8bac1c1eea6d583ff env: SERVER_NAME: fleet FLEET_ENROLL: "1" From 16b7a2828cbf2fe2d35bf8c244e220fb5d69c6b4 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sat, 26 Jul 
2025 09:57:26 +0000 Subject: [PATCH 03/66] chore(deps): update golang:1.24.5 docker digest to ef5b4be (#1217) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .buildkite/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/release.yml b/.buildkite/release.yml index a40a71917..1877c6005 100644 --- a/.buildkite/release.yml +++ b/.buildkite/release.yml @@ -1,7 +1,7 @@ steps: - label: Release agents: - image: "golang:1.24.5@sha256:14fd8a55e59a560704e5fc44970b301d00d344e45d6b914dda228e09f359a088" + image: "golang:1.24.5@sha256:ef5b4be1f94b36c90385abd9b6b4f201723ae28e71acacb76d00687333c17282" cpu: "16" memory: "24G" ephemeralStorage: "20G" From d2f8c46248d27b3fb3283be2c73d5e88c43e11aa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 27 Jul 2025 09:14:32 +1000 Subject: [PATCH 04/66] Bump github.com/oapi-codegen/runtime from 1.1.1 to 1.1.2 (#1213) --- updated-dependencies: - dependency-name: github.com/oapi-codegen/runtime dependency-version: 1.1.2 dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 83f0abd4f..8cecb3913 100644 --- a/go.mod +++ b/go.mod @@ -21,7 +21,7 @@ require ( github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c github.com/oapi-codegen/oapi-codegen/v2 v2.5.0 - github.com/oapi-codegen/runtime v1.1.1 + github.com/oapi-codegen/runtime v1.1.2 github.com/stretchr/testify v1.10.0 go.uber.org/mock v0.5.2 ) diff --git a/go.sum b/go.sum index 17ee66774..8902c2943 100644 --- a/go.sum +++ b/go.sum @@ -813,8 +813,8 @@ github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY= github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc= github.com/oapi-codegen/oapi-codegen/v2 v2.5.0 h1:iJvF8SdB/3/+eGOXEpsWkD8FQAHj6mqkb6Fnsoc8MFU= github.com/oapi-codegen/oapi-codegen/v2 v2.5.0/go.mod h1:fwlMxUEMuQK5ih9aymrxKPQqNm2n8bdLk1ppjH+lr9w= -github.com/oapi-codegen/runtime v1.1.1 h1:EXLHh0DXIJnWhdRPN2w4MXAzFyE4CskzhNLUmtpMYro= -github.com/oapi-codegen/runtime v1.1.1/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg= +github.com/oapi-codegen/runtime v1.1.2 h1:P2+CubHq8fO4Q6fV1tqDBZHCwpVpvPg7oKiYzQgXIyI= +github.com/oapi-codegen/runtime v1.1.2/go.mod h1:SK9X900oXmPWilYR5/WKPzt3Kqxn/uS/+lbpREv+eCg= github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 h1:G7ERwszslrBzRxj//JalHPu/3yz+De2J+4aLtSRlHiY= github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037/go.mod h1:2bpvgLBZEtENV5scfDFEtB/5+1M4hkQhDQrccEJ/qGw= github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 h1:bQx3WeLcUWy+RletIKwUIt4x3t8n2SxavmoclizMb8c= From 0456818a8cfd0267f0ac70210b856479fc117018 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Mon, 28 Jul 2025 10:47:18 +1000 Subject: [PATCH 05/66] Prevent provider panic when referenced script does not exist in ES (#1218) * Prevent provider panic when referenced script does not exist in ES * Changelog --- CHANGELOG.md | 1 + internal/elasticsearch/cluster/script.go | 1 + internal/elasticsearch/cluster/script_test.go | 21 +++++++++++++++++++ 3 files changed, 23 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 88d7acee1..feaedb045 100644 --- 
a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,7 @@ ## [Unreleased] - Add `ignore_missing_component_templates` to `elasticstack_elasticsearch_index_template` ([#1206](https://github.com/elastic/terraform-provider-elasticstack/pull/1206)) +- Prevent provider panic when a script exists in state, but not in Elasticsearch ([#1218](https://github.com/elastic/terraform-provider-elasticstack/pull/1218)) ## [0.11.17] - 2025-07-21 diff --git a/internal/elasticsearch/cluster/script.go b/internal/elasticsearch/cluster/script.go index 8b09c443e..1a96f7c27 100644 --- a/internal/elasticsearch/cluster/script.go +++ b/internal/elasticsearch/cluster/script.go @@ -81,6 +81,7 @@ func resourceScriptRead(ctx context.Context, d *schema.ResourceData, meta interf if script == nil && diags == nil { tflog.Warn(ctx, fmt.Sprintf(`Script "%s" not found, removing from state`, compId.ResourceId)) d.SetId("") + return nil } if diags.HasError() { return diags diff --git a/internal/elasticsearch/cluster/script_test.go b/internal/elasticsearch/cluster/script_test.go index 6c8170f53..eb31cf320 100644 --- a/internal/elasticsearch/cluster/script_test.go +++ b/internal/elasticsearch/cluster/script_test.go @@ -9,6 +9,7 @@ import ( sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/stretchr/testify/require" ) func TestAccResourceScript(t *testing.T) { @@ -37,6 +38,26 @@ func TestAccResourceScript(t *testing.T) { resource.TestCheckResourceAttr("elasticstack_elasticsearch_script.test", "params", `{"changed_modifier":2}`), ), }, + { + // Ensure the provider doesn't panic if the script has been deleted outside of the Terraform flow + PreConfig: func() { + client, err := clients.NewAcceptanceTestingClient() + require.NoError(t, err) + + esClient, err := client.GetESClient() + require.NoError(t, err) + + _, err = esClient.DeleteScript(scriptID) + require.NoError(t, err) + }, + Config: testAccScriptUpdate(scriptID), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_elasticsearch_script.test", "script_id", scriptID), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_script.test", "lang", "painless"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_script.test", "source", "Math.log(_score * 4) + params['changed_modifier']"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_script.test", "params", `{"changed_modifier":2}`), + ), + }, }, }) } From 641f7f68888a380e71b21669459206eab05c30f4 Mon Sep 17 00:00:00 2001 From: Nick Clark Date: Tue, 29 Jul 2025 17:29:54 +1000 Subject: [PATCH 06/66] Add copilot instructions and issue template tweaks (#1205) * Add copilot instructions and issue template tweaks * Update .github/copilot-instructions.md Co-authored-by: Toby Brain * Update .github/copilot-instructions.md Co-authored-by: Toby Brain * Update .github/copilot-instructions.md Co-authored-by: Toby Brain * remove dumb file counts * Build out prompt further * Update copilot-instructions.md Co-authored-by: Toby Brain * Update copilot-instructions.md Co-authored-by: Toby Brain --------- Co-authored-by: Toby Brain --- .github/ISSUE_TEMPLATE/bug_report.md | 2 +- .github/copilot-instructions.md | 122 +++++++++++++++++++++++++++ 2 files changed, 123 insertions(+), 1 deletion(-) create mode 100644 .github/copilot-instructions.md diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 
c3f5cea54..30682190a 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -36,4 +36,4 @@ If applicable, add screenshots to help explain your problem. - Elasticsearch Version [e.g. 7.16.0] **Additional context** -Add any other context about the problem here. +Add any other context about the problem here. Links to specific affected code files and paths here are also extremely useful (if known). diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 000000000..27611aa2f --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,122 @@ +You will be tasked to fix an issue from an open-source repository. This is a Go based repository hosting a Terrform provider for the elastic stack (elasticsearch and kibana) APIs. This repo currently supports both [plugin framework](https://developer.hashicorp.com/terraform/plugin/framework/getting-started/code-walkthrough) and [sdkv2](https://developer.hashicorp.com/terraform/plugin/sdkv2) resources. Unless you're told otherwise, all new resources _must_ use the plugin framework. + + + + +Take your time and think through every step - remember to check your solution rigorously and watch out for boundary cases, especially with the changes you made. Your solution must be perfect. If not, continue working on it. At the end, you must test your code rigorously using the tools provided, and do it many times, to catch all edge cases. If it is not robust, iterate more and make it perfect. Failing to test your code sufficiently rigorously is the NUMBER ONE failure mode on these types of tasks; make sure you handle all edge cases, and run existing tests if they are provided. + + +Please see [README.md](../README.md) and the [CONTRIBUTING.md](../CONTRIBUTING.md) docs before getting started. + + +# Workflow + +## High-Level Problem Solving Strategy + +1. Understand the problem deeply. Carefully read the issue and think critically about what is required. +2. Investigate the codebase. Explore relevant files, search for key functions, and gather context. +3. Develop a clear, step-by-step plan. Break down the fix into manageable, incremental steps. +4. Implement the fix incrementally. Make small, testable code changes. +5. Debug as needed. Use debugging techniques to isolate and resolve issues. +6. Test frequently. Run tests after each change to verify correctness. +7. Iterate until the root cause is fixed and all tests pass. +8. Reflect and validate comprehensively. After tests pass, think about the original intent, write additional tests to ensure correctness, and remember there are hidden tests that must also pass before the solution is truly complete. + +Refer to the detailed sections below for more information on each step. + +## 1. Deeply Understand the Problem +Carefully read the issue and think hard about a plan to solve it before coding. Your thinking should be thorough and so it's fine if it's very long. You can think step by step before and after each action you decide to take. + +## 2. Codebase Investigation +- Explore relevant files and directories. +- Search for key functions, classes, or variables related to the issue. +- Read and understand relevant code snippets. +- Identify the root cause of the problem. +- Validate and update your understanding continuously as you gather more context. + +## 3. Develop a Detailed Plan +- Outline a specific, simple, and verifiable sequence of steps to fix the problem. +- Break down the fix into small, incremental changes. + +## 4. 
Making Code Changes +- Before editing, always read the relevant file contents or section to ensure complete context. +- If a patch is not applied correctly, attempt to reapply it. +- Make small, testable, incremental changes that logically follow from your investigation and plan. + +## 5. Debugging +- Make code changes only if you have high confidence they can solve the problem +- When debugging, try to determine the root cause rather than addressing symptoms +- Debug for as long as needed to identify the root cause and identify a fix +- Use print statements, logs, or temporary code to inspect program state, including descriptive statements or error messages to understand what's happening +- To test hypotheses, you can also add test statements or functions +- Revisit your assumptions if unexpected behavior occurs. +- You MUST iterate and keep going until the problem is solved. + +## 6. Testing +- Run tests frequently using `make test` and `make testacc` +- After each change, verify correctness by running relevant tests. +- If tests fail, analyze failures and revise your patch. +- Write additional tests if needed to capture important behaviors or edge cases. +- Ensure all tests pass before finalizing. + +## 7. Final Verification +- Confirm the root cause is fixed. +- Review your solution for logic correctness and robustness. +- Iterate until you are extremely confident the fix is complete and all tests pass. +- Run `make lint` to ensure any linting errors have not surfaced with your changes. This task may automatically correct any linting errors, and regenerate documentation. Include any changes in your commit. + +## 8. Final Reflection and Additional Testing +- Reflect carefully on the original intent of the user and the problem statement. +- Think about potential edge cases or scenarios that may not be covered by existing tests. +- Write additional tests that would need to pass to fully validate the correctness of your solution. +- Run these new tests and ensure they all pass. +- Be aware that there are additional hidden tests that must also pass for the solution to be successful. +- Do not assume the task is complete just because the visible tests pass; continue refining until you are confident the fix is robust and comprehensive. + +## 9. Before Submitting Pull Requests +- Run `make docs-generate` to update the documentation, and ensure the results of this command make it into your pull request. 
+ +## Repository Structure + +• **docs/** - Documentation files + • **data-sources/** - Documentation for Terraform data sources + • **guides/** - User guides and tutorials + • **resources/** - Documentation for Terraform resources +• **examples/** - Example Terraform configurations + • **cloud/** - Examples using the cloud to launch testing stacks + • **data-sources/** - Data source usage examples + • **resources/** - Resource usage examples + • **provider/** - Provider configuration examples +• **generated/** - Auto-generated clients from the `generate-clients` make target + • **alerting/** - Kibana alerting API client + • **connectors/** - Kibana connectors API client + • **kbapi/** - Kibana API client + • **slo/** - SLO (Service Level Objective) API client +• **internal/** - Internal Go packages + • **acctest/** - Acceptance test utilities + • **clients/** - API client implementations + • **elasticsearch/** - Elasticsearch-specific logic + • **fleet/** - Fleet management functionality + • **kibana/** - Kibana-specific logic + • **models/** - Data models and structures + • **schema/** - Connection schema definitions for plugin framework + • **utils/** - Utility functions + • **versionutils/** - Version handling utilities +• **libs/** - External libraries + • **go-kibana-rest/** - Kibana REST API client library +• **provider/** - Core Terraform provider implementation +• **scripts/** - Utility scripts for development and CI +• **templates/** - Template files for documentation generation + • **data-sources/** - Data source documentation templates + • **resources/** - Resource documentation templates + • **guides/** - Guide documentation templates +• **xpprovider/** - Additional provider functionality needed for Crossplane + +## Key Guidelines +* Follow Go best practices and idiomatic patterns +* Maintain existing code structure and organization +* Write unit tests for new functionality. Use table-driven unit tests when possible. 
+* When creating a new Plugin Framework based resource, follow the code organisation of `internal/elasticsearch/security/system_user` +* Avoid adding any extra functionality into the `utils` package, instead preferencing adding to a more specific package or creating one to match the purpose +* Think through your planning first using the codebase as your guide before creating new resources and data sources + From f106c3273c6a7e407a86893e0ad7aebd83c3ac3f Mon Sep 17 00:00:00 2001 From: "Christiane (Tina) Heiligers" Date: Tue, 29 Jul 2025 17:26:58 -0700 Subject: [PATCH 07/66] Adds validation for slo_id (#1221) * Adds validation for slo_id * Updates CHANGELOG.md --- CHANGELOG.md | 1 + docs/resources/kibana_slo.md | 2 +- internal/kibana/slo.go | 7 +- internal/kibana/slo_test.go | 133 +++++++++++++++++++++++++++++++++++ 4 files changed, 141 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index feaedb045..c5e418e54 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,6 @@ ## [Unreleased] +- Add `slo_id` validation to `elasticstack_kibana_slo` ([#1221](https://github.com/elastic/terraform-provider-elasticstack/pull/1221)) - Add `ignore_missing_component_templates` to `elasticstack_elasticsearch_index_template` ([#1206](https://github.com/elastic/terraform-provider-elasticstack/pull/1206)) - Prevent provider panic when a script exists in state, but not in Elasticsearch ([#1218](https://github.com/elastic/terraform-provider-elasticstack/pull/1218)) diff --git a/docs/resources/kibana_slo.md b/docs/resources/kibana_slo.md index c5759e012..0ce7e5788 100644 --- a/docs/resources/kibana_slo.md +++ b/docs/resources/kibana_slo.md @@ -248,7 +248,7 @@ resource "elasticstack_kibana_slo" "timeslice_metric" { - `kql_custom_indicator` (Block List, Max: 1) (see [below for nested schema](#nestedblock--kql_custom_indicator)) - `metric_custom_indicator` (Block List, Max: 1) (see [below for nested schema](#nestedblock--metric_custom_indicator)) - `settings` (Block List, Max: 1) The default settings should be sufficient for most users, but if needed, these properties can be overwritten. (see [below for nested schema](#nestedblock--settings)) -- `slo_id` (String) An ID (8 and 36 characters). If omitted, a UUIDv1 will be generated server-side. +- `slo_id` (String) An ID (8 to 48 characters) that contains only letters, numbers, hyphens, and underscores. If omitted, a UUIDv1 will be generated server-side. - `space_id` (String) An identifier for the space. If space_id is not provided, the default space is used. - `tags` (List of String) The tags for the SLO. - `timeslice_metric_indicator` (Block List, Max: 1) Defines a timeslice metric indicator for SLO. (see [below for nested schema](#nestedblock--timeslice_metric_indicator)) diff --git a/internal/kibana/slo.go b/internal/kibana/slo.go index c09b82838..85ba3e27e 100644 --- a/internal/kibana/slo.go +++ b/internal/kibana/slo.go @@ -3,6 +3,7 @@ package kibana import ( "context" "fmt" + "regexp" "github.com/elastic/terraform-provider-elasticstack/generated/slo" "github.com/elastic/terraform-provider-elasticstack/internal/clients" @@ -81,11 +82,15 @@ func getSchema() map[string]*schema.Schema { return map[string]*schema.Schema{ "slo_id": { - Description: "An ID (8 and 36 characters). If omitted, a UUIDv1 will be generated server-side.", + Description: "An ID (8 to 48 characters) that contains only letters, numbers, hyphens, and underscores. 
If omitted, a UUIDv1 will be generated server-side.", Type: schema.TypeString, Optional: true, Computed: true, ForceNew: true, + ValidateFunc: validation.All( + validation.StringLenBetween(8, 48), + validation.StringMatch(regexp.MustCompile(`^[a-zA-Z0-9_-]+$`), "must contain only letters, numbers, hyphens, and underscores"), + ), }, "name": { Description: "The name of the SLO.", diff --git a/internal/kibana/slo_test.go b/internal/kibana/slo_test.go index ed41a5c7b..94ab9b762 100644 --- a/internal/kibana/slo_test.go +++ b/internal/kibana/slo_test.go @@ -611,6 +611,83 @@ func TestAccResourceSloErrors(t *testing.T) { }) } +func TestAccResourceSloValidation(t *testing.T) { + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + Config: getSLOConfigWithInvalidSloId("short", "sh", "apm_latency_indicator"), + ExpectError: regexp.MustCompile(`expected length of slo_id to be in the range \(8 - 48\)`), + }, + { + Config: getSLOConfigWithInvalidSloId("toolongid", "this-id-is-way-too-long-and-exceeds-the-48-character-limit-for-slo-ids", "apm_latency_indicator"), + ExpectError: regexp.MustCompile(`expected length of slo_id to be in the range \(8 - 48\)`), + }, + { + Config: getSLOConfigWithInvalidSloId("invalidchars", "invalid@id$", "apm_latency_indicator"), + ExpectError: regexp.MustCompile(`must contain only letters, numbers, hyphens, and underscores`), + }, + }, + }) +} + +func TestSloIdValidation(t *testing.T) { + resource := kibanaresource.ResourceSlo() + sloIdSchema := resource.Schema["slo_id"] + + // Test valid slo_id values + validIds := []string{ + "valid_id", // 8 chars with underscore + "valid-id", // 8 chars with hyphen + "validId123", // 11 chars with mixed case and numbers + "a1234567", // exactly 8 chars + "this-is-a-very-long-but-valid-slo-id-12345678", // exactly 48 chars + } + + for _, id := range validIds { + warnings, errors := sloIdSchema.ValidateFunc(id, "slo_id") + if len(errors) > 0 { + t.Errorf("Expected valid ID %q to pass validation, but got errors: %v", id, errors) + } + if len(warnings) > 0 { + t.Errorf("Expected valid ID %q to have no warnings, but got: %v", id, warnings) + } + } + + // Test invalid slo_id values + invalidTests := []struct { + id string + expectedErr string + }{ + {"short", "expected length of slo_id to be in the range (8 - 48)"}, + {"1234567", "expected length of slo_id to be in the range (8 - 48)"}, // 7 chars + {"this-is-a-very-long-slo-id-that-exceeds-the-48-character-limit-for-sure", "expected length of slo_id to be in the range (8 - 48)"}, // > 48 chars + {"invalid@id", "must contain only letters, numbers, hyphens, and underscores"}, + {"invalid$id", "must contain only letters, numbers, hyphens, and underscores"}, + {"invalid id", "must contain only letters, numbers, hyphens, and underscores"}, // space + {"invalid.id", "must contain only letters, numbers, hyphens, and underscores"}, // period + } + + for _, test := range invalidTests { + _, errors := sloIdSchema.ValidateFunc(test.id, "slo_id") + if len(errors) == 0 { + t.Errorf("Expected invalid ID %q to fail validation", test.id) + } else { + found := false + for _, err := range errors { + if strings.Contains(err.Error(), test.expectedErr) { + found = true + break + } + } + if !found { + t.Errorf("Expected error for ID %q to contain %q, but got: %v", test.id, test.expectedErr, errors) + } + } + } +} + func checkResourceSloDestroy(s *terraform.State) error { client, err := 
clients.NewAcceptanceTestingClient() if err != nil { @@ -856,3 +933,59 @@ func getSLOConfig(vars sloVars) string { return config } + +func getSLOConfigWithInvalidSloId(name, sloId, indicatorType string) string { + configTemplate := ` + provider "elasticstack" { + elasticsearch {} + kibana {} + } + + resource "elasticstack_elasticsearch_index" "my_index" { + name = "my-index-%s" + deletion_protection = false + } + + resource "elasticstack_kibana_slo" "test_slo" { + name = "%s" + slo_id = "%s" + description = "fully sick SLO" + + %s + + time_window { + duration = "7d" + type = "rolling" + } + + budgeting_method = "timeslices" + + objective { + target = 0.999 + timeslice_target = 0.95 + timeslice_window = "5m" + } + + depends_on = [elasticstack_elasticsearch_index.my_index] + + } + ` + + var indicator string + switch indicatorType { + case "apm_latency_indicator": + indicator = fmt.Sprintf(` + apm_latency_indicator { + environment = "production" + service = "my-service" + transaction_type = "request" + transaction_name = "GET /sup/dawg" + index = "my-index-%s" + threshold = 500 + } + `, name) + } + + config := fmt.Sprintf(configTemplate, name, name, sloId, indicator) + return config +} From 991414aaa6c3d6b520dee0d967e3a62e43ea2bbf Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sat, 2 Aug 2025 12:11:13 +0000 Subject: [PATCH 08/66] chore(deps): update docker.elastic.co/elastic-agent/elastic-agent docker tag to v9.1.0 (#1222) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index e4a77caa1..98aafec03 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -60,7 +60,7 @@ jobs: - 5601:5601 options: --health-cmd="curl http://localhost:5601/api/status" --health-interval=10s --health-timeout=5s --health-retries=10 fleet: - image: docker.elastic.co/elastic-agent/elastic-agent:9.0.3@sha256:c143c196d75078d1633c436fc8ab1e8c0d387d7131e3ace8bac1c1eea6d583ff + image: docker.elastic.co/elastic-agent/elastic-agent:9.1.0@sha256:add63bc9d450c29b9e8bb462e4295fa2ec725db394f372fa392213aad5603553 env: SERVER_NAME: fleet FLEET_ENROLL: "1" From 40c467d5b11fff1c8d6d71293928e522e4b5b8f6 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sun, 3 Aug 2025 14:01:41 +0000 Subject: [PATCH 09/66] fix(deps): update module github.com/hashicorp/terraform-plugin-framework to v1.15.1 (#1225) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 8cecb3913..e06684c15 100644 --- a/go.mod +++ b/go.mod @@ -12,7 +12,7 @@ require ( github.com/google/uuid v1.6.0 github.com/hashicorp/go-cty v1.5.0 github.com/hashicorp/go-version v1.7.0 - github.com/hashicorp/terraform-plugin-framework v1.15.0 + github.com/hashicorp/terraform-plugin-framework v1.15.1 github.com/hashicorp/terraform-plugin-framework-jsontypes v0.2.0 github.com/hashicorp/terraform-plugin-framework-validators v0.18.0 github.com/hashicorp/terraform-plugin-go v0.28.0 diff --git a/go.sum b/go.sum index 8902c2943..78c157633 100644 --- a/go.sum +++ 
b/go.sum @@ -607,8 +607,8 @@ github.com/hashicorp/terraform-json v0.25.0 h1:rmNqc/CIfcWawGiwXmRuiXJKEiJu1ntGo github.com/hashicorp/terraform-json v0.25.0/go.mod h1:sMKS8fiRDX4rVlR6EJUMudg1WcanxCMoWwTLkgZP/vc= github.com/hashicorp/terraform-plugin-docs v0.22.0 h1:fwIDStbFel1PPNkM+mDPnpB4efHZBdGoMz/zt5FbTDw= github.com/hashicorp/terraform-plugin-docs v0.22.0/go.mod h1:55DJVyZ7BNK4t/lANcQ1YpemRuS6KsvIO1BbGA+xzGE= -github.com/hashicorp/terraform-plugin-framework v1.15.0 h1:LQ2rsOfmDLxcn5EeIwdXFtr03FVsNktbbBci8cOKdb4= -github.com/hashicorp/terraform-plugin-framework v1.15.0/go.mod h1:hxrNI/GY32KPISpWqlCoTLM9JZsGH3CyYlir09bD/fI= +github.com/hashicorp/terraform-plugin-framework v1.15.1 h1:2mKDkwb8rlx/tvJTlIcpw0ykcmvdWv+4gY3SIgk8Pq8= +github.com/hashicorp/terraform-plugin-framework v1.15.1/go.mod h1:hxrNI/GY32KPISpWqlCoTLM9JZsGH3CyYlir09bD/fI= github.com/hashicorp/terraform-plugin-framework-jsontypes v0.2.0 h1:SJXL5FfJJm17554Kpt9jFXngdM6fXbnUnZ6iT2IeiYA= github.com/hashicorp/terraform-plugin-framework-jsontypes v0.2.0/go.mod h1:p0phD0IYhsu9bR4+6OetVvvH59I6LwjXGnTVEr8ox6E= github.com/hashicorp/terraform-plugin-framework-validators v0.18.0 h1:OQnlOt98ua//rCw+QhBbSqfW3QbwtVrcdWeQN5gI3Hw= From 9754e6ad8998e2a9e64fb296c7c5eb735e61132c Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Tue, 5 Aug 2025 02:49:54 +0000 Subject: [PATCH 10/66] chore(deps): update docker.elastic.co/elasticsearch/elasticsearch docker tag to v9.1.0 (#1223) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- libs/go-kibana-rest/docker-compose.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index 98aafec03..8a3de472c 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -33,7 +33,7 @@ jobs: TF_ACC: "1" services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:9.0.3@sha256:b21843a4a2efafcb0475ead137ce0a669fca412739694de833a2697f121a87b9 + image: docker.elastic.co/elasticsearch/elasticsearch:9.1.0@sha256:47f91984aa1065d745f0a7c827bc912ea1ac439c635854902b65b6ed77f62055 env: discovery.type: single-node xpack.security.enabled: true diff --git a/libs/go-kibana-rest/docker-compose.yml b/libs/go-kibana-rest/docker-compose.yml index a153ab0dd..6a2d626a7 100644 --- a/libs/go-kibana-rest/docker-compose.yml +++ b/libs/go-kibana-rest/docker-compose.yml @@ -1,6 +1,6 @@ services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:9.0.3@sha256:b21843a4a2efafcb0475ead137ce0a669fca412739694de833a2697f121a87b9 + image: docker.elastic.co/elasticsearch/elasticsearch:9.1.0@sha256:47f91984aa1065d745f0a7c827bc912ea1ac439c635854902b65b6ed77f62055 environment: cluster.name: test discovery.type: single-node From 18992609b3eda44dfea0b6bcab7d9c0b1b2adaff Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Fri, 8 Aug 2025 02:55:00 +0000 Subject: [PATCH 11/66] chore(deps): update module github.com/golangci/golangci-lint to v2.3.1 (#1234) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index f4d3ccfe4..519c7ad70 100644 --- a/Makefile +++ 
b/Makefile @@ -246,7 +246,7 @@ install: build ## Install built provider into the local terraform cache .PHONY: tools tools: $(GOBIN) ## Download golangci-lint locally if necessary. - @[[ -f $(GOBIN)/golangci-lint ]] || curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOBIN) v2.2.2 + @[[ -f $(GOBIN)/golangci-lint ]] || curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOBIN) v2.3.1 .PHONY: golangci-lint golangci-lint: From a03d98d3ede1129edaba6831a0fd2bd6ef7b7f5c Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sun, 10 Aug 2025 13:45:35 +0000 Subject: [PATCH 12/66] chore(deps): update dependency go to v1.24.6 (#1237) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- go.mod | 2 +- libs/go-kibana-rest/go.mod | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/go.mod b/go.mod index e06684c15..1c2291087 100644 --- a/go.mod +++ b/go.mod @@ -2,7 +2,7 @@ module github.com/elastic/terraform-provider-elasticstack go 1.24.0 -toolchain go1.24.5 +toolchain go1.24.6 require ( github.com/disaster37/go-kibana-rest/v8 v8.5.0 diff --git a/libs/go-kibana-rest/go.mod b/libs/go-kibana-rest/go.mod index 0177e052f..44f994eee 100644 --- a/libs/go-kibana-rest/go.mod +++ b/libs/go-kibana-rest/go.mod @@ -2,7 +2,7 @@ module github.com/disaster37/go-kibana-rest/v8 go 1.23.0 -toolchain go1.24.5 +toolchain go1.24.6 require ( github.com/go-resty/resty/v2 v2.16.5 From 2f36387792acf39d87b94c6343ab639edc4df09c Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 01:35:02 +0000 Subject: [PATCH 13/66] chore(deps): update docker.elastic.co/elastic-agent/elastic-agent docker tag to v9.1.1 (#1238) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index 8a3de472c..e0cb146a7 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -60,7 +60,7 @@ jobs: - 5601:5601 options: --health-cmd="curl http://localhost:5601/api/status" --health-interval=10s --health-timeout=5s --health-retries=10 fleet: - image: docker.elastic.co/elastic-agent/elastic-agent:9.1.0@sha256:add63bc9d450c29b9e8bb462e4295fa2ec725db394f372fa392213aad5603553 + image: docker.elastic.co/elastic-agent/elastic-agent:9.1.1@sha256:0a1eeae3aa36d4195f307e98d5e4c16006653dc3e164a17649815e2a2fc4fb13 env: SERVER_NAME: fleet FLEET_ENROLL: "1" From d792c48ed064881868110c53801e2ace42c2a088 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Wed, 13 Aug 2025 21:43:53 +0000 Subject: [PATCH 14/66] fix(deps): update module github.com/elastic/go-elasticsearch/v8 to v8.19.0 (#1241) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 1c2291087..701ad9f86 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.24.6 require ( github.com/disaster37/go-kibana-rest/v8 v8.5.0 
github.com/elastic/elastic-transport-go/v8 v8.7.0 - github.com/elastic/go-elasticsearch/v8 v8.18.1 + github.com/elastic/go-elasticsearch/v8 v8.19.0 github.com/google/gofuzz v1.2.0 github.com/google/uuid v1.6.0 github.com/hashicorp/go-cty v1.5.0 diff --git a/go.sum b/go.sum index 78c157633..2770ee49e 100644 --- a/go.sum +++ b/go.sum @@ -350,8 +350,8 @@ github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkp github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/elastic/elastic-transport-go/v8 v8.7.0 h1:OgTneVuXP2uip4BA658Xi6Hfw+PeIOod2rY3GVMGoVE= github.com/elastic/elastic-transport-go/v8 v8.7.0/go.mod h1:YLHer5cj0csTzNFXoNQ8qhtGY1GTvSqPnKWKaqQE3Hk= -github.com/elastic/go-elasticsearch/v8 v8.18.1 h1:lPsN2Wk6+QqBeD4ckmOax7G/Y8tAZgroDYG8j6/5Ce0= -github.com/elastic/go-elasticsearch/v8 v8.18.1/go.mod h1:F3j9e+BubmKvzvLjNui/1++nJuJxbkhHefbaT0kFKGY= +github.com/elastic/go-elasticsearch/v8 v8.19.0 h1:VmfBLNRORY7RZL+9hTxBD97ehl9H8Nxf2QigDh6HuMU= +github.com/elastic/go-elasticsearch/v8 v8.19.0/go.mod h1:F3j9e+BubmKvzvLjNui/1++nJuJxbkhHefbaT0kFKGY= github.com/elazarl/goproxy v1.7.2 h1:Y2o6urb7Eule09PjlhQRGNsqRfPmYI3KKQLFpCAV3+o= github.com/elazarl/goproxy v1.7.2/go.mod h1:82vkLNir0ALaW14Rc399OTTjyNREgmdL2cVoIbS6XaE= github.com/elliotchance/orderedmap/v2 v2.7.0 h1:WHuf0DRo63uLnldCPp9ojm3gskYwEdIIfAUVG5KhoOc= From 14125d9dd0622bd2bb4f39bb15bf8bc0b686c976 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Thu, 14 Aug 2025 23:22:17 +0000 Subject: [PATCH 15/66] chore(deps): update actions/checkout digest to 08eba0b (#1240) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- .github/workflows/test.yml | 6 +++--- libs/go-kibana-rest/.github/workflows/workflow.yml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index e0cb146a7..8b45e8747 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -79,7 +79,7 @@ jobs: options: --restart="unless-stopped" steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 4f7808896..c16c340cc 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 5 steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' @@ -34,7 +34,7 @@ jobs: name: Lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' @@ -128,7 +128,7 @@ jobs: - '8.17.0' - '9.0.3' steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 - uses: 
actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' diff --git a/libs/go-kibana-rest/.github/workflows/workflow.yml b/libs/go-kibana-rest/.github/workflows/workflow.yml index 445a2da5c..705b919fc 100644 --- a/libs/go-kibana-rest/.github/workflows/workflow.yml +++ b/libs/go-kibana-rest/.github/workflows/workflow.yml @@ -18,7 +18,7 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 - name: Setup Go uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: From d41818a9ad2e41574395076290595c022dd6732c Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sat, 16 Aug 2025 10:27:07 +0000 Subject: [PATCH 16/66] chore(deps): update docker.elastic.co/kibana/kibana docker tag to v9.1.2 (#1233) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- libs/go-kibana-rest/docker-compose.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index 8b45e8747..38aca4fe3 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -48,7 +48,7 @@ jobs: - 9200:9200 options: --health-cmd="curl http://localhost:9200/_cluster/health" --health-interval=10s --health-timeout=5s --health-retries=10 kibana: - image: docker.elastic.co/kibana/kibana:9.0.3@sha256:c4c00a485fbc3619d8373f3bc74e9dd5b5a34380ef50442be4366e8fb57cd50a + image: docker.elastic.co/kibana/kibana:9.1.2@sha256:dea5d20df42e6833966deceadb10ecdbf85970b704d17b0abfc3b485622c1a08 env: SERVER_NAME: kibana ELASTICSEARCH_HOSTS: http://elasticsearch:9200 diff --git a/libs/go-kibana-rest/docker-compose.yml b/libs/go-kibana-rest/docker-compose.yml index 6a2d626a7..1190b94d4 100644 --- a/libs/go-kibana-rest/docker-compose.yml +++ b/libs/go-kibana-rest/docker-compose.yml @@ -11,7 +11,7 @@ services: ports: - "9200:9200/tcp" set-kibana-password: - image: docker.elastic.co/kibana/kibana:9.0.3@sha256:c4c00a485fbc3619d8373f3bc74e9dd5b5a34380ef50442be4366e8fb57cd50a + image: docker.elastic.co/kibana/kibana:9.1.2@sha256:dea5d20df42e6833966deceadb10ecdbf85970b704d17b0abfc3b485622c1a08 restart: on-failure links: - elasticsearch @@ -22,7 +22,7 @@ services: elasticsearch: condition: service_started kibana: - image: docker.elastic.co/kibana/kibana:9.0.3@sha256:c4c00a485fbc3619d8373f3bc74e9dd5b5a34380ef50442be4366e8fb57cd50a + image: docker.elastic.co/kibana/kibana:9.1.2@sha256:dea5d20df42e6833966deceadb10ecdbf85970b704d17b0abfc3b485622c1a08 environment: SERVER_NAME: kibana ELASTICSEARCH_HOSTS: http://es:9200 From 8beb2a27e2399752b61e36a6b879b89c2ba17dcb Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 09:05:28 +0200 Subject: [PATCH 17/66] chore(deps): update golang docker tag to v1.25.0 (#1245) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .buildkite/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/release.yml b/.buildkite/release.yml index 1877c6005..aee79f693 100644 --- a/.buildkite/release.yml +++ b/.buildkite/release.yml @@ -1,7 +1,7 @@ steps: - 
label: Release agents: - image: "golang:1.24.5@sha256:ef5b4be1f94b36c90385abd9b6b4f201723ae28e71acacb76d00687333c17282" + image: "golang:1.25.0@sha256:9e56f0d0f043a68bb8c47c819e47dc29f6e8f5129b8885bed9d43f058f7f3ed6" cpu: "16" memory: "24G" ephemeralStorage: "20G" From b0f7dd9e68968847c9fcb599578493504918dafc Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 09:18:41 +0200 Subject: [PATCH 18/66] chore(deps): update docker.elastic.co/elastic-agent/elastic-agent docker tag to v9.1.2 (#1242) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index 38aca4fe3..d6bb7f861 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -60,7 +60,7 @@ jobs: - 5601:5601 options: --health-cmd="curl http://localhost:5601/api/status" --health-interval=10s --health-timeout=5s --health-retries=10 fleet: - image: docker.elastic.co/elastic-agent/elastic-agent:9.1.1@sha256:0a1eeae3aa36d4195f307e98d5e4c16006653dc3e164a17649815e2a2fc4fb13 + image: docker.elastic.co/elastic-agent/elastic-agent:9.1.2@sha256:942aa0ffe94c268aab83881fc8be0ca0af079c395820ce8e7552f0ce97e0a760 env: SERVER_NAME: fleet FLEET_ENROLL: "1" From 5a665ef8a414f2653a283ad409e3c6daee0b601d Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 09:43:55 +0200 Subject: [PATCH 19/66] chore(deps): update docker.elastic.co/elasticsearch/elasticsearch docker tag to v9.1.2 (#1243) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- libs/go-kibana-rest/docker-compose.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index d6bb7f861..a1587ed7f 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -33,7 +33,7 @@ jobs: TF_ACC: "1" services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:9.1.0@sha256:47f91984aa1065d745f0a7c827bc912ea1ac439c635854902b65b6ed77f62055 + image: docker.elastic.co/elasticsearch/elasticsearch:9.1.2@sha256:d1a8016cf55be8ffec635ed69f5a9acb0c459db35b46a4549ec5b2847a2f170a env: discovery.type: single-node xpack.security.enabled: true diff --git a/libs/go-kibana-rest/docker-compose.yml b/libs/go-kibana-rest/docker-compose.yml index 1190b94d4..80a792946 100644 --- a/libs/go-kibana-rest/docker-compose.yml +++ b/libs/go-kibana-rest/docker-compose.yml @@ -1,6 +1,6 @@ services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:9.1.0@sha256:47f91984aa1065d745f0a7c827bc912ea1ac439c635854902b65b6ed77f62055 + image: docker.elastic.co/elasticsearch/elasticsearch:9.1.2@sha256:d1a8016cf55be8ffec635ed69f5a9acb0c459db35b46a4549ec5b2847a2f170a environment: cluster.name: test discovery.type: single-node From 189a2870c6e2a35cb2014912098b32bbb0d7bebe Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Mon, 18 Aug 2025 09:53:47 +0200 Subject: [PATCH 20/66] chore(deps): update dependency go to 
v1.25.0 (#1244) * chore(deps): update dependency go to v1.25.0 * Bump golangci-lint --------- Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Co-authored-by: Toby Brain --- Makefile | 2 +- go.mod | 2 +- libs/go-kibana-rest/go.mod | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 519c7ad70..770e0f548 100644 --- a/Makefile +++ b/Makefile @@ -246,7 +246,7 @@ install: build ## Install built provider into the local terraform cache .PHONY: tools tools: $(GOBIN) ## Download golangci-lint locally if necessary. - @[[ -f $(GOBIN)/golangci-lint ]] || curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOBIN) v2.3.1 + @[[ -f $(GOBIN)/golangci-lint ]] || curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOBIN) v2.4.0 .PHONY: golangci-lint golangci-lint: diff --git a/go.mod b/go.mod index 701ad9f86..8ab4e1cd9 100644 --- a/go.mod +++ b/go.mod @@ -2,7 +2,7 @@ module github.com/elastic/terraform-provider-elasticstack go 1.24.0 -toolchain go1.24.6 +toolchain go1.25.0 require ( github.com/disaster37/go-kibana-rest/v8 v8.5.0 diff --git a/libs/go-kibana-rest/go.mod b/libs/go-kibana-rest/go.mod index 44f994eee..fcadc9441 100644 --- a/libs/go-kibana-rest/go.mod +++ b/libs/go-kibana-rest/go.mod @@ -2,7 +2,7 @@ module github.com/disaster37/go-kibana-rest/v8 go 1.23.0 -toolchain go1.24.6 +toolchain go1.25.0 require ( github.com/go-resty/resty/v2 v2.16.5 From 37820652253e1d702c7f80697ada0d4411282a13 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Thu, 21 Aug 2025 21:23:03 +1000 Subject: [PATCH 21/66] chore(deps): update actions/checkout action to v5 (#1250) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- .github/workflows/test.yml | 6 +++--- libs/go-kibana-rest/.github/workflows/workflow.yml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index a1587ed7f..f7e3a3af4 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -79,7 +79,7 @@ jobs: options: --restart="unless-stopped" steps: - - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c16c340cc..a44ac8897 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 5 steps: - - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' @@ -34,7 +34,7 @@ jobs: name: Lint runs-on: ubuntu-latest steps: - - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' @@ -128,7 +128,7 @@ jobs: - '8.17.0' - '9.0.3' steps: 
- - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' diff --git a/libs/go-kibana-rest/.github/workflows/workflow.yml b/libs/go-kibana-rest/.github/workflows/workflow.yml index 705b919fc..d170bc598 100644 --- a/libs/go-kibana-rest/.github/workflows/workflow.yml +++ b/libs/go-kibana-rest/.github/workflows/workflow.yml @@ -18,7 +18,7 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@08eba0b27e820071cde6df949e0beb9ba4906955 # v4 + - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - name: Setup Go uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: From 6b2db46d98b9d3815b8107c38e929fde633ad808 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Fri, 22 Aug 2025 06:10:17 +1000 Subject: [PATCH 22/66] Fix copy pasta in datastream lifecycle docs (#1253) --- docs/resources/elasticsearch_data_stream_lifecycle.md | 2 +- .../resources/elasticsearch_data_stream_lifecycle.md.tmpl | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/resources/elasticsearch_data_stream_lifecycle.md b/docs/resources/elasticsearch_data_stream_lifecycle.md index 1c4d913d8..c06eb8096 100644 --- a/docs/resources/elasticsearch_data_stream_lifecycle.md +++ b/docs/resources/elasticsearch_data_stream_lifecycle.md @@ -6,7 +6,7 @@ description: |- Manages Lifecycle for Elasticsearch Data Streams --- -# Resource: elasticstack_elasticsearch_data_stream +# Resource: elasticstack_elasticsearch_data_stream_lifecycle Configures the data stream lifecycle for the targeted data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html diff --git a/templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl b/templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl index ee163bffb..c45c92f85 100644 --- a/templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl +++ b/templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl @@ -1,12 +1,12 @@ --- subcategory: "Index" layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_data_stream_lifecycle Resource" +page_title: "Elasticstack: {{ .Name }} {{ .Type }}" description: |- Manages Lifecycle for Elasticsearch Data Streams --- -# Resource: elasticstack_elasticsearch_data_stream +# {{ .Type }}: {{ .Name }} Configures the data stream lifecycle for the targeted data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html From e4d8744813676724c8c6415931b63fc62f021d72 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Aug 2025 21:03:29 +1000 Subject: [PATCH 23/66] Bump github.com/go-viper/mapstructure/v2 from 2.3.0 to 2.4.0 (#1254) Bumps [github.com/go-viper/mapstructure/v2](https://github.com/go-viper/mapstructure) from 2.3.0 to 2.4.0. - [Release notes](https://github.com/go-viper/mapstructure/releases) - [Changelog](https://github.com/go-viper/mapstructure/blob/main/CHANGELOG.md) - [Commits](https://github.com/go-viper/mapstructure/compare/v2.3.0...v2.4.0) --- updated-dependencies: - dependency-name: github.com/go-viper/mapstructure/v2 dependency-version: 2.4.0 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 8ab4e1cd9..1d214d2b2 100644 --- a/go.mod +++ b/go.mod @@ -187,7 +187,7 @@ require ( github.com/go-restruct/restruct v1.2.0-alpha // indirect github.com/go-resty/resty/v2 v2.16.5 // indirect github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1 // indirect - github.com/go-viper/mapstructure/v2 v2.3.0 // indirect + github.com/go-viper/mapstructure/v2 v2.4.0 // indirect github.com/gobwas/glob v0.2.3 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang-jwt/jwt/v4 v4.5.2 // indirect diff --git a/go.sum b/go.sum index 2770ee49e..b649d245e 100644 --- a/go.sum +++ b/go.sum @@ -445,8 +445,8 @@ github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1 h1:wG8n/XJQ07TmjbITcGi github.com/go-telegram-bot-api/telegram-bot-api/v5 v5.5.1/go.mod h1:A2S0CWkNylc2phvKXWBBdD3K0iGnDBGbzRpISP2zBl8= github.com/go-test/deep v1.1.1 h1:0r/53hagsehfO4bzD2Pgr/+RgHqhmf+k1Bpse2cTu1U= github.com/go-test/deep v1.1.1/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= -github.com/go-viper/mapstructure/v2 v2.3.0 h1:27XbWsHIqhbdR5TIC911OfYvgSaW93HM+dX7970Q7jk= -github.com/go-viper/mapstructure/v2 v2.3.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/go-viper/mapstructure/v2 v2.4.0 h1:EBsztssimR/CONLSZZ04E8qAkxNYq4Qp9LvH92wZUgs= +github.com/go-viper/mapstructure/v2 v2.4.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/go-yaml/yaml v2.1.0+incompatible/go.mod h1:w2MrLa16VYP0jy6N7M5kHaCkaLENm+P+Tv+MfurjSw0= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= From 559a9dc5181ac6fdeae0c031a328bec2baa11fe3 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sun, 24 Aug 2025 13:36:13 +1000 Subject: [PATCH 24/66] fix(deps): update module go.uber.org/mock to v0.6.0 (#1259) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- go.mod | 18 +++++++++--------- go.sum | 36 ++++++++++++++++++------------------ 2 files changed, 27 insertions(+), 27 deletions(-) diff --git a/go.mod b/go.mod index 1d214d2b2..48ba46149 100644 --- a/go.mod +++ b/go.mod @@ -23,7 +23,7 @@ require ( github.com/oapi-codegen/oapi-codegen/v2 v2.5.0 github.com/oapi-codegen/runtime v1.1.2 github.com/stretchr/testify v1.10.0 - go.uber.org/mock v0.5.2 + go.uber.org/mock v0.6.0 ) require ( @@ -375,17 +375,17 @@ require ( go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect gocloud.dev v0.41.0 // indirect - golang.org/x/crypto v0.39.0 // indirect + golang.org/x/crypto v0.41.0 // indirect golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 // indirect - golang.org/x/mod v0.25.0 // indirect - golang.org/x/net v0.41.0 // indirect + golang.org/x/mod v0.27.0 // indirect + golang.org/x/net v0.43.0 // indirect golang.org/x/oauth2 v0.30.0 // indirect - golang.org/x/sync v0.15.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/term v0.32.0 // indirect - golang.org/x/text v0.26.0 // indirect + golang.org/x/sync v0.16.0 // indirect + golang.org/x/sys v0.35.0 // indirect + golang.org/x/term v0.34.0 // indirect + golang.org/x/text v0.28.0 // indirect golang.org/x/time v0.11.0 // indirect - golang.org/x/tools 
v0.34.0 // indirect + golang.org/x/tools v0.36.0 // indirect golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect google.golang.org/api v0.228.0 // indirect google.golang.org/appengine v1.6.8 // indirect diff --git a/go.sum b/go.sum index b649d245e..1b7ae286c 100644 --- a/go.sum +++ b/go.sum @@ -1135,8 +1135,8 @@ go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwE go.uber.org/goleak v1.1.11-0.20210813005559-691160354723/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= -go.uber.org/mock v0.5.2 h1:LbtPTcP8A5k9WPXj54PPPbjcI4Y6lhyOZXn+VS7wNko= -go.uber.org/mock v0.5.2/go.mod h1:wLlUxC2vVTPTaE3UD51E0BGOAElKrILxhVSDYQLld5o= +go.uber.org/mock v0.6.0 h1:hyF9dfmbgIX5EfOdasqLsWD6xqpNZlXblLB/Dbnwv3Y= +go.uber.org/mock v0.6.0/go.mod h1:KiVJ4BqZJaMj4svdfmHM0AUx4NJYO8ZNpPnZn1Z+BBU= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= @@ -1160,8 +1160,8 @@ golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0 golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc= golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= -golang.org/x/crypto v0.39.0 h1:SHs+kF4LP+f+p14esP5jAoDpHU8Gu/v9lFRK6IT5imM= -golang.org/x/crypto v0.39.0/go.mod h1:L+Xg3Wf6HoL4Bn4238Z6ft6KfEpN0tJGo53AAPC632U= +golang.org/x/crypto v0.41.0 h1:WKYxWedPGCTVVl5+WHSSrOBT0O8lx32+zxmHxijgXp4= +golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sUc= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0 h1:R84qjqJb5nVJMxqWYb3np9L5ZsaDtB+a39EqjV0JSUM= golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5NjCrhFrqg6A5zA2E/iPHPhqnS8= @@ -1177,8 +1177,8 @@ golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91 golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.25.0 h1:n7a+ZbQKQA/Ysbyb0/6IbB1H/X41mKgbhfv7AfG/44w= -golang.org/x/mod v0.25.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= +golang.org/x/mod v0.27.0 h1:kb+q2PyFnEADO2IEF935ehFUXlWiNjJWtRNgBLSfbxQ= +golang.org/x/mod v0.27.0/go.mod h1:rWI627Fq0DEoudcK+MBkNkCe0EetEaDSwJJkCcjpazc= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1200,8 +1200,8 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= -golang.org/x/net v0.41.0 
h1:vBTly1HeNPEn3wtREYfy4GZ/NECgw2Cnl+nK6Nz3uvw= -golang.org/x/net v0.41.0/go.mod h1:B/K4NNqkfmg07DQYrbwvSluqCJOOXwUjeb/5lOisjbA= +golang.org/x/net v0.43.0 h1:lat02VYK2j4aLzMzecihNvTlJNQUq316m2Mr9rnM6YE= +golang.org/x/net v0.43.0/go.mod h1:vhO1fvI4dGsIjh73sWfUVjj3N7CA9WkKJNQm2svM6Jg= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.30.0 h1:dnDm7JmhM45NNpd8FDDeLhK6FwqbOf4MLCM9zb1BOHI= golang.org/x/oauth2 v0.30.0/go.mod h1:B++QgG3ZKulg6sRPGD/mqlHQs5rB3Ml9erfeDY7xKlU= @@ -1215,8 +1215,8 @@ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.15.0 h1:KWH3jNZsfyT6xfAfKiz6MRNmd46ByHDYaZ7KSkCtdW8= -golang.org/x/sync v0.15.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1252,8 +1252,8 @@ golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= +golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= +golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= @@ -1261,8 +1261,8 @@ golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= -golang.org/x/term v0.32.0 h1:DR4lr0TjUs3epypdhTOkMmuF5CDFJ/8pOnbzMZPQ7bg= -golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ= +golang.org/x/term v0.34.0 h1:O/2T7POpk0ZZ7MAzMeWFSg6S5IpWd/RXDlM9hgM3DR4= +golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= @@ -1272,8 +1272,8 @@ golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.9.0/go.mod 
h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.26.0 h1:P42AVeLghgTYr4+xUnTRKDMqpar+PtX7KWuNQL21L8M= -golang.org/x/text v0.26.0/go.mod h1:QK15LZJUUQVJxhz7wXgxSy/CJaTFjd0G+YLonydOVQA= +golang.org/x/text v0.28.0 h1:rhazDwis8INMIwQ4tpjLDzUhx6RlXqZNPEM0huQojng= +golang.org/x/text v0.28.0/go.mod h1:U8nCwOR8jO/marOQ0QbDiOngZVEBB7MAiitBuMjXiNU= golang.org/x/time v0.11.0 h1:/bpjEDfN9tkoN/ryeYHnv5hcMlc8ncjMcM4XBk5NWV0= golang.org/x/time v0.11.0/go.mod h1:CDIdPxbZBQxdj6cxyCIdrNogrJKMJ7pr37NYpMcMDSg= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= @@ -1294,8 +1294,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= -golang.org/x/tools v0.34.0 h1:qIpSLOxeCYGg9TrcJokLBG4KFA6d795g0xkBkiESGlo= -golang.org/x/tools v0.34.0/go.mod h1:pAP9OwEaY1CAW3HOmg3hLZC5Z0CCmzjAF2UQMSqNARg= +golang.org/x/tools v0.36.0 h1:kWS0uv/zsvHEle1LbV5LE8QujrxB3wfQyxHfhOk0Qkg= +golang.org/x/tools v0.36.0/go.mod h1:WBDiHKJK8YgLHlcQPYQzNCkUxUypCaa5ZegCVutKm+s= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= From 053b599353b632b7f9f8beef4a1b0303889b2885 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sun, 24 Aug 2025 13:36:47 +1000 Subject: [PATCH 25/66] chore(deps): update codecov/codecov-action digest to fdcc847 (#1258) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- libs/go-kibana-rest/.github/workflows/workflow.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/go-kibana-rest/.github/workflows/workflow.yml b/libs/go-kibana-rest/.github/workflows/workflow.yml index d170bc598..a399bda23 100644 --- a/libs/go-kibana-rest/.github/workflows/workflow.yml +++ b/libs/go-kibana-rest/.github/workflows/workflow.yml @@ -44,7 +44,7 @@ jobs: run: go build - name: Run test run: make test - - uses: codecov/codecov-action@18283e04ce6e62d37312384ff67231eb8fd56d24 # v5 + - uses: codecov/codecov-action@fdcc8476540edceab3de004e990f80d881c6cc00 # v5 with: files: coverage.out flags: unittests From ecc383b25de94367e6a8ebc103cc59b4a5fd80e6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 27 Aug 2025 12:13:06 +1000 Subject: [PATCH 26/66] Bump github.com/stretchr/testify from 1.10.0 to 1.11.0 (#1261) Bumps [github.com/stretchr/testify](https://github.com/stretchr/testify) from 1.10.0 to 1.11.0. - [Release notes](https://github.com/stretchr/testify/releases) - [Commits](https://github.com/stretchr/testify/compare/v1.10.0...v1.11.0) --- updated-dependencies: - dependency-name: github.com/stretchr/testify dependency-version: 1.11.0 dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 48ba46149..5a3b292f0 100644 --- a/go.mod +++ b/go.mod @@ -22,7 +22,7 @@ require ( github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c github.com/oapi-codegen/oapi-codegen/v2 v2.5.0 github.com/oapi-codegen/runtime v1.1.2 - github.com/stretchr/testify v1.10.0 + github.com/stretchr/testify v1.11.0 go.uber.org/mock v0.6.0 ) diff --git a/go.sum b/go.sum index 1b7ae286c..6e0ebf2a9 100644 --- a/go.sum +++ b/go.sum @@ -986,8 +986,8 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8= +github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/theupdateframework/go-tuf v0.7.0 h1:CqbQFrWo1ae3/I0UCblSbczevCCbS31Qvs5LdxRWqRI= From c9b6a6b387f1ae78c947f972080c9ebb117d6d73 Mon Sep 17 00:00:00 2001 From: Boris Ilyushonak <57406418+biscout42@users.noreply.github.com> Date: Wed, 27 Aug 2025 04:16:02 +0200 Subject: [PATCH 27/66] fix: add `namespace` attribute to `elasticstack_kibana_synthetics_monitor` resource (#1247) It allows to support setting data stream namespace independently from `space_id`. Fixes https://github.com/elastic/terraform-provider-elasticstack/issues/1164. Fixes https://github.com/elastic/terraform-provider-elasticstack/issues/1131. Fixes https://github.com/elastic/terraform-provider-elasticstack/issues/1083. 
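For illustration, a minimal monitor configuration using the new attribute might look like the sketch below. This is not taken from the changed docs verbatim: the resource label, space, namespace, schedule, location label, and URL are placeholder values, and only the attributes relevant to this change are shown.

```terraform
# Illustrative sketch only — all values are placeholders.
resource "elasticstack_kibana_synthetics_monitor" "example" {
  name              = "example-monitor"
  space_id          = "my-space"     # Kibana space the monitor is created in
  namespace         = "my_namespace" # data stream namespace, now set independently of space_id
  schedule          = 5
  private_locations = ["example-private-location"]

  http = {
    url = "http://localhost:5601"
  }
}
```

Note that changing `namespace` makes Kibana write to a new backing data stream, so users need permissions on both the old and new data streams to see the full monitor history after an update.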
Based on https://github.com/elastic/terraform-provider-elasticstack/pull/1208 --- CHANGELOG.md | 1 + docs/resources/kibana_synthetics_monitor.md | 3 +- internal/kibana/synthetics/acc_test.go | 30 ++++- internal/kibana/synthetics/create.go | 9 +- internal/kibana/synthetics/read.go | 9 +- internal/kibana/synthetics/schema.go | 27 ++++- internal/kibana/synthetics/schema_test.go | 26 +++- internal/kibana/synthetics/update.go | 9 +- libs/go-kibana-rest/docker-compose.yml | 4 +- libs/go-kibana-rest/kbapi/api._.go | 11 -- .../kbapi/api.kibana_dashboard.go | 114 ------------------ .../kbapi/api.kibana_dashboard_test.go | 46 ------- .../kbapi/api.kibana_spaces_test.go | 2 +- .../kbapi/api.kibana_synthetics.go | 38 +++--- .../kbapi/api.kibana_synthetics_test.go | 26 ++-- 15 files changed, 121 insertions(+), 234 deletions(-) delete mode 100644 libs/go-kibana-rest/kbapi/api.kibana_dashboard.go delete mode 100644 libs/go-kibana-rest/kbapi/api.kibana_dashboard_test.go diff --git a/CHANGELOG.md b/CHANGELOG.md index c5e418e54..01b9fadea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ - Add `slo_id` validation to `elasticstack_kibana_slo` ([#1221](https://github.com/elastic/terraform-provider-elasticstack/pull/1221)) - Add `ignore_missing_component_templates` to `elasticstack_elasticsearch_index_template` ([#1206](https://github.com/elastic/terraform-provider-elasticstack/pull/1206)) - Prevent provider panic when a script exists in state, but not in Elasticsearch ([#1218](https://github.com/elastic/terraform-provider-elasticstack/pull/1218)) +- Add `namespace` attribute to `elasticstack_kibana_synthetics_monitor` resource to support setting data stream namespace independently from `space_id` ([#1247](https://github.com/elastic/terraform-provider-elasticstack/pull/1247)) ## [0.11.17] - 2025-07-21 diff --git a/docs/resources/kibana_synthetics_monitor.md b/docs/resources/kibana_synthetics_monitor.md index fabdec368..80d8f591d 100644 --- a/docs/resources/kibana_synthetics_monitor.md +++ b/docs/resources/kibana_synthetics_monitor.md @@ -72,12 +72,13 @@ resource "elasticstack_kibana_synthetics_monitor" "my_monitor" { - `http` (Attributes) HTTP Monitor specific fields (see [below for nested schema](#nestedatt--http)) - `icmp` (Attributes) ICMP Monitor specific fields (see [below for nested schema](#nestedatt--icmp)) - `locations` (List of String) Where to deploy the monitor. Monitors can be deployed in multiple locations so that you can detect differences in availability and response times across those locations. +- `namespace` (String) The data stream namespace. Note: if you change its value, kibana creates new datastream. A user needs permissions for new/old datastream in update case to be able to see full monitor history. The `namespace` field should be lowercase and not contain spaces. The namespace must not include any of the following characters: *, \, /, ?, ", <, >, |, whitespace, ,, #, :, or -. Default: `default` - `params` (String) Monitor parameters. Raw JSON object, use `jsonencode` function to represent JSON - `private_locations` (List of String) These Private Locations refer to locations hosted and managed by you, whereas locations are hosted by Elastic. You can specify a Private Location using the location’s name. - `retest_on_failure` (Boolean) Enable or disable retesting when a monitor fails. By default, monitors are automatically retested if the monitor goes from "up" to "down". If the result of the retest is also "down", an error will be created, and if configured, an alert sent. 
Then the monitor will resume running according to the defined schedule. Using retest_on_failure can reduce noise related to transient problems. Default: `true`. - `schedule` (Number) The monitor’s schedule in minutes. Supported values are 1, 3, 5, 10, 15, 30, 60, 120 and 240. - `service_name` (String) The APM service name. -- `space_id` (String) The namespace field should be lowercase and not contain spaces. The namespace must not include any of the following characters: *, \, /, ?, ", <, >, |, whitespace, ,, #, :, or -. Default: `default` +- `space_id` (String) Kibana space. The space ID that is part of the Kibana URL when inside the space. Space IDs are limited to lowercase alphanumeric, underscore, and hyphen characters (a-z, 0-9, _, and -). You are cannot change the ID with the update operation. - `tags` (List of String) An array of tags. - `tcp` (Attributes) TCP Monitor specific fields (see [below for nested schema](#nestedatt--tcp)) - `timeout` (Number) The monitor timeout in seconds, monitor will fail if it doesn’t complete within this time. Default: `16` diff --git a/internal/kibana/synthetics/acc_test.go b/internal/kibana/synthetics/acc_test.go index dca2ed096..346fafbcd 100644 --- a/internal/kibana/synthetics/acc_test.go +++ b/internal/kibana/synthetics/acc_test.go @@ -32,6 +32,7 @@ resource "elasticstack_kibana_synthetics_monitor" "%s" { resource "elasticstack_kibana_synthetics_monitor" "%s" { name = "TestHttpMonitorResource - %s" space_id = "testacc" + namespace = "test_namespace" schedule = 5 private_locations = [elasticstack_kibana_synthetics_private_location.%s.label] enabled = true @@ -164,6 +165,7 @@ resource "elasticstack_kibana_synthetics_monitor" "%s" { resource "elasticstack_kibana_synthetics_monitor" "%s" { name = "TestTcpMonitorResource - %s" space_id = "testacc" + namespace = "testacc_test" schedule = 5 private_locations = [elasticstack_kibana_synthetics_private_location.%s.label] enabled = true @@ -230,6 +232,7 @@ resource "elasticstack_kibana_synthetics_monitor" "%s" { resource "elasticstack_kibana_synthetics_monitor" "%s" { name = "TestIcmpMonitorResource - %s" space_id = "testacc" + namespace = "testacc_namespace" schedule = 5 private_locations = [elasticstack_kibana_synthetics_private_location.%s.label] enabled = true @@ -279,6 +282,7 @@ resource "elasticstack_kibana_synthetics_monitor" "%s" { resource "elasticstack_kibana_synthetics_monitor" "%s" { name = "TestBrowserMonitorResource - %s" space_id = "testacc" + namespace = "testacc_ns" schedule = 5 private_locations = [elasticstack_kibana_synthetics_private_location.%s.label] enabled = true @@ -363,7 +367,8 @@ func TestSyntheticMonitorHTTPResource(t *testing.T) { Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttrSet(bmMonitorId, "id"), resource.TestCheckResourceAttr(bmMonitorId, "name", "TestHttpMonitorResource - "+bmName), - resource.TestCheckResourceAttr(bmMonitorId, "space_id", "default"), + resource.TestCheckResourceAttr(bmMonitorId, "space_id", ""), + resource.TestCheckResourceAttr(bmMonitorId, "namespace", "default"), resource.TestCheckResourceAttr(bmMonitorId, "alert.status.enabled", "true"), resource.TestCheckResourceAttr(bmMonitorId, "alert.tls.enabled", "true"), resource.TestCheckResourceAttr(bmMonitorId, "http.url", "http://localhost:5601"), @@ -376,7 +381,8 @@ func TestSyntheticMonitorHTTPResource(t *testing.T) { Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttrSet(sslHttpMonitorId, "id"), resource.TestCheckResourceAttr(sslHttpMonitorId, 
"name", "TestHttpMonitorResource - "+sslName), - resource.TestCheckResourceAttr(sslHttpMonitorId, "space_id", "default"), + resource.TestCheckResourceAttr(sslHttpMonitorId, "space_id", ""), + resource.TestCheckResourceAttr(sslHttpMonitorId, "namespace", "default"), resource.TestCheckResourceAttr(sslHttpMonitorId, "http.url", "http://localhost:5601"), resource.TestCheckResourceAttr(sslHttpMonitorId, "http.ssl_verification_mode", "full"), resource.TestCheckResourceAttr(sslHttpMonitorId, "http.ssl_supported_protocols.#", "1"), @@ -405,6 +411,7 @@ func TestSyntheticMonitorHTTPResource(t *testing.T) { resource.TestCheckResourceAttrSet(httpMonitorId, "id"), resource.TestCheckResourceAttr(httpMonitorId, "name", "TestHttpMonitorResource - "+name), resource.TestCheckResourceAttr(httpMonitorId, "space_id", "testacc"), + resource.TestCheckResourceAttr(httpMonitorId, "namespace", "test_namespace"), resource.TestCheckResourceAttr(httpMonitorId, "schedule", "5"), resource.TestCheckResourceAttr(httpMonitorId, "private_locations.#", "1"), resource.TestCheckResourceAttrSet(httpMonitorId, "private_locations.0"), @@ -446,6 +453,7 @@ func TestSyntheticMonitorHTTPResource(t *testing.T) { resource.TestCheckResourceAttrSet(httpMonitorId, "id"), resource.TestCheckResourceAttr(httpMonitorId, "name", "TestHttpMonitorResource Updated - "+name), resource.TestCheckResourceAttr(httpMonitorId, "space_id", "testacc"), + resource.TestCheckResourceAttr(httpMonitorId, "namespace", "test_namespace"), resource.TestCheckResourceAttr(httpMonitorId, "schedule", "10"), resource.TestCheckResourceAttr(httpMonitorId, "private_locations.#", "1"), resource.TestCheckResourceAttrSet(httpMonitorId, "private_locations.0"), @@ -509,7 +517,8 @@ func TestSyntheticMonitorTCPResource(t *testing.T) { Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttrSet(bmMonitorId, "id"), resource.TestCheckResourceAttr(bmMonitorId, "name", "TestTcpMonitorResource - "+bmName), - resource.TestCheckResourceAttr(bmMonitorId, "space_id", "default"), + resource.TestCheckResourceAttr(bmMonitorId, "space_id", ""), + resource.TestCheckResourceAttr(bmMonitorId, "namespace", "default"), resource.TestCheckResourceAttr(bmMonitorId, "tcp.host", "http://localhost:5601"), resource.TestCheckResourceAttr(bmMonitorId, "alert.status.enabled", "true"), resource.TestCheckResourceAttr(bmMonitorId, "alert.tls.enabled", "true"), @@ -523,7 +532,8 @@ func TestSyntheticMonitorTCPResource(t *testing.T) { Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttrSet(sslTcpMonitorId, "id"), resource.TestCheckResourceAttr(sslTcpMonitorId, "name", "TestHttpMonitorResource - "+sslName), - resource.TestCheckResourceAttr(sslTcpMonitorId, "space_id", "default"), + resource.TestCheckResourceAttr(sslTcpMonitorId, "space_id", ""), + resource.TestCheckResourceAttr(sslTcpMonitorId, "namespace", "default"), resource.TestCheckResourceAttr(sslTcpMonitorId, "tcp.host", "http://localhost:5601"), resource.TestCheckResourceAttr(sslTcpMonitorId, "tcp.ssl_verification_mode", "full"), resource.TestCheckResourceAttr(sslTcpMonitorId, "tcp.ssl_supported_protocols.#", "1"), @@ -552,6 +562,7 @@ func TestSyntheticMonitorTCPResource(t *testing.T) { resource.TestCheckResourceAttrSet(tcpMonitorId, "id"), resource.TestCheckResourceAttr(tcpMonitorId, "name", "TestTcpMonitorResource - "+name), resource.TestCheckResourceAttr(tcpMonitorId, "space_id", "testacc"), + resource.TestCheckResourceAttr(tcpMonitorId, "namespace", "testacc_test"), 
resource.TestCheckResourceAttr(tcpMonitorId, "schedule", "5"), resource.TestCheckResourceAttr(tcpMonitorId, "private_locations.#", "1"), resource.TestCheckResourceAttrSet(tcpMonitorId, "private_locations.0"), @@ -590,6 +601,7 @@ func TestSyntheticMonitorTCPResource(t *testing.T) { resource.TestCheckResourceAttrSet(tcpMonitorId, "id"), resource.TestCheckResourceAttr(tcpMonitorId, "name", "TestTcpMonitorResource Updated - "+name), resource.TestCheckResourceAttr(tcpMonitorId, "space_id", "testacc"), + resource.TestCheckResourceAttr(tcpMonitorId, "namespace", "testacc_test"), resource.TestCheckResourceAttr(tcpMonitorId, "schedule", "10"), resource.TestCheckResourceAttr(tcpMonitorId, "private_locations.#", "1"), resource.TestCheckResourceAttrSet(tcpMonitorId, "private_locations.0"), @@ -643,7 +655,8 @@ func TestSyntheticMonitorICMPResource(t *testing.T) { Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttrSet(bmMonitorId, "id"), resource.TestCheckResourceAttr(bmMonitorId, "name", "TestIcmpMonitorResource - "+bmName), - resource.TestCheckResourceAttr(bmMonitorId, "space_id", "default"), + resource.TestCheckResourceAttr(bmMonitorId, "space_id", ""), + resource.TestCheckResourceAttr(bmMonitorId, "namespace", "default"), resource.TestCheckResourceAttr(bmMonitorId, "icmp.host", "localhost"), resource.TestCheckResourceAttr(bmMonitorId, "alert.status.enabled", "true"), resource.TestCheckResourceAttr(bmMonitorId, "alert.tls.enabled", "true"), @@ -658,6 +671,7 @@ func TestSyntheticMonitorICMPResource(t *testing.T) { resource.TestCheckResourceAttrSet(icmpMonitorId, "id"), resource.TestCheckResourceAttr(icmpMonitorId, "name", "TestIcmpMonitorResource - "+name), resource.TestCheckResourceAttr(icmpMonitorId, "space_id", "testacc"), + resource.TestCheckResourceAttr(icmpMonitorId, "namespace", "testacc_namespace"), resource.TestCheckResourceAttr(icmpMonitorId, "schedule", "5"), resource.TestCheckResourceAttr(icmpMonitorId, "private_locations.#", "1"), resource.TestCheckResourceAttrSet(icmpMonitorId, "private_locations.0"), @@ -689,6 +703,7 @@ func TestSyntheticMonitorICMPResource(t *testing.T) { resource.TestCheckResourceAttrSet(icmpMonitorId, "id"), resource.TestCheckResourceAttr(icmpMonitorId, "name", "TestIcmpMonitorResource Updated - "+name), resource.TestCheckResourceAttr(icmpMonitorId, "space_id", "testacc"), + resource.TestCheckResourceAttr(icmpMonitorId, "namespace", "testacc_namespace"), resource.TestCheckResourceAttr(icmpMonitorId, "schedule", "10"), resource.TestCheckResourceAttr(icmpMonitorId, "private_locations.#", "1"), resource.TestCheckResourceAttrSet(icmpMonitorId, "private_locations.0"), @@ -735,7 +750,8 @@ func TestSyntheticMonitorBrowserResource(t *testing.T) { Check: resource.ComposeAggregateTestCheckFunc( resource.TestCheckResourceAttrSet(bmMonitorId, "id"), resource.TestCheckResourceAttr(bmMonitorId, "name", "TestBrowserMonitorResource - "+bmName), - resource.TestCheckResourceAttr(bmMonitorId, "space_id", "default"), + resource.TestCheckResourceAttr(bmMonitorId, "space_id", ""), + resource.TestCheckResourceAttr(bmMonitorId, "namespace", "default"), resource.TestCheckResourceAttr(bmMonitorId, "browser.inline_script", "step('Go to https://google.com.co', () => page.goto('https://www.google.com'))"), resource.TestCheckResourceAttr(bmMonitorId, "alert.status.enabled", "true"), resource.TestCheckResourceAttr(bmMonitorId, "alert.tls.enabled", "true"), @@ -749,6 +765,7 @@ func TestSyntheticMonitorBrowserResource(t *testing.T) { 
resource.TestCheckResourceAttrSet(browserMonitorId, "id"), resource.TestCheckResourceAttr(browserMonitorId, "name", "TestBrowserMonitorResource - "+name), resource.TestCheckResourceAttr(browserMonitorId, "space_id", "testacc"), + resource.TestCheckResourceAttr(browserMonitorId, "namespace", "testacc_ns"), resource.TestCheckResourceAttr(browserMonitorId, "schedule", "5"), resource.TestCheckResourceAttr(browserMonitorId, "private_locations.#", "1"), resource.TestCheckResourceAttrSet(browserMonitorId, "private_locations.0"), @@ -780,6 +797,7 @@ func TestSyntheticMonitorBrowserResource(t *testing.T) { resource.TestCheckResourceAttrSet(browserMonitorId, "id"), resource.TestCheckResourceAttr(browserMonitorId, "name", "TestBrowserMonitorResource Updated - "+name), resource.TestCheckResourceAttr(browserMonitorId, "space_id", "testacc"), + resource.TestCheckResourceAttr(browserMonitorId, "namespace", "testacc_ns"), resource.TestCheckResourceAttr(browserMonitorId, "schedule", "10"), resource.TestCheckResourceAttr(browserMonitorId, "private_locations.#", "1"), resource.TestCheckResourceAttrSet(browserMonitorId, "private_locations.0"), diff --git a/internal/kibana/synthetics/create.go b/internal/kibana/synthetics/create.go index 830b9aa73..2aacea31a 100644 --- a/internal/kibana/synthetics/create.go +++ b/internal/kibana/synthetics/create.go @@ -3,6 +3,7 @@ package synthetics import ( "context" "fmt" + "github.com/hashicorp/terraform-plugin-framework/resource" ) @@ -26,14 +27,14 @@ func (r *Resource) Create(ctx context.Context, request resource.CreateRequest, r return } - namespace := plan.SpaceID.ValueString() - result, err := kibanaClient.KibanaSynthetics.Monitor.Add(ctx, input.config, input.fields, namespace) + spaceId := plan.SpaceID.ValueString() + result, err := kibanaClient.KibanaSynthetics.Monitor.Add(ctx, input.config, input.fields, spaceId) if err != nil { - response.Diagnostics.AddError(fmt.Sprintf("Failed to create Kibana monitor `%s`, namespace %s", input.config.Name, namespace), err.Error()) + response.Diagnostics.AddError(fmt.Sprintf("Failed to create Kibana monitor `%s`, space %s", input.config.Name, spaceId), err.Error()) return } - plan, diags = plan.toModelV0(ctx, result) + plan, diags = plan.toModelV0(ctx, result, spaceId) response.Diagnostics.Append(diags...) 
if response.Diagnostics.HasError() { return diff --git a/internal/kibana/synthetics/read.go b/internal/kibana/synthetics/read.go index e7a13db7f..8d13887fd 100644 --- a/internal/kibana/synthetics/read.go +++ b/internal/kibana/synthetics/read.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "github.com/disaster37/go-kibana-rest/v8/kbapi" "github.com/hashicorp/terraform-plugin-framework/resource" ) @@ -28,9 +29,9 @@ func (r *Resource) Read(ctx context.Context, request resource.ReadRequest, respo return } - namespace := compositeId.ClusterId + spaceId := compositeId.ClusterId monitorId := kbapi.MonitorID(compositeId.ResourceId) - result, err := kibanaClient.KibanaSynthetics.Monitor.Get(ctx, monitorId, namespace) + result, err := kibanaClient.KibanaSynthetics.Monitor.Get(ctx, monitorId, spaceId) if err != nil { var apiError *kbapi.APIError if errors.As(err, &apiError) && apiError.Code == 404 { @@ -38,11 +39,11 @@ func (r *Resource) Read(ctx context.Context, request resource.ReadRequest, respo return } - response.Diagnostics.AddError(fmt.Sprintf("Failed to get monitor `%s`, namespace %s", monitorId, namespace), err.Error()) + response.Diagnostics.AddError(fmt.Sprintf("Failed to get monitor `%s`, space %s", monitorId, spaceId), err.Error()) return } - state, diags = state.toModelV0(ctx, result) + state, diags = state.toModelV0(ctx, result, spaceId) response.Diagnostics.Append(diags...) if response.Diagnostics.HasError() { return diff --git a/internal/kibana/synthetics/schema.go b/internal/kibana/synthetics/schema.go index e77fce16e..365a9ee90 100644 --- a/internal/kibana/synthetics/schema.go +++ b/internal/kibana/synthetics/schema.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "regexp" "strconv" "github.com/disaster37/go-kibana-rest/v8/kbapi" @@ -99,6 +100,7 @@ type tfModelV0 struct { ID types.String `tfsdk:"id"` Name types.String `tfsdk:"name"` SpaceID types.String `tfsdk:"space_id"` + Namespace types.String `tfsdk:"namespace"` Schedule types.Int64 `tfsdk:"schedule"` Locations []types.String `tfsdk:"locations"` PrivateLocations []types.String `tfsdk:"private_locations"` @@ -143,7 +145,7 @@ func monitorConfigSchema() schema.Schema { MarkdownDescription: "The monitor’s name.", }, "space_id": schema.StringAttribute{ - MarkdownDescription: "The namespace field should be lowercase and not contain spaces. The namespace must not include any of the following characters: *, \\, /, ?, \", <, >, |, whitespace, ,, #, :, or -. Default: `default`", + MarkdownDescription: "Kibana space. The space ID that is part of the Kibana URL when inside the space. Space IDs are limited to lowercase alphanumeric, underscore, and hyphen characters (a-z, 0-9, _, and -). You are cannot change the ID with the update operation.", Optional: true, PlanModifiers: []planmodifier.String{ stringplanmodifier.UseStateForUnknown(), @@ -151,6 +153,20 @@ func monitorConfigSchema() schema.Schema { }, Computed: true, }, + "namespace": schema.StringAttribute{ + MarkdownDescription: "The data stream namespace. Note: if you change its value, kibana creates new datastream. A user needs permissions for new/old datastream in update case to be able to see full monitor history. The `namespace` field should be lowercase and not contain spaces. The namespace must not include any of the following characters: *, \\, /, ?, \", <, >, |, whitespace, ,, #, :, or -. 
Default: `default`", + Optional: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + Computed: true, + Validators: []validator.String{ + stringvalidator.RegexMatches( + regexp.MustCompile(`^[^*\\/?\"<>|\s,#:-]*$`), + "namespace must not contain any of the following characters: *, \\, /, ?, \", <, >, |, whitespace, ,, #, :, or -", + ), + }, + }, "schedule": schema.Int64Attribute{ Optional: true, MarkdownDescription: "The monitor’s schedule in minutes. Supported values are 1, 3, 5, 10, 15, 30, 60, 120 and 240.", @@ -566,7 +582,7 @@ func stringToInt64(v string) (int64, error) { return res, err } -func (v *tfModelV0) toModelV0(ctx context.Context, api *kbapi.SyntheticsMonitor) (*tfModelV0, diag.Diagnostics) { +func (v *tfModelV0) toModelV0(ctx context.Context, api *kbapi.SyntheticsMonitor, space string) (*tfModelV0, diag.Diagnostics) { var schedule int64 var err error dg := diag.Diagnostics{} @@ -640,7 +656,7 @@ func (v *tfModelV0) toModelV0(ctx context.Context, api *kbapi.SyntheticsMonitor) } resourceID := clients.CompositeId{ - ClusterId: api.Namespace, + ClusterId: space, ResourceId: string(api.Id), } @@ -652,7 +668,8 @@ func (v *tfModelV0) toModelV0(ctx context.Context, api *kbapi.SyntheticsMonitor) return &tfModelV0{ ID: types.StringValue(resourceID.String()), Name: types.StringValue(api.Name), - SpaceID: types.StringValue(api.Namespace), + SpaceID: types.StringValue(space), + Namespace: types.StringValue(api.Namespace), Schedule: types.Int64Value(schedule), Locations: v.Locations, PrivateLocations: StringSliceValue(privateLocLabels), @@ -883,7 +900,7 @@ func (v *tfModelV0) toSyntheticsMonitorConfig(ctx context.Context) (*kbapi.Synth Alert: toTFAlertConfig(ctx, v.Alert), APMServiceName: v.APMServiceName.ValueString(), TimeoutSeconds: int(v.TimeoutSeconds.ValueInt64()), - Namespace: v.SpaceID.ValueString(), + Namespace: v.Namespace.ValueString(), Params: params, RetestOnFailure: v.RetestOnFailure.ValueBoolPointer(), }, diag.Diagnostics{} //dg diff --git a/internal/kibana/synthetics/schema_test.go b/internal/kibana/synthetics/schema_test.go index 1711530bf..e5c5bb60a 100644 --- a/internal/kibana/synthetics/schema_test.go +++ b/internal/kibana/synthetics/schema_test.go @@ -3,9 +3,10 @@ package synthetics import ( "context" "encoding/json" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" "testing" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/disaster37/go-kibana-rest/v8/kbapi" "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" "github.com/hashicorp/terraform-plugin-framework/attr" @@ -49,6 +50,7 @@ func TestToModelV0(t *testing.T) { ID: types.StringValue("/"), Name: types.StringValue(""), SpaceID: types.StringValue(""), + Namespace: types.StringValue(""), Schedule: types.Int64Value(0), APMServiceName: types.StringValue(""), TimeoutSeconds: types.Int64Value(0), @@ -83,6 +85,7 @@ func TestToModelV0(t *testing.T) { ID: types.StringValue("/"), Name: types.StringValue(""), SpaceID: types.StringValue(""), + Namespace: types.StringValue(""), Schedule: types.Int64Value(0), APMServiceName: types.StringValue(""), TimeoutSeconds: types.Int64Value(0), @@ -111,6 +114,7 @@ func TestToModelV0(t *testing.T) { ID: types.StringValue("/"), Name: types.StringValue(""), SpaceID: types.StringValue(""), + Namespace: types.StringValue(""), Schedule: types.Int64Value(0), APMServiceName: types.StringValue(""), TimeoutSeconds: types.Int64Value(0), @@ -130,6 +134,7 @@ func TestToModelV0(t *testing.T) { 
ID: types.StringValue("/"), Name: types.StringValue(""), SpaceID: types.StringValue(""), + Namespace: types.StringValue(""), Schedule: types.Int64Value(0), APMServiceName: types.StringValue(""), TimeoutSeconds: types.Int64Value(0), @@ -191,6 +196,7 @@ func TestToModelV0(t *testing.T) { ID: types.StringValue("default/test-id-http"), Name: types.StringValue("test-name-http"), SpaceID: types.StringValue("default"), + Namespace: types.StringValue("default"), Schedule: types.Int64Value(5), Locations: []types.String{types.StringValue("us_east")}, PrivateLocations: []types.String{types.StringValue("test private location")}, @@ -229,7 +235,7 @@ func TestToModelV0(t *testing.T) { input: kbapi.SyntheticsMonitor{ Id: "test-id-tcp", Name: "test-name-tcp", - Namespace: "default", + Namespace: "default-2", Enabled: tBool, Alert: &kbapi.MonitorAlertConfig{Status: &kbapi.SyntheticsStatusConfig{Enabled: tBool}}, Schedule: &kbapi.MonitorScheduleConfig{Number: "5", Unit: "m"}, @@ -261,6 +267,7 @@ func TestToModelV0(t *testing.T) { ID: types.StringValue("default/test-id-tcp"), Name: types.StringValue("test-name-tcp"), SpaceID: types.StringValue("default"), + Namespace: types.StringValue("default-2"), Schedule: types.Int64Value(5), Locations: nil, PrivateLocations: []types.String{types.StringValue("test private location")}, @@ -320,6 +327,7 @@ func TestToModelV0(t *testing.T) { ID: types.StringValue("default/test-id-icmp"), Name: types.StringValue("test-name-icmp"), SpaceID: types.StringValue("default"), + Namespace: types.StringValue("default"), Schedule: types.Int64Value(5), Locations: nil, PrivateLocations: []types.String{types.StringValue("test private location")}, @@ -375,6 +383,7 @@ func TestToModelV0(t *testing.T) { ID: types.StringValue("default/test-id-browser"), Name: types.StringValue("test-name-browser"), SpaceID: types.StringValue("default"), + Namespace: types.StringValue("default"), Schedule: types.Int64Value(5), Locations: nil, PrivateLocations: []types.String{types.StringValue("test private location")}, @@ -398,7 +407,7 @@ func TestToModelV0(t *testing.T) { for _, tt := range testcases { t.Run(tt.name, func(t *testing.T) { ctx := context.Background() - model, diag := tt.expected.toModelV0(ctx, &tt.input) + model, diag := tt.expected.toModelV0(ctx, &tt.input, tt.expected.SpaceID.ValueString()) assert.False(t, diag.HasError()) assert.Equal(t, &tt.expected, model) }) @@ -457,6 +466,7 @@ func TestToKibanaAPIRequest(t *testing.T) { ID: types.StringValue("test-id-http"), Name: types.StringValue("test-name-http"), SpaceID: types.StringValue("default"), + Namespace: types.StringValue("default-3"), Schedule: types.Int64Value(5), Locations: []types.String{types.StringValue("us_east")}, PrivateLocations: []types.String{types.StringValue("test private location")}, @@ -500,7 +510,7 @@ func TestToKibanaAPIRequest(t *testing.T) { Tags: []string{"tag1", "tag2"}, Alert: &kbapi.MonitorAlertConfig{Status: &kbapi.SyntheticsStatusConfig{Enabled: tBool}, Tls: &kbapi.SyntheticsStatusConfig{Enabled: fBool}}, APMServiceName: "test-service-http", - Namespace: "default", + Namespace: "default-3", TimeoutSeconds: 30, Params: kbapi.JsonObject{"param1": "value1"}, }, @@ -533,6 +543,7 @@ func TestToKibanaAPIRequest(t *testing.T) { ID: types.StringValue("test-id-tcp"), Name: types.StringValue("test-name-tcp"), SpaceID: types.StringValue("default"), + Namespace: types.StringValue("default"), Schedule: types.Int64Value(5), Locations: []types.String{types.StringValue("us_east")}, PrivateLocations: nil, @@ -597,6 +608,7 @@ func 
TestToKibanaAPIRequest(t *testing.T) { ID: types.StringValue("test-id-icmp"), Name: types.StringValue("test-name-icmp"), SpaceID: types.StringValue("default"), + Namespace: types.StringValue("default"), Schedule: types.Int64Value(5), Locations: []types.String{types.StringValue("us_east")}, PrivateLocations: nil, @@ -637,6 +649,7 @@ func TestToKibanaAPIRequest(t *testing.T) { ID: types.StringValue("test-id-browser"), Name: types.StringValue("test-name-browser"), SpaceID: types.StringValue("default"), + Namespace: types.StringValue("default"), Schedule: types.Int64Value(5), Locations: []types.String{types.StringValue("us_east")}, PrivateLocations: nil, @@ -722,6 +735,7 @@ func TestToModelV0MergeAttributes(t *testing.T) { ID: types.StringValue("/"), Name: types.StringValue(""), SpaceID: types.StringValue(""), + Namespace: types.StringValue(""), Schedule: types.Int64Value(0), APMServiceName: types.StringValue(""), TimeoutSeconds: types.Int64Value(0), @@ -767,6 +781,7 @@ func TestToModelV0MergeAttributes(t *testing.T) { ID: types.StringValue("/"), Name: types.StringValue(""), SpaceID: types.StringValue(""), + Namespace: types.StringValue(""), Schedule: types.Int64Value(0), APMServiceName: types.StringValue(""), TimeoutSeconds: types.Int64Value(0), @@ -801,6 +816,7 @@ func TestToModelV0MergeAttributes(t *testing.T) { ID: types.StringValue("/"), Name: types.StringValue(""), SpaceID: types.StringValue(""), + Namespace: types.StringValue(""), Schedule: types.Int64Value(0), APMServiceName: types.StringValue(""), TimeoutSeconds: types.Int64Value(0), @@ -816,7 +832,7 @@ func TestToModelV0MergeAttributes(t *testing.T) { for _, tt := range testcases { t.Run(tt.name, func(t *testing.T) { ctx := context.Background() - actual, diag := tt.state.toModelV0(ctx, &tt.input) + actual, diag := tt.state.toModelV0(ctx, &tt.input, tt.state.SpaceID.ValueString()) assert.False(t, diag.HasError()) assert.NotNil(t, actual) assert.Equal(t, &tt.expected, actual) diff --git a/internal/kibana/synthetics/update.go b/internal/kibana/synthetics/update.go index c7544622b..3902615b3 100644 --- a/internal/kibana/synthetics/update.go +++ b/internal/kibana/synthetics/update.go @@ -3,6 +3,7 @@ package synthetics import ( "context" "fmt" + "github.com/disaster37/go-kibana-rest/v8/kbapi" "github.com/hashicorp/terraform-plugin-framework/resource" ) @@ -33,14 +34,14 @@ func (r *Resource) Update(ctx context.Context, request resource.UpdateRequest, r return } - namespace := plan.SpaceID.ValueString() - result, err := kibanaClient.KibanaSynthetics.Monitor.Update(ctx, kbapi.MonitorID(monitorId.ResourceId), input.config, input.fields, namespace) + spaceId := plan.SpaceID.ValueString() + result, err := kibanaClient.KibanaSynthetics.Monitor.Update(ctx, kbapi.MonitorID(monitorId.ResourceId), input.config, input.fields, spaceId) if err != nil { - response.Diagnostics.AddError(fmt.Sprintf("Failed to update Kibana monitor `%s`, namespace %s", input.config.Name, namespace), err.Error()) + response.Diagnostics.AddError(fmt.Sprintf("Failed to update Kibana monitor `%s`, space %s", input.config.Name, spaceId), err.Error()) return } - plan, diags = plan.toModelV0(ctx, result) + plan, diags = plan.toModelV0(ctx, result, spaceId) response.Diagnostics.Append(diags...) 
if response.Diagnostics.HasError() { return diff --git a/libs/go-kibana-rest/docker-compose.yml b/libs/go-kibana-rest/docker-compose.yml index 80a792946..515a53e3c 100644 --- a/libs/go-kibana-rest/docker-compose.yml +++ b/libs/go-kibana-rest/docker-compose.yml @@ -11,7 +11,7 @@ services: ports: - "9200:9200/tcp" set-kibana-password: - image: docker.elastic.co/kibana/kibana:9.1.2@sha256:dea5d20df42e6833966deceadb10ecdbf85970b704d17b0abfc3b485622c1a08 + image: docker.elastic.co/kibana/kibana:9.0.3@sha256:c4c00a485fbc3619d8373f3bc74e9dd5b5a34380ef50442be4366e8fb57cd50a restart: on-failure links: - elasticsearch @@ -22,7 +22,7 @@ services: elasticsearch: condition: service_started kibana: - image: docker.elastic.co/kibana/kibana:9.1.2@sha256:dea5d20df42e6833966deceadb10ecdbf85970b704d17b0abfc3b485622c1a08 + image: docker.elastic.co/kibana/kibana:9.0.3@sha256:c4c00a485fbc3619d8373f3bc74e9dd5b5a34380ef50442be4366e8fb57cd50a environment: SERVER_NAME: kibana ELASTICSEARCH_HOSTS: http://es:9200 diff --git a/libs/go-kibana-rest/kbapi/api._.go b/libs/go-kibana-rest/kbapi/api._.go index 5d8c4fe30..cce4657e0 100644 --- a/libs/go-kibana-rest/kbapi/api._.go +++ b/libs/go-kibana-rest/kbapi/api._.go @@ -8,7 +8,6 @@ import ( type API struct { KibanaSpaces *KibanaSpacesAPI KibanaRoleManagement *KibanaRoleManagementAPI - KibanaDashboard *KibanaDashboardAPI KibanaSavedObject *KibanaSavedObjectAPI KibanaStatus *KibanaStatusAPI KibanaLogstashPipeline *KibanaLogstashPipelineAPI @@ -34,12 +33,6 @@ type KibanaRoleManagementAPI struct { Delete KibanaRoleManagementDelete } -// KibanaDashboardAPI handle the dashboard API -type KibanaDashboardAPI struct { - Export KibanaDashboardExport - Import KibanaDashboardImport -} - // KibanaSavedObjectAPI handle the saved object API type KibanaSavedObjectAPI struct { Get KibanaSavedObjectGet @@ -92,10 +85,6 @@ func New(c *resty.Client) *API { CreateOrUpdate: newKibanaRoleManagementCreateOrUpdateFunc(c), Delete: newKibanaRoleManagementDeleteFunc(c), }, - KibanaDashboard: &KibanaDashboardAPI{ - Export: newKibanaDashboardExportFunc(c), - Import: newKibanaDashboardImportFunc(c), - }, KibanaSavedObject: &KibanaSavedObjectAPI{ Get: newKibanaSavedObjectGetFunc(c), Find: newKibanaSavedObjectFindFunc(c), diff --git a/libs/go-kibana-rest/kbapi/api.kibana_dashboard.go b/libs/go-kibana-rest/kbapi/api.kibana_dashboard.go deleted file mode 100644 index 69be75594..000000000 --- a/libs/go-kibana-rest/kbapi/api.kibana_dashboard.go +++ /dev/null @@ -1,114 +0,0 @@ -package kbapi - -import ( - "encoding/json" - "fmt" - "github.com/go-resty/resty/v2" - log "github.com/sirupsen/logrus" - "strings" -) - -const ( - basePathKibanaDashboard = "/api/kibana/dashboards" // Base URL to access on Kibana dashboard -) - -// KibanaDashboardExport permit to export dashboard -type KibanaDashboardExport func(listID []string, kibanaSpace string) (map[string]interface{}, error) - -// KibanaDashboardImport permit to import dashboard -type KibanaDashboardImport func(data map[string]interface{}, listExcludeType []string, force bool, kibanaSpace string) error - -// newKibanaDashboardExportFunc permit to export Kibana dashboard by its names -func newKibanaDashboardExportFunc(c *resty.Client) KibanaDashboardExport { - return func(listID []string, kibanaSpace string) (map[string]interface{}, error) { - - if len(listID) == 0 { - return nil, NewAPIError(600, "You must provide on or more dashboard ID") - } - log.Debug("listID: ", listID) - log.Debug("kibanaSpace: ", kibanaSpace) - - var path string - if kibanaSpace == "" || 
kibanaSpace == "default" { - path = fmt.Sprintf("%s/export", basePathKibanaDashboard) - } else { - path = fmt.Sprintf("/s/%s%s/export", kibanaSpace, basePathKibanaDashboard) - } - - log.Debugf("Url to export: %s", path) - - query := fmt.Sprintf("dashboard=%s", strings.Join(listID, ",")) - resp, err := c.R().SetQueryString(query).Get(path) - if err != nil { - return nil, err - } - log.Debug("Response: ", resp) - if resp.StatusCode() >= 300 { - if resp.StatusCode() == 404 { - return nil, nil - } - return nil, NewAPIError(resp.StatusCode(), resp.Status()) - - } - var data map[string]interface{} - err = json.Unmarshal(resp.Body(), &data) - if err != nil { - return nil, err - } - log.Debug("Data: ", data) - - return data, nil - } - -} - -// newKibanaDashboardImportFunc permit to import kibana dashboard -func newKibanaDashboardImportFunc(c *resty.Client) KibanaDashboardImport { - return func(data map[string]interface{}, listExcludeType []string, force bool, kibanaSpace string) error { - - if data == nil { - return NewAPIError(600, "You must provide one or more dashboard to import") - } - log.Debug("data: ", data) - log.Debug("List type to exclude: ", listExcludeType) - log.Debug("Force import: ", force) - log.Debug("KibanaSpace: ", kibanaSpace) - - var path string - if kibanaSpace == "" || kibanaSpace == "default" { - path = fmt.Sprintf("%s/import", basePathKibanaDashboard) - } else { - path = fmt.Sprintf("/s/%s%s/import", kibanaSpace, basePathKibanaDashboard) - } - - log.Debugf("URL to import %s", path) - - request := c.R().SetQueryString(fmt.Sprintf("force=%t", force)) - if len(listExcludeType) > 0 { - request = request.SetQueryString(fmt.Sprintf("exclude=%s", strings.Join(listExcludeType, ","))) - } - jsonData, err := json.Marshal(data) - if err != nil { - return err - } - resp, err := request.SetBody(jsonData).Post(path) - if err != nil { - return err - } - log.Debug("Response: ", resp) - if resp.StatusCode() >= 300 { - return NewAPIError(resp.StatusCode(), resp.Status()) - } - var dataResponse map[string]interface{} - err = json.Unmarshal(resp.Body(), &dataResponse) - if err != nil { - return err - } - log.Debug("Data response: ", dataResponse) - - // Need to manage error returned in response - - return nil - } - -} diff --git a/libs/go-kibana-rest/kbapi/api.kibana_dashboard_test.go b/libs/go-kibana-rest/kbapi/api.kibana_dashboard_test.go deleted file mode 100644 index 93150c668..000000000 --- a/libs/go-kibana-rest/kbapi/api.kibana_dashboard_test.go +++ /dev/null @@ -1,46 +0,0 @@ -package kbapi - -import ( - "encoding/json" - "os" - - "github.com/stretchr/testify/assert" -) - -func (s *KBAPITestSuite) TestKibanaDashboard() { - - // Import dashboard from fixtures - b, err := os.ReadFile("../fixtures/kibana-dashboard.json") - if err != nil { - panic(err) - } - data := make(map[string]interface{}) - if err = json.Unmarshal(b, &data); err != nil { - panic(err) - } - err = s.API.KibanaDashboard.Import(data, nil, true, "default") - assert.NoError(s.T(), err) - - // Export dashboard - data, err = s.API.KibanaDashboard.Export([]string{"edf84fe0-e1a0-11e7-b6d5-4dc382ef7f5b"}, "default") - assert.NoError(s.T(), err) - assert.NotNil(s.T(), data) - - // Import dashboard from fixtures in specific space - b, err = os.ReadFile("../fixtures/kibana-dashboard.json") - if err != nil { - panic(err) - } - data = make(map[string]interface{}) - if err = json.Unmarshal(b, &data); err != nil { - panic(err) - } - err = s.API.KibanaDashboard.Import(data, nil, true, "testacc") - assert.NoError(s.T(), err) - - // 
Export dashboard from specific space - data, err = s.API.KibanaDashboard.Export([]string{"edf84fe0-e1a0-11e7-b6d5-4dc382ef7f5b"}, "testacc") - assert.NoError(s.T(), err) - assert.NotNil(s.T(), data) - -} diff --git a/libs/go-kibana-rest/kbapi/api.kibana_spaces_test.go b/libs/go-kibana-rest/kbapi/api.kibana_spaces_test.go index 77d34a1d8..c7a4f9a26 100644 --- a/libs/go-kibana-rest/kbapi/api.kibana_spaces_test.go +++ b/libs/go-kibana-rest/kbapi/api.kibana_spaces_test.go @@ -42,7 +42,7 @@ func (s *KBAPITestSuite) TestKibanaSpaces() { Objects: []KibanaSpaceObjectParameter{ { Type: "config", - ID: "8.17.0", + ID: "9.0.3", }, }, } diff --git a/libs/go-kibana-rest/kbapi/api.kibana_synthetics.go b/libs/go-kibana-rest/kbapi/api.kibana_synthetics.go index 99bde4939..6389d699f 100644 --- a/libs/go-kibana-rest/kbapi/api.kibana_synthetics.go +++ b/libs/go-kibana-rest/kbapi/api.kibana_synthetics.go @@ -326,13 +326,13 @@ func (f BrowserMonitorFields) APIRequest(config SyntheticsMonitorConfig) interfa } } -type KibanaSyntheticsMonitorAdd func(ctx context.Context, config SyntheticsMonitorConfig, fields MonitorFields, namespace string) (*SyntheticsMonitor, error) +type KibanaSyntheticsMonitorAdd func(ctx context.Context, config SyntheticsMonitorConfig, fields MonitorFields, space string) (*SyntheticsMonitor, error) -type KibanaSyntheticsMonitorUpdate func(ctx context.Context, id MonitorID, config SyntheticsMonitorConfig, fields MonitorFields, namespace string) (*SyntheticsMonitor, error) +type KibanaSyntheticsMonitorUpdate func(ctx context.Context, id MonitorID, config SyntheticsMonitorConfig, fields MonitorFields, space string) (*SyntheticsMonitor, error) -type KibanaSyntheticsMonitorGet func(ctx context.Context, id MonitorID, namespace string) (*SyntheticsMonitor, error) +type KibanaSyntheticsMonitorGet func(ctx context.Context, id MonitorID, space string) (*SyntheticsMonitor, error) -type KibanaSyntheticsMonitorDelete func(ctx context.Context, namespace string, ids ...MonitorID) ([]MonitorDeleteStatus, error) +type KibanaSyntheticsMonitorDelete func(ctx context.Context, space string, ids ...MonitorID) ([]MonitorDeleteStatus, error) type KibanaSyntheticsPrivateLocationCreate func(ctx context.Context, pLoc PrivateLocationConfig) (*PrivateLocation, error) @@ -390,8 +390,8 @@ func newKibanaSyntheticsPrivateLocationDeleteFunc(c *resty.Client) KibanaSynthet } func newKibanaSyntheticsMonitorGetFunc(c *resty.Client) KibanaSyntheticsMonitorGet { - return func(ctx context.Context, id MonitorID, namespace string) (*SyntheticsMonitor, error) { - path := basePathWithId(namespace, monitorsSuffix, id) + return func(ctx context.Context, id MonitorID, space string) (*SyntheticsMonitor, error) { + path := basePathWithId(space, monitorsSuffix, id) log.Debugf("URL to get monitor: %s", path) resp, err := c.R().SetContext(ctx).Get(path) @@ -403,8 +403,8 @@ func newKibanaSyntheticsMonitorGetFunc(c *resty.Client) KibanaSyntheticsMonitorG } func newKibanaSyntheticsMonitorDeleteFunc(c *resty.Client) KibanaSyntheticsMonitorDelete { - return func(ctx context.Context, namespace string, ids ...MonitorID) ([]MonitorDeleteStatus, error) { - path := basePath(namespace, monitorsSuffix) + return func(ctx context.Context, space string, ids ...MonitorID) ([]MonitorDeleteStatus, error) { + path := basePath(space, monitorsSuffix) log.Debugf("URL to delete monitors: %s", path) resp, err := c.R().SetContext(ctx).SetBody(map[string]interface{}{ @@ -420,9 +420,9 @@ func newKibanaSyntheticsMonitorDeleteFunc(c *resty.Client) KibanaSyntheticsMonit } 
func newKibanaSyntheticsMonitorUpdateFunc(c *resty.Client) KibanaSyntheticsMonitorUpdate { - return func(ctx context.Context, id MonitorID, config SyntheticsMonitorConfig, fields MonitorFields, namespace string) (*SyntheticsMonitor, error) { + return func(ctx context.Context, id MonitorID, config SyntheticsMonitorConfig, fields MonitorFields, space string) (*SyntheticsMonitor, error) { - path := basePathWithId(namespace, monitorsSuffix, id) + path := basePathWithId(space, monitorsSuffix, id) log.Debugf("URL to update monitor: %s", path) data := fields.APIRequest(config) resp, err := c.R().SetContext(ctx).SetBody(data).Put(path) @@ -434,9 +434,9 @@ func newKibanaSyntheticsMonitorUpdateFunc(c *resty.Client) KibanaSyntheticsMonit } func newKibanaSyntheticsMonitorAddFunc(c *resty.Client) KibanaSyntheticsMonitorAdd { - return func(ctx context.Context, config SyntheticsMonitorConfig, fields MonitorFields, namespace string) (*SyntheticsMonitor, error) { + return func(ctx context.Context, config SyntheticsMonitorConfig, fields MonitorFields, space string) (*SyntheticsMonitor, error) { - path := basePath(namespace, monitorsSuffix) + path := basePath(space, monitorsSuffix) log.Debugf("URL to create monitor: %s", path) data := fields.APIRequest(config) resp, err := c.R().SetContext(ctx).SetBody(data).Post(path) @@ -510,18 +510,18 @@ func handleKibanaError(err error, resp *resty.Response) error { return nil } -func basePathWithId(namespace, suffix string, id any) string { - return fmt.Sprintf("%s/%s", basePath(namespace, suffix), id) +func basePathWithId(space, suffix string, id any) string { + return fmt.Sprintf("%s/%s", basePath(space, suffix), id) } -func basePath(namespace, suffix string) string { - return namespaceBasesPath(namespace, basePathKibanaSynthetics, suffix) +func basePath(space, suffix string) string { + return spaceBasesPath(space, basePathKibanaSynthetics, suffix) } -func namespaceBasesPath(namespace, basePath, suffix string) string { - if namespace == "" || namespace == "default" { +func spaceBasesPath(space, basePath, suffix string) string { + if space == "" || space == "default" { return fmt.Sprintf("%s%s", basePath, suffix) } - return fmt.Sprintf("/s/%s%s%s", namespace, basePath, suffix) + return fmt.Sprintf("/s/%s%s%s", space, basePath, suffix) } diff --git a/libs/go-kibana-rest/kbapi/api.kibana_synthetics_test.go b/libs/go-kibana-rest/kbapi/api.kibana_synthetics_test.go index ac6d6fbc5..1c48454a6 100644 --- a/libs/go-kibana-rest/kbapi/api.kibana_synthetics_test.go +++ b/libs/go-kibana-rest/kbapi/api.kibana_synthetics_test.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "strings" "testing" "github.com/go-resty/resty/v2" @@ -12,22 +13,22 @@ import ( ) var ( - spaces = []string{"", "default", "testacc"} + spaces = []string{"", "default", "testacc", "sample-with-dash"} ) -func testWithPolicy(t *testing.T, client *resty.Client, namespace string, f func(policyId string)) { +func testWithPolicy(t *testing.T, client *resty.Client, space_id string, f func(policyId string)) { policyName := uuid.New().String() - path := namespaceBasesPath(namespace, "/api/fleet", "/agent_policies") + path := spaceBasesPath(space_id, "/api/fleet", "/agent_policies") - if namespace == "" { - namespace = "default" + if space_id == "" { + space_id = "default" } policyResponse, err := client.R().SetBody(map[string]interface{}{ "name": fmt.Sprintf("Test synthetics monitor policy %s", policyName), "description": "test policy for synthetics API", - "namespace": namespace, + "namespace": 
strings.Replace(space_id, "-", "_", -1), "monitoring_enabled": []string{"logs", "metrics"}, }).Post(path) assert.NoError(t, err) @@ -64,6 +65,7 @@ func (s *KBAPITestSuite) TestKibanaSyntheticsMonitorAPI() { for _, n := range spaces { testUuid := uuid.New().String() space := n + namespace := strings.Replace(n, "-", "_", -1) syntheticsAPI := s.API.KibanaSynthetics testWithPolicy(s.T(), s.client, space, func(policyId string) { @@ -138,7 +140,7 @@ func (s *KBAPITestSuite) TestKibanaSyntheticsMonitorAPI() { }, APMServiceName: "APMServiceName", TimeoutSeconds: 42, - Namespace: space, + Namespace: namespace, Params: map[string]interface{}{ "param1": "some-params", "my_url": "http://localhost:8080", @@ -212,7 +214,7 @@ func (s *KBAPITestSuite) TestKibanaSyntheticsMonitorAPI() { }, APMServiceName: "APMServiceName", TimeoutSeconds: 42, - Namespace: space, + Namespace: namespace, Params: map[string]interface{}{ "param1": "some-params", "my_url": "http://localhost:8080", @@ -280,7 +282,7 @@ func (s *KBAPITestSuite) TestKibanaSyntheticsMonitorAPI() { }, APMServiceName: "APMServiceName", TimeoutSeconds: 42, - Namespace: space, + Namespace: namespace, Params: map[string]interface{}{ "param1": "some-params", "my_url": "http://localhost:8080", @@ -338,7 +340,7 @@ func (s *KBAPITestSuite) TestKibanaSyntheticsMonitorAPI() { }, APMServiceName: "APMServiceName", TimeoutSeconds: 42, - Namespace: space, + Namespace: namespace, Params: map[string]interface{}{ "param1": "some-params", "my_url": "http://localhost:8080", @@ -424,11 +426,11 @@ func (s *KBAPITestSuite) TestKibanaSyntheticsPrivateLocationAPI() { for _, n := range spaces { testUuid := uuid.New().String() - namespace := n + space_id := n pAPI := s.API.KibanaSynthetics.PrivateLocation s.Run(fmt.Sprintf("TestKibanaSyntheticsPrivateLocationAPI - %s", n), func() { - testWithPolicy(s.T(), s.client, namespace, func(policyId string) { + testWithPolicy(s.T(), s.client, space_id, func(policyId string) { cfg := PrivateLocationConfig{ Label: fmt.Sprintf("TestKibanaSyntheticsPrivateLocationAPI-%s", testUuid), From b2121091a64572a50eef928cdfadb8fa460c0d0b Mon Sep 17 00:00:00 2001 From: Dominik Giger Date: Wed, 27 Aug 2025 14:49:52 +0200 Subject: [PATCH 28/66] Extend CONTRIBUTING.md (#1262) Add some more detailed instructions on how to write code for the provider. This is more of a starting point which can be extended further. The goal is to have a basic guidance for anyone new to the provider development: Which plugin to use, which resource to use as a baseline example, etc. --- CONTRIBUTING.md | 150 ++++++++++++++++++++++++++++++++++++------------ README.md | 57 +----------------- 2 files changed, 114 insertions(+), 93 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 24b7a27b0..a66169b62 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,60 +1,136 @@ -# Typical development workflow +# Contributing -Fork the repo, work on an issue +This guide explains how to set up your environment, make changes, and submit a PR. -## Updating the generated Kibana client. +## Development Setup -If your work involves the Kibana API, the endpoints may or may not be included in the generated client. -Check [generated/kbapi](./generated/kbapi/) for more details. +* Fork and clone the repo. +* Setup your preferred IDE (IntelliJ, VSCode, etc.) 
-## Acceptance tests +Requirements: +* [Terraform](https://www.terraform.io/downloads.html) >= 1.0.0 +* [Go](https://golang.org/doc/install) >= 1.25 +* Docker (for acceptance tests) -```bash -make docker-testacc -``` +## Development Workflow -Run a single test with terraform debug enabled: -```bash -env TF_LOG=DEBUG make docker-testacc TESTARGS='-run ^TestAccResourceDataStreamLifecycle$$' -``` +* Create a new branch for your changes. +* Make your changes. See [Useful Commands](#useful-commands) and [Debugging](#running--debugging-the-provider). +* Validate your changes + * Run unit and acceptance tests (See [Running Acceptance Tests](#running-acceptance-tests)). + * Run `make lint` to check linting and formatting. For this check to succeed, all changes must have been committed. + * All checks also run automatically on every PR. +* Submit your PR for review. +* Add a changelog entry in `CHANGELOG.md` under the `Unreleased` section. This will be included in the release notes of the next release. The changelog entry references the PR, so it has to be added after the PR has been opened. -A way to forward debug logs to a file: -```bash -env TF_ACC_LOG_PATH=/tmp/tf.log TF_ACC_LOG=DEBUG TF_LOG=DEBUG make docker-testacc -``` +When creating new resources: +* Use the [Plugin Framework](https://developer.hashicorp.com/terraform/plugin/framework/getting-started/code-walkthrough) for new resources. + * Use an existing resource (e.g. `internal/elasticsearch/security/system_user`) as a template. + * Some resources use the deprecated Terraform SDK, so only resources using the new Terraform Framework should be used as reference. +* Use the generated API clients to interact with the Kibana APIs. (See [Working with Generated API Clients](#working-with-generated-api-clients) +* Add a documentation template and examples for the resource. See [Updating Documentation](#updating-documentation) for more details. +* Write unit and acceptance tests. +### Useful Commands -## Update documentation +* `make build`: Build the provider. +* `make lint`: Lints and formats the code. +* `make test`: Run unit tests. +* `make docs-generate`: Generate documentation. +* [Running & Debugging the Provider](#running--debugging-the-provider) +* [Running Acceptance Tests](#running-acceptance-tests) -Update documentation templates in `./templates` directory and re-generate docs via: -```bash -make docs-generate -``` +### Running & Debugging the Provider -## Update `./CHANGELOG.md` +You can run the currently checked-out code for local testing and use it with Terraform. -List of previous commits is a good example of what should be included in the changelog. +Also see [Terraform docs on debugging](https://developer.hashicorp.com/terraform/plugin/debugging#starting-a-provider-in-debug-mode). +Run the provider in debug mode and reattach the provider in Terraform: +* Launch `main.go` with the `-debug` flag from your IDE. + * Or launch it with `go run main.go -debug` from the command line. +* After launching, the provider will print an env var. Copy the printed `TF_REATTACH_PROVIDERS='{…}'` value. +* Export it in your shell where you run Terraform: `export TF_REATTACH_PROVIDERS='{…}'`. +* Terraform will now talk to your debug instance, and you can set breakpoints. -## Pull request +### Running Acceptance Tests -Format the code before pushing: -```bash -make fmt -``` +Acceptance tests spin up Elasticsearch, Kibana, and Fleet with Docker and run tests in a Go container. 
-Check if the linting: ```bash -make lint -``` +# Start Elasticsearch, Kibana, and Fleet +make docker-fleet -Create a PR and check acceptance test matrix is green. +# Run all tests +make testacc -## Run provider with local terraform +# Run a specific test +make testacc TESTARGS='-run ^TestAccResourceDataStreamLifecycle$$' -TBD +# Cleanup created docker containers +make docker-clean +``` -## Releasing +### Working with Generated API Clients + +If your work involves the Kibana API, the API client can be generated directly from the Kibana OpenAPI specs: +- For Kibana APIs, use the generated client in `generated/kbapi`. +- To add new endpoints, see [generated/kbapi/README.md](generated/kbapi/README.md). +- Regenerate clients with: + ```sh + make transform generate + ``` + +The codebase includes a number of deprecated clients which should not be used anymore: +- `libs/go-kibana-rest`: Fork of an external library, which is not maintained anymore. +- `generated/alerting`, `generated/connectors`, `generated/slo`: Older generated clients, but based on non-standard specs. If any of these APIs are needed, they should be included in the `kbapi` client. + +### Updating Documentation + +Docs are generated from templates in `templates/` and examples in `examples/`. +* Update or add templates and examples. +* Run `make docs-generate` to produce files under `docs/`. +* Commit the generated files. `make lint` will fail if docs are stale. + +## Project Structure + +A quick overview over what's in each folder: + +* `docs/` - Documentation files + * `data-sources/` - Documentation for Terraform data sources + * `guides/` - User guides and tutorials + * `resources/` - Documentation for Terraform resources +* `examples/` - Example Terraform configurations + * `cloud/` - Examples using the cloud to launch testing stacks + * `data-sources/` - Data source usage examples + * `resources/` - Resource usage examples + * `provider/` - Provider configuration examples +* `generated/` - Auto-generated clients from the `generate-clients` make target + * `kbapi/` - Kibana API client + * `alerting/` - (Deprecated) Kibana alerting API client + * `connectors/` - (Deprecated) Kibana connectors API client + * `slo/` - (Deprecated) SLO (Service Level Objective) API client +* `internal/` - Internal Go packages + * `acctest/` - Acceptance test utilities + * `clients/` - API client implementations + * `elasticsearch/` - Elasticsearch-specific logic + * `fleet/` - Fleet management functionality + * `kibana/` - Kibana-specific logic + * `models/` - Data models and structures + * `schema/` - Connection schema definitions for plugin framework + * `utils/` - Utility functions + * `versionutils/` - Version handling utilities +* `libs/` - External libraries + * `go-kibana-rest/` - (Deprecated) Kibana REST API client library +* `provider/` - Core Terraform provider implementation +* `scripts/` - Utility scripts for development and CI +* `templates/` - Template files for documentation generation + * `data-sources/` - Data source documentation templates + * `resources/` - Resource documentation templates + * `guides/` - Guide documentation templates +* `xpprovider/` - Additional provider functionality needed for Crossplane + +## Releasing (maintainers) Releasing is implemented in CI pipeline. @@ -65,4 +141,4 @@ To release a new provider version: - updates CHANGELOG.md with the list of changes being released. [Example](https://github.com/elastic/terraform-provider-elasticstack/commit/be866ebc918184e843dc1dd2f6e2e1b963da386d). 
-* Once the PR is merged, the release CI pipeline can be started by pushing a new release tag to the `main` branch. +* Once the PR is merged, the release CI pipeline can be started by pushing a new release tag to the `main` branch. (`git tag v0.11.13 && git push origin v0.11.13`) diff --git a/README.md b/README.md index 9adefc90d..1de54e1c0 100644 --- a/README.md +++ b/README.md @@ -76,64 +76,9 @@ provider "elasticstack" { } ``` - ## Developing the Provider -If you wish to work on the provider, you'll first need [Go](http://www.golang.org) installed on your machine (see [Requirements](#requirements)). - -To compile the provider, run `go install`. This will build the provider and put the provider binary in the `$GOPATH/bin` directory. - -To install the provider locally into the `~/.terraform.d/plugins/...` directory one can use `make install` command. This will allow to refer this provider directly in the Terraform configuration without needing to download it from the registry. - -To generate or update documentation, run `make gen`. All the generated docs will have to be committed to the repository as well. - -In order to run the full suite of Acceptance tests, run `make testacc`. - -If you have [Docker](https://docs.docker.com/get-docker/) installed, you can use following command to start the Elasticsearch container and run Acceptance tests against it: - -```sh -$ make docker-testacc -``` - -To clean up the used containers and to free up the assigned container names, run `make docker-clean`. - -Note: there have been some issues encountered when using `tfenv` for local development. It's recommended you move your version management for terraform to `asdf` instead. - - -### Requirements - -- [Terraform](https://www.terraform.io/downloads.html) >= 1.0.0 -- [Go](https://golang.org/doc/install) >= 1.19 - - -### Building The Provider - -1. Clone the repository -1. Enter the repository directory -1. Build the provider using the `make install` command: -```sh -$ make install -``` - - -### Adding Dependencies - -This provider uses [Go modules](https://github.com/golang/go/wiki/Modules). -Please see the Go documentation for the most up to date information about using Go modules. - -To add a new dependency `github.com/author/dependency` to your Terraform provider: - -``` -go get github.com/author/dependency -go mod tidy -``` - -Then commit the changes to `go.mod` and `go.sum`. - -### Generating Kibana clients - -Kibana clients for some APIs are generated based on Kibana OpenAPI specs. -Please see [Makefile](./Makefile) tasks for more details. +See [CONTRIBUTING.md](CONTRIBUTING.md) ## Support From 3f6be4bfdedc0dc864576dc1a61136b32350620e Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Fri, 29 Aug 2025 08:36:55 +1000 Subject: [PATCH 29/66] Migrate Kibana connectors to use the bundled openapi generated client (#1260) * Migrate Kibana connectors to use the bundled openapi generated client * Whelp, actually fix the issue... 
:( --- generated/connectors/README.md | 44 - generated/connectors/bundled.yaml | 3632 ---------- generated/connectors/connectors.gen.go | 5720 ---------------- generated/connectors/connectors.go | 3 - generated/kbapi/kibana.gen.go | 6086 ++++++++++++++--- generated/kbapi/transform_schema.go | 102 +- internal/clients/api_client.go | 49 - internal/clients/kibana/connector.go | 1670 ----- internal/clients/kibana/connector_test.go | 256 - internal/clients/kibana_oapi/connector.go | 513 ++ .../clients/kibana_oapi/connector_test.go | 264 + internal/clients/kibana_oapi/errors.go | 11 + internal/kibana/connector.go | 56 +- internal/kibana/connector_data_source.go | 8 +- internal/kibana/connector_test.go | 151 +- 15 files changed, 6155 insertions(+), 12410 deletions(-) delete mode 100644 generated/connectors/README.md delete mode 100644 generated/connectors/bundled.yaml delete mode 100644 generated/connectors/connectors.gen.go delete mode 100644 generated/connectors/connectors.go delete mode 100644 internal/clients/kibana/connector.go delete mode 100644 internal/clients/kibana/connector_test.go create mode 100644 internal/clients/kibana_oapi/connector.go create mode 100644 internal/clients/kibana_oapi/connector_test.go diff --git a/generated/connectors/README.md b/generated/connectors/README.md deleted file mode 100644 index e4e3c852d..000000000 --- a/generated/connectors/README.md +++ /dev/null @@ -1,44 +0,0 @@ -[OpenAPI specs](./bundled.yaml) is copied from [Kibana repo](https://raw.githubusercontent.com/elastic/kibana/8.7/x-pack/plugins/actions/docs/openapi/bundled.yaml) with some modifications: - -- added `.gemini` as a possible value for `connector_types`; -- `.slack_api` connector support comes from version 8.8 of the API specification; -- added `.slack_api` as a possible value for `connector_types`; -- added mapping section for discriminator field in `POST` `/s/{spaceId}/api/actions/connector`; -- added explicit object definitions for `400`, `401` and `404` errors (`oapi-codegen` doesn't generate proper code for embedded anonymous objects in some cases) - `bad_request_error`, `authorization_error` and `object_not_found_error`; -- added missing `oneOf` types in `requestBody` for `PUT` `/s/{spaceId}/api/actions/connector/{connectorId}` - the original `bundled.yaml` misses some connector types in the `PUT` `requestBody` defintion: - - `update_connector_request_email`; - - `update_connector_request_pagerduty`; - - `update_connector_request_servicenow_sir`; - - `update_connector_request_slack`; - - `update_connector_request_slack_api`; - - `update_connector_request_teams`; - - `update_connector_request_tines`; - - `update_connector_request_webhook`; - - `update_connector_request_xmatters`. -- response definitions of `/s/{spaceId}/api/actions/connector/{connectorId}/_execute` and `/s/{spaceId}/api/actions/action/{actionId}/_execute` are modified from embedded object definitions to named ones `run_connector_general_response` and `legacy_run_connector_general_response`; -- specified properties for following types. 
The original `bundled.yaml` defines them as dynamic objects (`additionalProperties: true`): - - `config_propeties_email`; - - `config_properties_pagerduty`; - - `config_properties_tines`; - - `config_properties_webhook`; - - `config_properties_xmatters`; -- `is_deprecated` is marked as optional field (it's required field in the vanilla `bundled.yaml`) in the following objects (Kibana responses may omit it): - - `connector_response_properties_cases_webhook`; - - `connector_response_properties_email`; - - `connector_response_properties_index`; - - `connector_response_properties_jira`; - - `connector_response_properties_opsgenie`; - - `connector_response_properties_pagerduty`; - - `connector_response_properties_resilient`; - - `connector_response_properties_serverlog`; - - `connector_response_properties_servicenow`; - - `connector_response_properties_servicenow_itom`; - - `connector_response_properties_servicenow_sir`; - - `connector_response_properties_slack`; - - `connector_response_properties_slack_api`; - - `connector_response_properties_swimlane`; - - `connector_response_properties_teams`; - - `connector_response_properties_tines`; - - `connector_response_properties_webhook`; - - `connector_response_properties_xmatters`. -- added mapping section for discriminator field in `connector_response_properties`. diff --git a/generated/connectors/bundled.yaml b/generated/connectors/bundled.yaml deleted file mode 100644 index f93c47be6..000000000 --- a/generated/connectors/bundled.yaml +++ /dev/null @@ -1,3632 +0,0 @@ -openapi: 3.0.1 -info: - title: Connectors - description: OpenAPI schema for Connectors endpoints - version: '0.1' - contact: - name: Connectors Team - license: - name: Elastic License 2.0 - url: https://www.elastic.co/licensing/elastic-license -tags: - - name: connectors - description: Connector APIs enable you to create and manage connectors. -servers: - - url: http://localhost:5601 - description: local -paths: - /s/{spaceId}/api/actions/connector: - post: - summary: Creates a connector. - operationId: createConnector - description: | - You must have `all` privileges for the **Actions and Connectors** feature in the **Management** section of the Kibana feature privileges. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/kbn_xsrf' - - $ref: '#/components/parameters/space_id' - requestBody: - required: true - content: - application/json: - schema: - title: Create connector request body properties - description: The properties vary depending on the connector type. 
- oneOf: - - $ref: '#/components/schemas/create_connector_request_cases_webhook' - - $ref: '#/components/schemas/create_connector_request_email' - - $ref: '#/components/schemas/create_connector_request_gemini' - - $ref: '#/components/schemas/create_connector_request_index' - - $ref: '#/components/schemas/create_connector_request_jira' - - $ref: '#/components/schemas/create_connector_request_opsgenie' - - $ref: '#/components/schemas/create_connector_request_pagerduty' - - $ref: '#/components/schemas/create_connector_request_resilient' - - $ref: '#/components/schemas/create_connector_request_serverlog' - - $ref: '#/components/schemas/create_connector_request_servicenow' - - $ref: '#/components/schemas/create_connector_request_servicenow_itom' - - $ref: '#/components/schemas/create_connector_request_servicenow_sir' - - $ref: '#/components/schemas/create_connector_request_slack' - - $ref: '#/components/schemas/create_connector_request_slack_api' - - $ref: '#/components/schemas/create_connector_request_swimlane' - - $ref: '#/components/schemas/create_connector_request_teams' - - $ref: '#/components/schemas/create_connector_request_tines' - - $ref: '#/components/schemas/create_connector_request_webhook' - - $ref: '#/components/schemas/create_connector_request_xmatters' - discriminator: - propertyName: connector_type_id - mapping: - .cases-webhook: '#/components/schemas/create_connector_request_cases_webhook' - .email: '#/components/schemas/create_connector_request_email' - .gemini: '#/components/schemas/create_connector_request_gemini' - .index: '#/components/schemas/create_connector_request_index' - .jira: '#/components/schemas/create_connector_request_jira' - .opsgenie: '#/components/schemas/create_connector_request_opsgenie' - .pagerduty: '#/components/schemas/create_connector_request_pagerduty' - .resilient: '#/components/schemas/create_connector_request_resilient' - .server-log: '#/components/schemas/create_connector_request_serverlog' - .servicenow: '#/components/schemas/create_connector_request_servicenow' - .servicenow-itom: '#/components/schemas/create_connector_request_servicenow_itom' - .servicenow-sir: '#/components/schemas/create_connector_request_servicenow_sir' - .slack: '#/components/schemas/create_connector_request_slack' - .slack_api: '#/components/schemas/create_connector_request_slack_api' - .swimlane: '#/components/schemas/create_connector_request_swimlane' - .teams: '#/components/schemas/create_connector_request_teams' - .tines: '#/components/schemas/create_connector_request_tines' - .webhook: '#/components/schemas/create_connector_request_webhook' - .xmatters: '#/components/schemas/create_connector_request_xmatters' - examples: - createIndexConnectorRequest: - $ref: '#/components/examples/create_index_connector_request' - responses: - '200': - description: Indicates a successful call. - content: - application/json: - schema: - $ref: '#/components/schemas/connector_response_properties' - examples: - createIndexConnectorResponse: - $ref: '#/components/examples/create_index_connector_response' - '400': - description: Indicates a bad request. - content: - application/json: - schema: - $ref: '#/components/schemas/bad_request_error' - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - servers: - - url: https://localhost:5601 - servers: - - url: https://localhost:5601 - /s/{spaceId}/api/actions/connector/{connectorId}: - get: - summary: Retrieves a connector by ID. 
- operationId: getConnector - description: | - You must have `read` privileges for the **Actions and Connectors** feature in the **Management** section of the Kibana feature privileges. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/connector_id' - - $ref: '#/components/parameters/space_id' - responses: - '200': - description: Indicates a successful call. - content: - application/json: - schema: - $ref: '#/components/schemas/connector_response_properties' - examples: - getConnectorResponse: - $ref: '#/components/examples/get_connector_response' - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - '404': - description: Object is not found. - content: - application/json: - schema: - type: object - properties: - error: - type: string - example: Not Found - message: - type: string - example: Saved object [action/baf33fc0-920c-11ed-b36a-874bd1548a00] not found - statusCode: - type: integer - example: 404 - servers: - - url: https://localhost:5601 - delete: - summary: Deletes a connector. - operationId: deleteConnector - description: | - You must have `all` privileges for the **Actions and Connectors** feature in the **Management** section of the Kibana feature privileges. WARNING: When you delete a connector, it cannot be recovered. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/kbn_xsrf' - - $ref: '#/components/parameters/connector_id' - - $ref: '#/components/parameters/space_id' - responses: - '204': - description: Indicates a successful call. - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - '404': - description: Object is not found. - content: - application/json: - schema: - type: object - properties: - error: - type: string - example: Not Found - message: - type: string - example: Saved object [action/baf33fc0-920c-11ed-b36a-874bd1548a00] not found - statusCode: - type: integer - example: 404 - servers: - - url: https://localhost:5601 - put: - summary: Updates the attributes for a connector. - operationId: updateConnector - description: | - You must have `all` privileges for the **Actions and Connectors** feature in the **Management** section of the Kibana feature privileges. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/kbn_xsrf' - - $ref: '#/components/parameters/connector_id' - - $ref: '#/components/parameters/space_id' - requestBody: - required: true - content: - application/json: - schema: - title: Update connector request body properties - description: The properties vary depending on the connector type. 
- oneOf: - - $ref: '#/components/schemas/update_connector_request_cases_webhook' - - $ref: '#/components/schemas/update_connector_request_email' - - $ref: '#/components/schemas/update_connector_request_gemini' - - $ref: '#/components/schemas/update_connector_request_index' - - $ref: '#/components/schemas/update_connector_request_jira' - - $ref: '#/components/schemas/update_connector_request_opsgenie' - - $ref: '#/components/schemas/update_connector_request_pagerduty' - - $ref: '#/components/schemas/update_connector_request_resilient' - - $ref: '#/components/schemas/update_connector_request_serverlog' - - $ref: '#/components/schemas/update_connector_request_servicenow' - - $ref: '#/components/schemas/update_connector_request_servicenow_itom' - - $ref: '#/components/schemas/update_connector_request_servicenow_sir' - - $ref: '#/components/schemas/update_connector_request_slack' - - $ref: '#/components/schemas/update_connector_request_slack_api' - - $ref: '#/components/schemas/update_connector_request_swimlane' - - $ref: '#/components/schemas/update_connector_request_teams' - - $ref: '#/components/schemas/update_connector_request_tines' - - $ref: '#/components/schemas/update_connector_request_webhook' - - $ref: '#/components/schemas/update_connector_request_xmatters' - examples: - updateIndexConnectorRequest: - $ref: '#/components/examples/update_index_connector_request' - responses: - '200': - description: Indicates a successful call. - content: - application/json: - schema: - $ref: '#/components/schemas/connector_response_properties' - '400': - description: Indicates a bad request. - content: - application/json: - schema: - $ref: '#/components/schemas/bad_request_error' - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - '404': - description: Object is not found. - content: - application/json: - schema: - $ref: '#/components/schemas/object_not_found_error' - servers: - - url: https://localhost:5601 - servers: - - url: https://localhost:5601 - /s/{spaceId}/api/actions/connectors: - get: - summary: Retrieves all connectors. - operationId: getConnectors - description: | - You must have `read` privileges for the **Actions and Connectors** feature in the **Management** section of the Kibana feature privileges. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/space_id' - responses: - '200': - description: Indicates a successful call. - content: - application/json: - schema: - type: array - items: - title: Get connectors response body properties - description: The properties vary for each connector type. - type: object - required: - - connector_type_id - - id - - is_preconfigured - - name - - referenced_by_count - properties: - connector_type_id: - $ref: '#/components/schemas/connector_types' - config: - type: object - description: The configuration for the connector. Configuration properties vary depending on the connector type. - additionalProperties: true - nullable: true - id: - type: string - description: The identifier for the connector. - example: b0766e10-d190-11ec-b04c-776c77d14fca - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. 
- example: my-connector - referenced_by_count: - type: integer - description: Indicates the number of saved objects that reference the connector. If `is_preconfigured` is true, this value is not calculated. - example: 2 - default: 0 - examples: - getConnectorsResponse: - $ref: '#/components/examples/get_connectors_response' - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - servers: - - url: https://localhost:5601 - servers: - - url: https://localhost:5601 - /s/{spaceId}/api/actions/connector_types: - get: - summary: Retrieves a list of all connector types. - operationId: getConnectorTypes - description: | - You do not need any Kibana feature privileges to run this API. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/space_id' - - in: query - name: feature_id - description: A filter to limit the retrieved connector types to those that support a specific feature (such as alerting or cases). - schema: - $ref: '#/components/schemas/features' - responses: - '200': - description: Indicates a successful call. - content: - application/json: - schema: - title: Get connector types response body properties - description: The properties vary for each connector type. - type: array - items: - type: object - properties: - enabled: - type: boolean - description: Indicates whether the connector type is enabled in Kibana. - example: true - enabled_in_config: - type: boolean - description: Indicates whether the connector type is enabled in the Kibana `.yml` file. - example: true - enabled_in_license: - type: boolean - description: Indicates whether the connector is enabled in the license. - example: true - id: - $ref: '#/components/schemas/connector_types' - minimum_license_required: - type: string - description: The license that is required to use the connector type. - example: basic - name: - type: string - description: The name of the connector type. - example: Index - supported_feature_ids: - type: array - description: The Kibana features that are supported by the connector type. - items: - $ref: '#/components/schemas/features' - example: - - alerting - - uptime - - siem - examples: - getConnectorTypesResponse: - $ref: '#/components/examples/get_connector_types_response' - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - servers: - - url: https://localhost:5601 - servers: - - url: https://localhost:5601 - /s/{spaceId}/api/actions/connector/{connectorId}/_execute: - post: - summary: Runs a connector. - operationId: runConnector - description: | - You can use this API to test an action that involves interaction with Kibana services or integrations with third-party systems. You must have `read` privileges for the **Actions and Connectors** feature in the **Management** section of the Kibana feature privileges. If you use an index connector, you must also have `all`, `create`, `index`, or `write` indices privileges. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/kbn_xsrf' - - $ref: '#/components/parameters/connector_id' - - $ref: '#/components/parameters/space_id' - requestBody: - required: true - content: - application/json: - schema: - title: Run connector request body properties - description: The properties vary depending on the connector type. 
- type: object - required: - - params - properties: - params: - oneOf: - - $ref: '#/components/schemas/run_connector_params_documents' - - $ref: '#/components/schemas/run_connector_params_level_message' - - title: Subaction parameters - description: Test an action that involves a subaction. - oneOf: - - $ref: '#/components/schemas/run_connector_subaction_addevent' - - $ref: '#/components/schemas/run_connector_subaction_closealert' - - $ref: '#/components/schemas/run_connector_subaction_createalert' - - $ref: '#/components/schemas/run_connector_subaction_fieldsbyissuetype' - - $ref: '#/components/schemas/run_connector_subaction_getchoices' - - $ref: '#/components/schemas/run_connector_subaction_getfields' - - $ref: '#/components/schemas/run_connector_subaction_getincident' - - $ref: '#/components/schemas/run_connector_subaction_issue' - - $ref: '#/components/schemas/run_connector_subaction_issues' - - $ref: '#/components/schemas/run_connector_subaction_issuetypes' - - $ref: '#/components/schemas/run_connector_subaction_pushtoservice' - discriminator: - propertyName: subAction - examples: - runIndexConnectorRequest: - $ref: '#/components/examples/run_index_connector_request' - runJiraConnectorRequest: - $ref: '#/components/examples/run_jira_connector_request' - runServerLogConnectorRequest: - $ref: '#/components/examples/run_server_log_connector_request' - runServiceNowITOMConnectorRequest: - $ref: '#/components/examples/run_servicenow_itom_connector_request' - runSwimlaneConnectorRequest: - $ref: '#/components/examples/run_swimlane_connector_request' - responses: - '200': - description: Indicates a successful call. - content: - application/json: - schema: - $ref: '#/components/schemas/run_connector_general_response' - examples: - runIndexConnectorResponse: - $ref: '#/components/examples/run_index_connector_response' - runJiraConnectorResponse: - $ref: '#/components/examples/run_jira_connector_response' - runServerLogConnectorResponse: - $ref: '#/components/examples/run_server_log_connector_response' - runServiceNowITOMConnectorResponse: - $ref: '#/components/examples/run_servicenow_itom_connector_response' - runSwimlaneConnectorResponse: - $ref: '#/components/examples/run_swimlane_connector_response' - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - servers: - - url: https://localhost:5601 - servers: - - url: https://localhost:5601 - /s/{spaceId}/api/actions/action/{actionId}: - delete: - summary: Deletes a connector. - operationId: legacyDeleteConnector - deprecated: true - description: | - Deprecated in 7.13.0. Use the delete connector API instead. WARNING: When you delete a connector, it cannot be recovered. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/kbn_xsrf' - - $ref: '#/components/parameters/action_id' - - $ref: '#/components/parameters/space_id' - responses: - '204': - description: Indicates a successful call. - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - servers: - - url: https://localhost:5601 - get: - summary: Retrieves a connector by ID. - operationId: legacyGetConnector - description: Deprecated in 7.13.0. Use the get connector API instead. 
- deprecated: true - tags: - - connectors - parameters: - - $ref: '#/components/parameters/action_id' - - $ref: '#/components/parameters/space_id' - responses: - '200': - $ref: '#/components/responses/200_actions' - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - servers: - - url: https://localhost:5601 - put: - summary: Updates the attributes for a connector. - operationId: legacyUpdateConnector - deprecated: true - description: Deprecated in 7.13.0. Use the update connector API instead. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/kbn_xsrf' - - $ref: '#/components/parameters/action_id' - - $ref: '#/components/parameters/space_id' - requestBody: - required: true - content: - application/json: - schema: - title: Legacy update connector request body properties - description: The properties vary depending on the connector type. - type: object - properties: - config: - type: object - description: The new connector configuration. Configuration properties vary depending on the connector type. - name: - type: string - description: The new name for the connector. - secrets: - type: object - description: The updated secrets configuration for the connector. Secrets properties vary depending on the connector type. - responses: - '200': - $ref: '#/components/responses/200_actions' - '404': - description: Object is not found. - content: - application/json: - schema: - $ref: '#/components/schemas/object_not_found_error' - servers: - - url: https://localhost:5601 - servers: - - url: https://localhost:5601 - /s/{spaceId}/api/actions: - get: - summary: Retrieves all connectors. - operationId: legacyGetConnectors - deprecated: true - description: Deprecated in 7.13.0. Use the get all connectors API instead. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/space_id' - responses: - '200': - description: Indicates a successful call. - content: - application/json: - schema: - type: array - items: - $ref: '#/components/schemas/action_response_properties' - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - servers: - - url: https://localhost:5601 - post: - summary: Creates a connector. - operationId: legacyCreateConnector - deprecated: true - description: Deprecated in 7.13.0. Use the create connector API instead. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/kbn_xsrf' - - $ref: '#/components/parameters/space_id' - requestBody: - required: true - content: - application/json: - schema: - title: Legacy create connector request properties - type: object - properties: - actionTypeId: - type: string - description: The connector type identifier. - config: - type: object - description: The configuration for the connector. Configuration properties vary depending on the connector type. - name: - type: string - description: The display name for the connector. - secrets: - type: object - description: | - The secrets configuration for the connector. Secrets configuration properties vary depending on the connector type. NOTE: Remember these values. You must provide them each time you update the connector. - responses: - '200': - $ref: '#/components/responses/200_actions' - '401': - description: Authorization information is missing or invalid. 
- content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - servers: - - url: https://localhost:5601 - servers: - - url: https://localhost:5601 - /s/{spaceId}/api/actions/list_action_types: - get: - summary: Retrieves a list of all connector types. - operationId: legacyGetConnectorTypes - deprecated: true - description: Deprecated in 7.13.0. Use the get all connector types API instead. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/space_id' - responses: - '200': - description: Indicates a successful call. - content: - application/json: - schema: - title: Legacy get connector types response body properties - description: The properties vary for each connector type. - type: array - items: - type: object - properties: - enabled: - type: boolean - description: Indicates whether the connector type is enabled in Kibana. - enabledInConfig: - type: boolean - description: Indicates whether the connector type is enabled in the Kibana `.yml` file. - enabledInLicense: - type: boolean - description: Indicates whether the connector is enabled in the license. - example: true - id: - type: string - description: The unique identifier for the connector type. - minimumLicenseRequired: - type: string - description: The license that is required to use the connector type. - name: - type: string - description: The name of the connector type. - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - servers: - - url: https://localhost:5601 - servers: - - url: https://localhost:5601 - /s/{spaceId}/api/actions/action/{actionId}/_execute: - post: - summary: Runs a connector. - operationId: legacyRunConnector - deprecated: true - description: Deprecated in 7.13.0. Use the run connector API instead. - tags: - - connectors - parameters: - - $ref: '#/components/parameters/kbn_xsrf' - - $ref: '#/components/parameters/action_id' - - $ref: '#/components/parameters/space_id' - requestBody: - required: true - content: - application/json: - schema: - title: Legacy run connector request body properties - description: The properties vary depending on the connector type. - type: object - required: - - params - properties: - params: - type: object - description: The parameters of the connector. Parameter properties vary depending on the connector type. - responses: - '200': - description: Indicates a successful call. - content: - application/json: - schema: - $ref: '#/components/schemas/legacy_run_connector_general_response' - '401': - description: Authorization information is missing or invalid. - content: - application/json: - schema: - $ref: '#/components/schemas/authorization_error' - servers: - - url: https://localhost:5601 - servers: - - url: https://localhost:5601 -components: - securitySchemes: - basicAuth: - type: http - scheme: basic - apiKeyAuth: - type: apiKey - in: header - name: ApiKey - parameters: - kbn_xsrf: - schema: - type: string - in: header - name: kbn-xsrf - description: Cross-site request forgery protection - required: true - space_id: - in: path - name: spaceId - description: An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. - required: true - schema: - type: string - example: default - connector_id: - in: path - name: connectorId - description: An identifier for the connector. 
- required: true - schema: - type: string - example: df770e30-8b8b-11ed-a780-3b746c987a81 - action_id: - in: path - name: actionId - description: An identifier for the action. - required: true - schema: - type: string - example: c55b6eb0-6bad-11eb-9f3b-611eebc6c3ad - schemas: - config_properties_cases_webhook: - title: Connector request properties for Webhook - Case Management connector - required: - - createIncidentJson - - createIncidentResponseKey - - createIncidentUrl - - getIncidentResponseExternalTitleKey - - getIncidentUrl - - updateIncidentJson - - updateIncidentUrl - - viewIncidentUrl - description: Defines properties for connectors when type is `.cases-webhook`. - type: object - properties: - createCommentJson: - type: string - description: | - A JSON payload sent to the create comment URL to create a case comment. You can use variables to add Kibana Cases data to the payload. The required variable is `case.comment`. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated once the Mustache variables have been placed when the REST method runs. Manually ensure that the JSON is valid, disregarding the Mustache variables, so the later validation will pass. - example: - body: - '[object Object]': null - createCommentMethod: - type: string - description: | - The REST API HTTP request method to create a case comment in the third-party system. Valid values are `patch`, `post`, and `put`. - default: put - enum: - - patch - - post - - put - createCommentUrl: - type: string - description: | - The REST API URL to create a case comment by ID in the third-party system. You can use a variable to add the external system ID to the URL. If you are using the `xpack.actions.allowedHosts setting`, add the hostname to the allowed hosts. - example: https://testing-jira.atlassian.net/rest/api/2/issue/{{{external.system.id}}}/comment - createIncidentJson: - type: string - description: | - A JSON payload sent to the create case URL to create a case. You can use variables to add case data to the payload. Required variables are `case.title` and `case.description`. Due to Mustache template variables (which is the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid to avoid future validation errors; disregard Mustache variables during your review. - example: - fields: - summary: - '[object Object]': null - description: - '[object Object]': null - labels: - '[object Object]': null - createIncidentMethod: - type: string - description: | - The REST API HTTP request method to create a case in the third-party system. Valid values are `patch`, `post`, and `put`. - enum: - - patch - - post - - put - default: post - createIncidentResponseKey: - type: string - description: The JSON key in the create case response that contains the external case ID. - createIncidentUrl: - type: string - description: | - The REST API URL to create a case in the third-party system. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - getIncidentResponseExternalTitleKey: - type: string - description: The JSON key in get case response that contains the external case title. 
- getIncidentUrl: - type: string - description: | - The REST API URL to get the case by ID from the third-party system. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. You can use a variable to add the external system ID to the URL. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid, disregarding the Mustache variables, so the later validation will pass. - example: https://testing-jira.atlassian.net/rest/api/2/issue/{{{external.system.id}}} - hasAuth: - type: boolean - description: If true, a username and password for login type authentication must be provided. - default: true - headers: - type: string - description: | - A set of key-value pairs sent as headers with the request URLs for the create case, update case, get case, and create comment methods. - updateIncidentJson: - type: string - description: | - The JSON payload sent to the update case URL to update the case. You can use variables to add Kibana Cases data to the payload. Required variables are `case.title` and `case.description`. Due to Mustache template variables (which is the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid to avoid future validation errors; disregard Mustache variables during your review. - example: - fields: - summary: - '[object Object]': null - description: - '[object Object]': null - labels: - '[object Object]': null - updateIncidentMethod: - type: string - description: | - The REST API HTTP request method to update the case in the third-party system. Valid values are `patch`, `post`, and `put`. - default: put - enum: - - patch - - post - - put - updateIncidentUrl: - type: string - description: | - The REST API URL to update the case by ID in the third-party system. You can use a variable to add the external system ID to the URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - example: https://testing-jira.atlassian.net/rest/api/2/issue/{{{external.system.ID}}} - viewIncidentUrl: - type: string - description: | - The URL to view the case in the external system. You can use variables to add the external system ID or external system title to the URL. - example: https://testing-jira.atlassian.net/browse/{{{external.system.title}}} - secrets_properties_cases_webhook: - title: Connector secrets properties for Webhook - Case Management connector - type: object - properties: - password: - type: string - description: The password for HTTP basic authentication. If `hasAuth` is set to `true`, this property is required. - user: - type: string - description: The username for HTTP basic authentication. If `hasAuth` is set to `true`, this property is required. - create_connector_request_cases_webhook: - title: Create Webhook - Case Managment connector request - description: | - The Webhook - Case Management connector uses axios to send POST, PUT, and GET requests to a case management RESTful API web service. 
- type: object - required: - - config - - connector_type_id - - name - properties: - config: - $ref: '#/components/schemas/config_properties_cases_webhook' - connector_type_id: - type: string - description: The type of connector. - enum: - - .cases-webhook - example: .cases-webhook - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_cases_webhook' - config_properties_email: - title: Connector request properties for an email connector - description: Defines properties for connectors when type is `.email`. - type: object - properties: - from: - type: string - host: - type: string - port: - type: integer - clientId: - type: string - nullable: true - default: null - hasAuth: - type: boolean - default: true - oauthTokenUrl: - type: string - nullable: true - default: null - secure: - type: boolean - nullable: true - default: null - service: - type: string - default: other - tenantId: - type: string - nullable: true - default: null - secrets_properties_email: - title: Connector secrets properties for an email connector - description: Defines secrets for connectors when type is `.email`. - type: object - additionalProperties: true - create_connector_request_email: - title: Create email connector request - description: | - The email connector uses the SMTP protocol to send mail messages, using an integration of Nodemailer. An exception is Microsoft Exchange, which uses HTTP protocol for sending emails, Send mail. Email message text is sent as both plain text and html text. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_email' - connector_type_id: - type: string - description: The type of connector. - enum: - - .email - example: .email - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_email' - config_properties_gemini: - title: Connector request properties for an Google Gemini connector - description: Defines properties for connectors when type is `.gemini`. - required: - - apiUrl - - gcpRegion - - gcpProjectID - type: object - properties: - apiUrl: - type: string - description: The Google Gemini request URL. - defaultModel: - type: string - description: The generative artificial intelligence model for Google Gemini to use. - default: gemini-1.5-pro-001 - gcpRegion: - type: string - description: The GCP region where the Vertex AI endpoint enabled. - gcpProjectID: - type: string - description: The Google ProjectID that has Vertex AI endpoint enabled. - secrets_properties_gemini: - title: Connector secrets properties for a Google Gemini connector - description: Defines secrets for connectors when type is `.gemini`. - type: object - required: - - credentialsJson - properties: - credentialsJson: - type: string - description: The service account credentials JSON file. The service account should have Vertex AI user IAM role assigned to it. - create_connector_request_gemini: - title: Create Google Gemini connector request - description: | - The Google Gemini connector uses axios to send a POST request to Google Gemini. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_gemini' - connector_type_id: - type: string - description: The type of connector. 
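A minimal sketch of a `create_connector_request_cases_webhook` body built from the schema above; the Jira-style URLs, response keys, and credentials are illustrative placeholders, and the endpoint that accepts this body is not shown in this part of the spec. The Mustache variables (`{{{case.title}}}` and friends) are left for Kibana to substitute when the REST method runs.

```go
// Sketch of a create_connector_request_cases_webhook body; all values are placeholders.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	body := map[string]any{
		"connector_type_id": ".cases-webhook",
		"name":              "my-connector",
		"config": map[string]any{
			// The eight properties below are required by config_properties_cases_webhook.
			"createIncidentUrl":                   "https://example.atlassian.net/rest/api/2/issue",
			"createIncidentJson":                  `{"fields":{"summary":"{{{case.title}}}","description":"{{{case.description}}}"}}`,
			"createIncidentResponseKey":           "id",
			"getIncidentUrl":                      "https://example.atlassian.net/rest/api/2/issue/{{{external.system.id}}}",
			"getIncidentResponseExternalTitleKey": "key",
			"updateIncidentUrl":                   "https://example.atlassian.net/rest/api/2/issue/{{{external.system.id}}}",
			"updateIncidentJson":                  `{"fields":{"summary":"{{{case.title}}}","description":"{{{case.description}}}"}}`,
			"viewIncidentUrl":                     "https://example.atlassian.net/browse/{{{external.system.title}}}",
		},
		"secrets": map[string]any{ // user and password are required when hasAuth is true
			"user":     "apiuser",
			"password": "apipassword",
		},
	}
	out, _ := json.MarshalIndent(body, "", "  ")
	fmt.Println(string(out))
}
```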
- enum: - - .gemini - example: .gemini - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_gemini' - config_properties_index: - title: Connector request properties for an index connector - required: - - index - description: Defines properties for connectors when type is `.index`. - type: object - properties: - executionTimeField: - description: Specifies a field that will contain the time the alert condition was detected. - default: null - type: string - nullable: true - index: - description: The Elasticsearch index to be written to. - type: string - refresh: - description: | - The refresh policy for the write request, which affects when changes are made visible to search. Refer to the refresh setting for Elasticsearch document APIs. - default: false - type: boolean - create_connector_request_index: - title: Create index connector request - description: The index connector indexes a document into Elasticsearch. - type: object - required: - - config - - connector_type_id - - name - properties: - config: - $ref: '#/components/schemas/config_properties_index' - connector_type_id: - type: string - description: The type of connector. - enum: - - .index - example: .index - name: - type: string - description: The display name for the connector. - example: my-connector - config_properties_jira: - title: Connector request properties for a Jira connector - required: - - apiUrl - - projectKey - description: Defines properties for connectors when type is `.jira`. - type: object - properties: - apiUrl: - description: The Jira instance URL. - type: string - projectKey: - description: The Jira project key. - type: string - secrets_properties_jira: - title: Connector secrets properties for a Jira connector - required: - - apiToken - - email - description: Defines secrets for connectors when type is `.jira`. - type: object - properties: - apiToken: - description: The Jira API authentication token for HTTP basic authentication. - type: string - email: - description: The account email for HTTP Basic authentication. - type: string - create_connector_request_jira: - title: Create Jira connector request - description: The Jira connector uses the REST API v2 to create Jira issues. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_jira' - connector_type_id: - type: string - description: The type of connector. - enum: - - .jira - example: .jira - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_jira' - config_properties_opsgenie: - title: Connector request properties for an Opsgenie connector - required: - - apiUrl - description: Defines properties for connectors when type is `.opsgenie`. - type: object - properties: - apiUrl: - description: | - The Opsgenie URL. For example, `https://api.opsgenie.com` or `https://api.eu.opsgenie.com`. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - type: string - secrets_properties_opsgenie: - title: Connector secrets properties for an Opsgenie connector - required: - - apiKey - description: Defines secrets for connectors when type is `.opsgenie`. - type: object - properties: - apiKey: - description: The Opsgenie API authentication key for HTTP Basic authentication. 
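The index-connector schema above is small enough to mirror with plain Go structs; this sketch only marshals an illustrative request body, and the index name and refresh flag are placeholders.

```go
// Sketch of the index-connector request body, mirroring config_properties_index.
package main

import (
	"encoding/json"
	"fmt"
)

type indexConfig struct {
	Index              string  `json:"index"`                        // required: target Elasticsearch index
	Refresh            bool    `json:"refresh"`                      // defaults to false
	ExecutionTimeField *string `json:"executionTimeField,omitempty"` // nullable, defaults to null
}

type createIndexConnector struct {
	ConnectorTypeID string      `json:"connector_type_id"` // always ".index" for this schema
	Name            string      `json:"name"`
	Config          indexConfig `json:"config"`
}

func main() {
	req := createIndexConnector{
		ConnectorTypeID: ".index",
		Name:            "my-connector",
		Config:          indexConfig{Index: "my-index", Refresh: true},
	}
	out, _ := json.MarshalIndent(req, "", "  ")
	fmt.Println(string(out))
}
```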
- type: string - create_connector_request_opsgenie: - title: Create Opsgenie connector request - description: The Opsgenie connector uses the Opsgenie alert API. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_opsgenie' - connector_type_id: - type: string - description: The type of connector. - enum: - - .opsgenie - example: .opsgenie - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_opsgenie' - config_properties_pagerduty: - title: Connector request properties for a PagerDuty connector - description: Defines properties for connectors when type is `.pagerduty`. - type: object - properties: - apiUrl: - type: string - nullable: true - secrets_properties_pagerduty: - title: Connector secrets properties for a PagerDuty connector - description: Defines secrets for connectors when type is `.pagerduty`. - type: object - additionalProperties: true - create_connector_request_pagerduty: - title: Create PagerDuty connector request - description: | - The PagerDuty connector uses the v2 Events API to trigger, acknowledge, and resolve PagerDuty alerts. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_pagerduty' - connector_type_id: - type: string - description: The type of connector. - enum: - - .pagerduty - example: .pagerduty - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_pagerduty' - config_properties_resilient: - title: Connector request properties for a IBM Resilient connector - required: - - apiUrl - - orgId - description: Defines properties for connectors when type is `.resilient`. - type: object - properties: - apiUrl: - description: The IBM Resilient instance URL. - type: string - orgId: - description: The IBM Resilient organization ID. - type: string - secrets_properties_resilient: - title: Connector secrets properties for IBM Resilient connector - required: - - apiKeyId - - apiKeySecret - description: Defines secrets for connectors when type is `.resilient`. - type: object - properties: - apiKeyId: - type: string - description: The authentication key ID for HTTP Basic authentication. - apiKeySecret: - type: string - description: The authentication key secret for HTTP Basic authentication. - create_connector_request_resilient: - title: Create IBM Resilient connector request - description: The IBM Resilient connector uses the RESILIENT REST v2 to create IBM Resilient incidents. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_resilient' - connector_type_id: - description: The type of connector. - type: string - example: .resilient - enum: - - .resilient - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_resilient' - create_connector_request_serverlog: - title: Create server log connector request - description: This connector writes an entry to the Kibana server log. - type: object - required: - - connector_type_id - - name - properties: - connector_type_id: - type: string - description: The type of connector. 
- enum: - - .server-log - example: .server-log - name: - type: string - description: The display name for the connector. - example: my-connector - config_properties_servicenow: - title: Connector request properties for a ServiceNow ITSM connector - required: - - apiUrl - description: Defines properties for connectors when type is `.servicenow`. - type: object - properties: - apiUrl: - type: string - description: The ServiceNow instance URL. - clientId: - description: | - The client ID assigned to your OAuth application. This property is required when `isOAuth` is `true`. - type: string - isOAuth: - description: | - The type of authentication to use. The default value is false, which means basic authentication is used instead of open authorization (OAuth). - default: false - type: boolean - jwtKeyId: - description: | - The key identifier assigned to the JWT verifier map of your OAuth application. This property is required when `isOAuth` is `true`. - type: string - userIdentifierValue: - description: | - The identifier to use for OAuth authentication. This identifier should be the user field you selected when you created an OAuth JWT API endpoint for external clients in your ServiceNow instance. For example, if the selected user field is `Email`, the user identifier should be the user's email address. This property is required when `isOAuth` is `true`. - type: string - usesTableApi: - description: | - Determines whether the connector uses the Table API or the Import Set API. This property is supported only for ServiceNow ITSM and ServiceNow SecOps connectors. NOTE: If this property is set to `false`, the Elastic application should be installed in ServiceNow. - default: true - type: boolean - secrets_properties_servicenow: - title: Connector secrets properties for ServiceNow ITOM, ServiceNow ITSM, and ServiceNow SecOps connectors - description: Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. - type: object - properties: - clientSecret: - type: string - description: The client secret assigned to your OAuth application. This property is required when `isOAuth` is `true`. - password: - type: string - description: The password for HTTP basic authentication. This property is required when `isOAuth` is `false`. - privateKey: - type: string - description: The RSA private key that you created for use in ServiceNow. This property is required when `isOAuth` is `true`. - privateKeyPassword: - type: string - description: The password for the RSA private key. This property is required when `isOAuth` is `true` and you set a password on your private key. - username: - type: string - description: The username for HTTP basic authentication. This property is required when `isOAuth` is `false`. - create_connector_request_servicenow: - title: Create ServiceNow ITSM connector request - description: | - The ServiceNow ITSM connector uses the import set API to create ServiceNow incidents. You can use the connector for rule actions and cases. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_servicenow' - connector_type_id: - type: string - description: The type of connector. - enum: - - .servicenow - example: .servicenow - name: - type: string - description: The display name for the connector. 
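A sketch of the two authentication shapes the ServiceNow schemas above allow: basic authentication (`isOAuth` false with username and password secrets) versus OAuth (`isOAuth` true with `clientId`, `jwtKeyId`, `userIdentifierValue`, `clientSecret`, and `privateKey`). All values are placeholders.

```go
// Sketch of .servicenow config/secrets under basic auth and OAuth; values are placeholders.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	basic := map[string]any{
		"config":  map[string]any{"apiUrl": "https://example.service-now.com", "isOAuth": false},
		"secrets": map[string]any{"username": "admin", "password": "secret"}, // required when isOAuth is false
	}
	oauth := map[string]any{
		"config": map[string]any{
			"apiUrl":              "https://example.service-now.com",
			"isOAuth":             true,
			"clientId":            "client-id",        // required when isOAuth is true
			"jwtKeyId":            "jwt-key-id",       // required when isOAuth is true
			"userIdentifierValue": "user@example.com", // required when isOAuth is true
		},
		"secrets": map[string]any{
			"clientSecret": "client-secret",                    // required when isOAuth is true
			"privateKey":   "-----BEGIN PRIVATE KEY-----\n...", // required when isOAuth is true
		},
	}
	for _, v := range []map[string]any{basic, oauth} {
		out, _ := json.MarshalIndent(v, "", "  ")
		fmt.Println(string(out))
	}
}
```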
- example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_servicenow' - config_properties_servicenow_itom: - title: Connector request properties for a ServiceNow ITSM connector - required: - - apiUrl - description: Defines properties for connectors when type is `.servicenow`. - type: object - properties: - apiUrl: - type: string - description: The ServiceNow instance URL. - clientId: - description: | - The client ID assigned to your OAuth application. This property is required when `isOAuth` is `true`. - type: string - isOAuth: - description: | - The type of authentication to use. The default value is false, which means basic authentication is used instead of open authorization (OAuth). - default: false - type: boolean - jwtKeyId: - description: | - The key identifier assigned to the JWT verifier map of your OAuth application. This property is required when `isOAuth` is `true`. - type: string - userIdentifierValue: - description: | - The identifier to use for OAuth authentication. This identifier should be the user field you selected when you created an OAuth JWT API endpoint for external clients in your ServiceNow instance. For example, if the selected user field is `Email`, the user identifier should be the user's email address. This property is required when `isOAuth` is `true`. - type: string - create_connector_request_servicenow_itom: - title: Create ServiceNow ITOM connector request - description: | - The ServiceNow ITOM connector uses the event API to create ServiceNow events. You can use the connector for rule actions. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_servicenow_itom' - connector_type_id: - type: string - description: The type of connector. - enum: - - .servicenow-itom - example: .servicenow-itom - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_servicenow' - create_connector_request_servicenow_sir: - title: Create ServiceNow SecOps connector request - description: | - The ServiceNow SecOps connector uses the import set API to create ServiceNow security incidents. You can use the connector for rule actions and cases. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_servicenow' - connector_type_id: - type: string - description: The type of connector. - enum: - - .servicenow-sir - example: .servicenow-sir - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_servicenow' - secrets_properties_slack: - title: Connector secrets properties for a Slack connector - description: Defines secrets for connectors when type is `.slack`. - type: object - additionalProperties: true - create_connector_request_slack: - title: Create Slack connector request - description: The Slack connector uses Slack Incoming Webhooks. - type: object - required: - - connector_type_id - - name - - secrets - properties: - connector_type_id: - type: string - description: The type of connector. - enum: - - .slack - example: .slack - name: - type: string - description: The display name for the connector. 
- example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_slack' - secrets_properties_slack_api: - title: Connector secrets properties for a Web API Slack connector - description: Defines secrets for connectors when type is `.slack`. - required: - - token - type: object - properties: - token: - type: string - description: Slack bot user OAuth token. - create_connector_request_slack_api: - title: Create Slack connector request - description: The Slack connector uses Slack Incoming Webhooks. - type: object - required: - - connector_type_id - - name - - secrets - properties: - connector_type_id: - type: string - description: The type of connector. - enum: - - .slack_api - example: .slack_api - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_slack_api' - config_properties_swimlane: - title: Connector request properties for a Swimlane connector - required: - - apiUrl - - appId - - connectorType - description: Defines properties for connectors when type is `.swimlane`. - type: object - properties: - apiUrl: - description: The Swimlane instance URL. - type: string - appId: - description: The Swimlane application ID. - type: string - connectorType: - description: The type of connector. Valid values are `all`, `alerts`, and `cases`. - type: string - enum: - - all - - alerts - - cases - mappings: - $ref: '#/components/schemas/config_properties_swimlane_mappings' - config_properties_swimlane_mappings: - title: Connector mappings properties for a Swimlane connector - description: The field mapping. - type: object - properties: - alertIdConfig: - title: Alert identifier mapping - description: Mapping for the alert ID. - type: object - required: - - fieldType - - id - - key - - name - properties: - fieldType: - type: string - description: The type of field in Swimlane. - id: - type: string - description: The identifier for the field in Swimlane. - key: - type: string - description: The key for the field in Swimlane. - name: - type: string - description: The name of the field in Swimlane. - caseIdConfig: - title: Case identifier mapping - description: Mapping for the case ID. - type: object - required: - - fieldType - - id - - key - - name - properties: - fieldType: - type: string - description: The type of field in Swimlane. - id: - type: string - description: The identifier for the field in Swimlane. - key: - type: string - description: The key for the field in Swimlane. - name: - type: string - description: The name of the field in Swimlane. - caseNameConfig: - title: Case name mapping - description: Mapping for the case name. - type: object - required: - - fieldType - - id - - key - - name - properties: - fieldType: - type: string - description: The type of field in Swimlane. - id: - type: string - description: The identifier for the field in Swimlane. - key: - type: string - description: The key for the field in Swimlane. - name: - type: string - description: The name of the field in Swimlane. - commentsConfig: - title: Case comment mapping - description: Mapping for the case comments. - type: object - required: - - fieldType - - id - - key - - name - properties: - fieldType: - type: string - description: The type of field in Swimlane. - id: - type: string - description: The identifier for the field in Swimlane. - key: - type: string - description: The key for the field in Swimlane. - name: - type: string - description: The name of the field in Swimlane. 
- descriptionConfig: - title: Case description mapping - description: Mapping for the case description. - type: object - required: - - fieldType - - id - - key - - name - properties: - fieldType: - type: string - description: The type of field in Swimlane. - id: - type: string - description: The identifier for the field in Swimlane. - key: - type: string - description: The key for the field in Swimlane. - name: - type: string - description: The name of the field in Swimlane. - ruleNameConfig: - title: Rule name mapping - description: Mapping for the name of the alert's rule. - type: object - required: - - fieldType - - id - - key - - name - properties: - fieldType: - type: string - description: The type of field in Swimlane. - id: - type: string - description: The identifier for the field in Swimlane. - key: - type: string - description: The key for the field in Swimlane. - name: - type: string - description: The name of the field in Swimlane. - severityConfig: - title: Severity mapping - description: Mapping for the severity. - type: object - required: - - fieldType - - id - - key - - name - properties: - fieldType: - type: string - description: The type of field in Swimlane. - id: - type: string - description: The identifier for the field in Swimlane. - key: - type: string - description: The key for the field in Swimlane. - name: - type: string - description: The name of the field in Swimlane. - secrets_properties_swimlane: - title: Connector secrets properties for a Swimlane connector - description: Defines secrets for connectors when type is `.swimlane`. - type: object - properties: - apiToken: - description: Swimlane API authentication token. - type: string - create_connector_request_swimlane: - title: Create Swimlane connector request - description: The Swimlane connector uses the Swimlane REST API to create Swimlane records. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_swimlane' - connector_type_id: - type: string - description: The type of connector. - enum: - - .swimlane - example: .swimlane - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_swimlane' - secrets_properties_teams: - title: Connector secrets properties for a Microsoft Teams connector - description: Defines secrets for connectors when type is `.teams`. - type: object - additionalProperties: true - create_connector_request_teams: - title: Create Microsoft Teams connector request - description: The Microsoft Teams connector uses Incoming Webhooks. - type: object - required: - - connector_type_id - - name - - secrets - properties: - connector_type_id: - type: string - description: The type of connector. - enum: - - .teams - example: .teams - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_teams' - config_properties_tines: - title: Connector request properties for a Tines connector - description: Defines properties for connectors when type is `.tines`. - type: object - required: - - url - properties: - url: - type: string - secrets_properties_tines: - title: Connector secrets properties for a Tines connector - description: Defines secrets for connectors when type is `.tines`. 
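Every Swimlane mapping entry above repeats the same `{fieldType, id, key, name}` object, so a single Go type can cover `alertIdConfig`, `caseIdConfig`, `ruleNameConfig`, and the rest; this sketch marshals an illustrative `.swimlane` config with placeholder identifiers.

```go
// Sketch of config_properties_swimlane; mapping identifiers and URLs are placeholders.
package main

import (
	"encoding/json"
	"fmt"
)

type swimlaneField struct {
	FieldType string `json:"fieldType"`
	ID        string `json:"id"`
	Key       string `json:"key"`
	Name      string `json:"name"`
}

type swimlaneConfig struct {
	APIURL        string                   `json:"apiUrl"`
	AppID         string                   `json:"appId"`
	ConnectorType string                   `json:"connectorType"` // all, alerts, or cases
	Mappings      map[string]swimlaneField `json:"mappings"`
}

func main() {
	cfg := swimlaneConfig{
		APIURL:        "https://example.swimlane.url",
		AppID:         "app-id",
		ConnectorType: "alerts",
		Mappings: map[string]swimlaneField{
			"alertIdConfig":  {FieldType: "text", ID: "a1", Key: "alert-id", Name: "Alert ID"},
			"ruleNameConfig": {FieldType: "text", ID: "r1", Key: "rule-name", Name: "Rule Name"},
		},
	}
	out, _ := json.MarshalIndent(cfg, "", "  ")
	fmt.Println(string(out))
}
```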
- type: object - additionalProperties: true - create_connector_request_tines: - title: Create Tines connector request - description: | - The Tines connector uses Tines Webhook actions to send events via POST request. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_tines' - connector_type_id: - type: string - description: The type of connector. - enum: - - .tines - example: .tines - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_tines' - config_properties_webhook: - title: Connector request properties for a Webhook connector - description: Defines properties for connectors when type is `.webhook`. - type: object - required: - - url - properties: - url: - type: string - method: - type: string - default: post - enum: - - patch - - post - - put - headers: - type: object - additionalProperties: true - hasAuth: - type: boolean - default: true - secrets_properties_webhook: - title: Connector secrets properties for a Webhook connector - description: Defines secrets for connectors when type is `.webhook`. - type: object - additionalProperties: true - create_connector_request_webhook: - title: Create Webhook connector request - description: | - The Webhook connector uses axios to send a POST or PUT request to a web service. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_webhook' - connector_type_id: - type: string - description: The type of connector. - enum: - - .webhook - example: .webhook - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_webhook' - config_properties_xmatters: - title: Connector request properties for a xMatters connector - description: Defines properties for connectors when type is `.xmatters`. - type: object - properties: - configUrl: - type: string - nullable: true - usesBasic: - type: boolean - default: true - secrets_properties_xmatters: - title: Connector secrets properties for an xMatters connector - description: Defines secrets for connectors when type is `.xmatters`. - type: object - additionalProperties: true - create_connector_request_xmatters: - title: Create xMatters connector request - description: | - The xMatters connector uses the xMatters Workflow for Elastic to send actionable alerts to on-call xMatters resources. - type: object - required: - - config - - connector_type_id - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_xmatters' - connector_type_id: - type: string - description: The type of connector. - enum: - - .xmatters - example: .xmatters - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_xmatters' - is_deprecated: - type: boolean - description: Indicates whether the connector type is deprecated. - example: false - is_missing_secrets: - type: boolean - description: Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - example: false - is_preconfigured: - type: boolean - description: Indicates whether it is a preconfigured connector. 
If true, the `config` and `is_missing_secrets` properties are omitted from the response. - example: false - connector_response_properties_cases_webhook: - title: Connector request properties for a Webhook - Case Management connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_cases_webhook' - connector_type_id: - description: The type of connector. - type: string - enum: - - .cases-webhook - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_email: - title: Connector response properties for an email connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_email' - connector_type_id: - type: string - description: The type of connector. - enum: - - .email - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_gemini: - title: Connector response properties for a Google Gemini connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_gemini' - connector_type_id: - type: string - description: The type of connector. - enum: - - .gemini - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_index: - title: Connector response properties for an index connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_index' - connector_type_id: - type: string - description: The type of connector. - enum: - - .index - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_jira: - title: Connector response properties for a Jira connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_jira' - connector_type_id: - type: string - description: The type of connector. - enum: - - .jira - id: - type: string - description: The identifier for the connector. 
- is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_opsgenie: - title: Connector response properties for an Opsgenie connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_opsgenie' - connector_type_id: - type: string - description: The type of connector. - enum: - - .opsgenie - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_pagerduty: - title: Connector response properties for a PagerDuty connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_pagerduty' - connector_type_id: - type: string - description: The type of connector. - enum: - - .pagerduty - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_resilient: - title: Connector response properties for a IBM Resilient connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_resilient' - connector_type_id: - type: string - description: The type of connector. - enum: - - .resilient - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_serverlog: - title: Connector response properties for a server log connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - type: object - nullable: true - connector_type_id: - type: string - description: The type of connector. - enum: - - .server-log - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_servicenow: - title: Connector response properties for a ServiceNow ITSM connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_servicenow' - connector_type_id: - type: string - description: The type of connector. 
- enum: - - .servicenow - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_servicenow_itom: - title: Connector response properties for a ServiceNow ITOM connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_servicenow_itom' - connector_type_id: - type: string - description: The type of connector. - enum: - - .servicenow-itom - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_servicenow_sir: - title: Connector response properties for a ServiceNow SecOps connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_servicenow' - connector_type_id: - type: string - description: The type of connector. - enum: - - .servicenow-sir - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_slack: - title: Connector response properties for a Slack connector - type: object - required: - - connector_type_id - - id - - is_preconfigured - - name - properties: - connector_type_id: - type: string - description: The type of connector. - enum: - - .slack - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_slack_api: - title: Connector response properties for a Slack connector - type: object - required: - - connector_type_id - - id - - is_preconfigured - - name - properties: - connector_type_id: - type: string - description: The type of connector. - enum: - - .slack_api - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_swimlane: - title: Connector response properties for a Swimlane connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_swimlane' - connector_type_id: - type: string - description: The type of connector. 
- enum: - - .swimlane - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_teams: - title: Connector response properties for a Microsoft Teams connector - type: object - required: - - connector_type_id - - id - - is_preconfigured - - name - properties: - connector_type_id: - type: string - description: The type of connector. - enum: - - .teams - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_tines: - title: Connector response properties for a Tines connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_tines' - connector_type_id: - type: string - description: The type of connector. - enum: - - .tines - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_webhook: - title: Connector response properties for a Webhook connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_webhook' - connector_type_id: - type: string - description: The type of connector. - enum: - - .webhook - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties_xmatters: - title: Connector response properties for an xMatters connector - type: object - required: - - config - - connector_type_id - - id - - is_preconfigured - - name - properties: - config: - $ref: '#/components/schemas/config_properties_xmatters' - connector_type_id: - type: string - description: The type of connector. - enum: - - .xmatters - id: - type: string - description: The identifier for the connector. - is_deprecated: - $ref: '#/components/schemas/is_deprecated' - is_missing_secrets: - $ref: '#/components/schemas/is_missing_secrets' - is_preconfigured: - $ref: '#/components/schemas/is_preconfigured' - name: - type: string - description: The display name for the connector. - connector_response_properties: - title: Connector response properties - description: The properties vary depending on the connector type. 
- oneOf: - - $ref: '#/components/schemas/connector_response_properties_cases_webhook' - - $ref: '#/components/schemas/connector_response_properties_email' - - $ref: '#/components/schemas/connector_response_properties_gemini' - - $ref: '#/components/schemas/connector_response_properties_index' - - $ref: '#/components/schemas/connector_response_properties_jira' - - $ref: '#/components/schemas/connector_response_properties_opsgenie' - - $ref: '#/components/schemas/connector_response_properties_pagerduty' - - $ref: '#/components/schemas/connector_response_properties_resilient' - - $ref: '#/components/schemas/connector_response_properties_serverlog' - - $ref: '#/components/schemas/connector_response_properties_servicenow' - - $ref: '#/components/schemas/connector_response_properties_servicenow_itom' - - $ref: '#/components/schemas/connector_response_properties_servicenow_sir' - - $ref: '#/components/schemas/connector_response_properties_slack' - - $ref: '#/components/schemas/connector_response_properties_slack_api' - - $ref: '#/components/schemas/connector_response_properties_swimlane' - - $ref: '#/components/schemas/connector_response_properties_teams' - - $ref: '#/components/schemas/connector_response_properties_tines' - - $ref: '#/components/schemas/connector_response_properties_webhook' - - $ref: '#/components/schemas/connector_response_properties_xmatters' - discriminator: - propertyName: connector_type_id - mapping: - .cases-webhook: '#/components/schemas/connector_response_properties_cases_webhook' - .email: '#/components/schemas/connector_response_properties_email' - .gemini: '#/components/schemas/connector_response_properties_gemini' - .index: '#/components/schemas/connector_response_properties_index' - .jira: '#/components/schemas/connector_response_properties_jira' - .opsgenie: '#/components/schemas/connector_response_properties_opsgenie' - .pagerduty: '#/components/schemas/connector_response_properties_pagerduty' - .resilient: '#/components/schemas/connector_response_properties_resilient' - .server-log: '#/components/schemas/connector_response_properties_serverlog' - .servicenow: '#/components/schemas/connector_response_properties_servicenow' - .servicenow-itom: '#/components/schemas/connector_response_properties_servicenow_itom' - .servicenow-sir: '#/components/schemas/connector_response_properties_servicenow_sir' - .slack: '#/components/schemas/connector_response_properties_slack' - .slack_api: '#/components/schemas/connector_response_properties_slack_api' - .swimlane: '#/components/schemas/connector_response_properties_swimlane' - .teams: '#/components/schemas/connector_response_properties_teams' - .tines: '#/components/schemas/connector_response_properties_tines' - .webhook: '#/components/schemas/connector_response_properties_webhook' - .xmatters: '#/components/schemas/connector_response_properties_xmatters' - update_connector_request_cases_webhook: - title: Update Webhook - Case Managment connector request - type: object - required: - - config - - name - properties: - config: - $ref: '#/components/schemas/config_properties_cases_webhook' - name: - type: string - description: The display name for the connector. 
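A sketch of how a client might consume `connector_response_properties`: the discriminator above keys the variant on `connector_type_id`, so the common fields can be decoded first and the type-specific `config` unmarshalled afterwards. The response JSON here is an illustrative placeholder for an `.index` connector.

```go
// Sketch: decode the connector_type_id discriminator, then the variant-specific config.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	raw := []byte(`{"id":"df770e30-8b8b-11ed-a780-3b746c987a81","connector_type_id":".index",` +
		`"name":"my-connector","is_preconfigured":false,"config":{"index":"my-index","refresh":false}}`)

	// Decode only the discriminator and the common fields first.
	var head struct {
		ID              string          `json:"id"`
		ConnectorTypeID string          `json:"connector_type_id"`
		Name            string          `json:"name"`
		Config          json.RawMessage `json:"config"`
	}
	if err := json.Unmarshal(raw, &head); err != nil {
		panic(err)
	}

	switch head.ConnectorTypeID {
	case ".index":
		var cfg struct {
			Index   string `json:"index"`
			Refresh bool   `json:"refresh"`
		}
		_ = json.Unmarshal(head.Config, &cfg)
		fmt.Printf("index connector %q writes to %q\n", head.Name, cfg.Index)
	default:
		fmt.Printf("connector %q has type %s\n", head.Name, head.ConnectorTypeID)
	}
}
```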
- example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_cases_webhook' - update_connector_request_email: - title: Update email connector request - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_email' - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_email' - update_connector_request_gemini: - title: Update Google Gemini connector request - type: object - required: - - config - - name - properties: - config: - $ref: '#/components/schemas/config_properties_gemini' - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_gemini' - update_connector_request_index: - title: Update index connector request - type: object - required: - - config - - name - properties: - config: - $ref: '#/components/schemas/config_properties_index' - name: - type: string - description: The display name for the connector. - update_connector_request_jira: - title: Update Jira connector request - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_jira' - name: - type: string - description: The display name for the connector. - secrets: - $ref: '#/components/schemas/secrets_properties_jira' - update_connector_request_opsgenie: - title: Update Opsgenie connector request - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_opsgenie' - name: - type: string - description: The display name for the connector. - secrets: - $ref: '#/components/schemas/secrets_properties_opsgenie' - update_connector_request_pagerduty: - title: Update PagerDuty connector request - description: | - The PagerDuty connector uses the v2 Events API to trigger, acknowledge, and resolve PagerDuty alerts. - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_pagerduty' - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_pagerduty' - update_connector_request_resilient: - title: Update IBM Resilient connector request - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_resilient' - name: - type: string - description: The display name for the connector. - secrets: - $ref: '#/components/schemas/secrets_properties_resilient' - update_connector_request_serverlog: - title: Update server log connector request - type: object - required: - - name - properties: - name: - type: string - description: The display name for the connector. - update_connector_request_servicenow: - title: Update ServiceNow ITSM connector or ServiceNow SecOps request - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_servicenow' - name: - type: string - description: The display name for the connector. 
- secrets: - $ref: '#/components/schemas/secrets_properties_servicenow' - update_connector_request_servicenow_itom: - title: Update ServiceNow ITOM connector request - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_servicenow_itom' - name: - type: string - description: The display name for the connector. - secrets: - $ref: '#/components/schemas/secrets_properties_servicenow' - update_connector_request_servicenow_sir: - title: Update ServiceNow SecOps connector request - description: | - The ServiceNow SecOps connector uses the import set API to create ServiceNow security incidents. You can use the connector for rule actions and cases. - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_servicenow' - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_servicenow' - update_connector_request_slack: - title: Update Slack connector request - description: The Slack connector uses Slack Incoming Webhooks. - type: object - required: - - name - - secrets - properties: - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_slack' - update_connector_request_slack_api: - title: Update Slack connector request - type: object - required: - - name - - secrets - properties: - name: - type: string - description: The display name for the connector. - secrets: - $ref: '#/components/schemas/secrets_properties_slack_api' - update_connector_request_swimlane: - title: Update Swimlane connector request - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_swimlane' - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_swimlane' - update_connector_request_teams: - title: Update Microsoft Teams connector request - description: The Microsoft Teams connector uses Incoming Webhooks. - type: object - required: - - name - - secrets - properties: - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_teams' - update_connector_request_tines: - title: Update Tines connector request - description: | - The Tines connector uses Tines Webhook actions to send events via POST request. - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_tines' - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_tines' - update_connector_request_webhook: - title: Update Webhook connector request - description: | - The Webhook connector uses axios to send a POST or PUT request to a web service. - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_webhook' - name: - type: string - description: The display name for the connector. 
- example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_webhook' - update_connector_request_xmatters: - title: Update xMatters connector request - description: | - The xMatters connector uses the xMatters Workflow for Elastic to send actionable alerts to on-call xMatters resources. - type: object - required: - - config - - name - - secrets - properties: - config: - $ref: '#/components/schemas/config_properties_xmatters' - name: - type: string - description: The display name for the connector. - example: my-connector - secrets: - $ref: '#/components/schemas/secrets_properties_xmatters' - connector_types: - title: Connector types - type: string - description: The type of connector. For example, `.email`, `.index`, `.jira`, `.opsgenie`, or `.server-log`. - enum: - - .cases-webhook - - .email - - .gemini - - .index - - .jira - - .opsgenie - - .pagerduty - - .resilient - - .servicenow - - .servicenow-itom - - .servicenow-sir - - .server-log - - .slack - - .slack_api - - .swimlane - - .teams - - .tines - - .webhook - - .xmatters - example: .server-log - features: - type: string - description: | - The feature that uses the connector. Valid values are `alerting`, `cases`, `uptime`, and `siem`. - enum: - - alerting - - cases - - uptime - - siem - run_connector_params_documents: - title: Index connector parameters - description: Test an action that indexes a document into Elasticsearch. - type: object - required: - - documents - properties: - documents: - type: array - description: The documents in JSON format for index connectors. - items: - type: object - additionalProperties: true - run_connector_params_level_message: - title: Server log connector parameters - description: Test an action that writes an entry to the Kibana server log. - type: object - required: - - message - properties: - level: - type: string - description: The log level of the message for server log connectors. - enum: - - debug - - error - - fatal - - info - - trace - - warn - default: info - message: - type: string - description: The message for server log connectors. - run_connector_subaction_addevent: - title: The addEvent subaction - type: object - required: - - subAction - description: The `addEvent` subaction for ServiceNow ITOM connectors. - properties: - subAction: - type: string - description: The action to test. - enum: - - addEvent - subActionParams: - type: object - description: The set of configuration properties for the action. - properties: - additional_info: - type: string - description: Additional information about the event. - description: - type: string - description: The details about the event. - event_class: - type: string - description: A specific instance of the source. - message_key: - type: string - description: All actions sharing this key are associated with the same ServiceNow alert. The default value is `:`. - metric_name: - type: string - description: The name of the metric. - node: - type: string - description: The host that the event was triggered for. - resource: - type: string - description: The name of the resource. - severity: - type: string - description: The severity of the event. - source: - type: string - description: The name of the event source type. - time_of_event: - type: string - description: The time of the event. - type: - type: string - description: The type of event. 
- run_connector_subaction_closealert: - title: The closeAlert subaction - type: object - required: - - subAction - - subActionParams - description: The `closeAlert` subaction for Opsgenie connectors. - properties: - subAction: - type: string - description: The action to test. - enum: - - closeAlert - subActionParams: - type: object - required: - - alias - properties: - alias: - type: string - description: The unique identifier used for alert deduplication in Opsgenie. The alias must match the value used when creating the alert. - note: - type: string - description: Additional information for the alert. - source: - type: string - description: The display name for the source of the alert. - user: - type: string - description: The display name for the owner. - run_connector_subaction_createalert: - title: The createAlert subaction - type: object - required: - - subAction - - subActionParams - description: The `createAlert` subaction for Opsgenie connectors. - properties: - subAction: - type: string - description: The action to test. - enum: - - createAlert - subActionParams: - type: object - required: - - message - properties: - actions: - type: array - description: The custom actions available to the alert. - items: - type: string - alias: - type: string - description: The unique identifier used for alert deduplication in Opsgenie. - description: - type: string - description: A description that provides detailed information about the alert. - details: - type: object - description: The custom properties of the alert. - additionalProperties: true - example: - key1: value1 - key2: value2 - entity: - type: string - description: The domain of the alert. For example, the application or server name. - message: - type: string - description: The alert message. - note: - type: string - description: Additional information for the alert. - priority: - type: string - description: The priority level for the alert. - enum: - - P1 - - P2 - - P3 - - P4 - - P5 - responders: - type: array - description: | - The entities to receive notifications about the alert. If `type` is `user`, either `id` or `username` is required. If `type` is `team`, either `id` or `name` is required. - items: - type: object - properties: - id: - type: string - description: The identifier for the entity. - name: - type: string - description: The name of the entity. - type: - type: string - description: The type of responders, in this case `escalation`. - enum: - - escalation - - schedule - - team - - user - username: - type: string - description: A valid email address for the user. - source: - type: string - description: The display name for the source of the alert. - tags: - type: array - description: The tags for the alert. - items: - type: string - user: - type: string - description: The display name for the owner. - visibleTo: - type: array - description: The teams and users that the alert will be visible to without sending a notification. Only one of `id`, `name`, or `username` is required. - items: - type: object - required: - - type - properties: - id: - type: string - description: The identifier for the entity. - name: - type: string - description: The name of the entity. - type: - type: string - description: Valid values are `team` and `user`. - enum: - - team - - user - username: - type: string - description: The user name. This property is required only when the `type` is `user`. 
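Similarly, a minimal `createAlert` payload sketched from the schema above; only `message` is required, and the alias, priority, and responder values are placeholders.

params:
  subAction: createAlert
  subActionParams:
    message: CPU usage above threshold
    alias: 'my-rule:my-alert'      # placeholder deduplication alias
    priority: P3
    tags:
      - production
    responders:
      - type: team
        name: ops-team             # placeholder team name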
- run_connector_subaction_fieldsbyissuetype: - title: The fieldsByIssueType subaction - type: object - required: - - subAction - - subActionParams - description: The `fieldsByIssueType` subaction for Jira connectors. - properties: - subAction: - type: string - description: The action to test. - enum: - - fieldsByIssueType - subActionParams: - type: object - required: - - id - properties: - id: - type: string - description: The Jira issue type identifier. - example: 10024 - run_connector_subaction_getchoices: - title: The getChoices subaction - type: object - required: - - subAction - - subActionParams - description: The `getChoices` subaction for ServiceNow ITOM, ServiceNow ITSM, and ServiceNow SecOps connectors. - properties: - subAction: - type: string - description: The action to test. - enum: - - getChoices - subActionParams: - type: object - description: The set of configuration properties for the action. - required: - - fields - properties: - fields: - type: array - description: An array of fields. - items: - type: string - run_connector_subaction_getfields: - title: The getFields subaction - type: object - required: - - subAction - description: The `getFields` subaction for Jira, ServiceNow ITSM, and ServiceNow SecOps connectors. - properties: - subAction: - type: string - description: The action to test. - enum: - - getFields - run_connector_subaction_getincident: - title: The getIncident subaction - type: object - description: The `getIncident` subaction for Jira, ServiceNow ITSM, and ServiceNow SecOps connectors. - required: - - subAction - - subActionParams - properties: - subAction: - type: string - description: The action to test. - enum: - - getIncident - subActionParams: - type: object - required: - - externalId - properties: - externalId: - type: string - description: The Jira, ServiceNow ITSM, or ServiceNow SecOps issue identifier. - example: 71778 - run_connector_subaction_issue: - title: The issue subaction - type: object - required: - - subAction - description: The `issue` subaction for Jira connectors. - properties: - subAction: - type: string - description: The action to test. - enum: - - issue - subActionParams: - type: object - required: - - id - properties: - id: - type: string - description: The Jira issue identifier. - example: 71778 - run_connector_subaction_issues: - title: The issues subaction - type: object - required: - - subAction - - subActionParams - description: The `issues` subaction for Jira connectors. - properties: - subAction: - type: string - description: The action to test. - enum: - - issues - subActionParams: - type: object - required: - - title - properties: - title: - type: string - description: The title of the Jira issue. - run_connector_subaction_issuetypes: - title: The issueTypes subaction - type: object - required: - - subAction - description: The `issueTypes` subaction for Jira connectors. - properties: - subAction: - type: string - description: The action to test. - enum: - - issueTypes - run_connector_subaction_pushtoservice: - title: The pushToService subaction - type: object - required: - - subAction - - subActionParams - description: The `pushToService` subaction for Jira, ServiceNow ITSM, ServiceNow SecOps, and Swimlane connectors. - properties: - subAction: - type: string - description: The action to test. - enum: - - pushToService - subActionParams: - type: object - description: The set of configuration properties for the action. 
- properties: - comments: - type: array - description: Additional information that is sent to Jira, ServiceNow ITSM, ServiceNow SecOps, or Swimlane. - items: - type: object - properties: - comment: - type: string - description: A comment related to the incident. For example, describe how to troubleshoot the issue. - commentId: - type: integer - description: A unique identifier for the comment. - incident: - type: object - description: Information necessary to create or update a Jira, ServiceNow ITSM, ServiceNow SecOps, or Swimlane incident. - properties: - alertId: - type: string - description: The alert identifier for Swimlane connectors. - caseId: - type: string - description: The case identifier for the incident for Swimlane connectors. - caseName: - type: string - description: The case name for the incident for Swimlane connectors. - category: - type: string - description: The category of the incident for ServiceNow ITSM and ServiceNow SecOps connectors. - correlation_display: - type: string - description: A descriptive label of the alert for correlation purposes for ServiceNow ITSM and ServiceNow SecOps connectors. - correlation_id: - type: string - description: | - The correlation identifier for the security incident for ServiceNow ITSM and ServiceNow SecOps connectors. Connectors using the same correlation ID are associated with the same ServiceNow incident. This value determines whether a new ServiceNow incident is created or an existing one is updated. Modifying this value is optional; if not modified, the rule ID and alert ID are combined as `{{ruleID}}:{{alert ID}}` to form the correlation ID value in ServiceNow. The maximum character length for this value is 100 characters. NOTE: Using the default configuration of `{{ruleID}}:{{alert ID}}` ensures that ServiceNow creates a separate incident record for every generated alert that uses a unique alert ID. If the rule generates multiple alerts that use the same alert IDs, ServiceNow creates and continually updates a single incident record for the alert. - description: - type: string - description: The description of the incident for Jira, ServiceNow ITSM, ServiceNow SecOps, and Swimlane connectors. - dest_ip: - description: | - A list of destination IP addresses related to the security incident for ServiceNow SecOps connectors. The IPs are added as observables to the security incident. - oneOf: - - type: string - - type: array - items: - type: string - externalId: - type: string - description: | - The Jira, ServiceNow ITSM, or ServiceNow SecOps issue identifier. If present, the incident is updated. Otherwise, a new incident is created. - impact: - type: string - description: The impact of the incident for ServiceNow ITSM connectors. - issueType: - type: integer - description: The type of incident for Jira connectors. For example, 10006. To obtain the list of valid values, set `subAction` to `issueTypes`. - labels: - type: array - items: - type: string - description: | - The labels for the incident for Jira connectors. NOTE: Labels cannot contain spaces. - malware_hash: - description: A list of malware hashes related to the security incident for ServiceNow SecOps connectors. The hashes are added as observables to the security incident. - oneOf: - - type: string - - type: array - items: - type: string - malware_url: - type: string - description: A list of malware URLs related to the security incident for ServiceNow SecOps connectors. The URLs are added as observables to the security incident.
- oneOf: - - type: string - - type: array - items: - type: string - parent: - type: string - description: The ID or key of the parent issue for Jira connectors. Applies only to `Sub-task` types of issues. - priority: - type: string - description: The priority of the incident in Jira and ServiceNow SecOps connectors. - ruleName: - type: string - description: The rule name for Swimlane connectors. - severity: - type: string - description: The severity of the incident for ServiceNow ITSM and Swimlane connectors. - short_description: - type: string - description: | - A short description of the incident for ServiceNow ITSM and ServiceNow SecOps connectors. It is used for searching the contents of the knowledge base. - source_ip: - description: A list of source IP addresses related to the security incident for ServiceNow SecOps connectors. The IPs are added as observables to the security incident. - oneOf: - - type: string - - type: array - items: - type: string - subcategory: - type: string - description: The subcategory of the incident for ServiceNow ITSM and ServiceNow SecOps connectors. - summary: - type: string - description: A summary of the incident for Jira connectors. - title: - type: string - description: | - A title for the incident for Jira connectors. It is used for searching the contents of the knowledge base. - urgency: - type: string - description: The urgency of the incident for ServiceNow ITSM connectors. - action_response_properties: - title: Action response properties - description: The properties vary depending on the action type. - type: object - properties: - actionTypeId: - type: string - config: - type: object - id: - type: string - isDeprecated: - type: boolean - description: Indicates whether the action type is deprecated. - isMissingSecrets: - type: boolean - description: Indicates whether secrets are missing for the action. - isPreconfigured: - type: boolean - description: Indicates whether it is a preconfigured action. - name: - type: string - run_connector_general_response: - title: Response from running a connector. - type: object - required: - - connector_id - - status - properties: - connector_id: - type: string - description: The identifier for the connector. - data: - oneOf: - - type: object - description: Information returned from the action. - additionalProperties: true - - type: array - description: An array of information returned from the action. - items: - type: object - status: - type: string - description: The status of the action. - enum: - - error - - ok - legacy_run_connector_general_response: - title: Response from legacy endpoint for running a connector. - type: object - properties: - actionId: - type: string - data: - oneOf: - - type: object - description: Information returned from the action. - additionalProperties: true - - type: array - description: An array of information returned from the action. - items: - type: object - status: - type: string - description: The status of the action. 
- bad_request_error: - title: Bad request - type: object - properties: - error: - type: string - example: Bad Request - enum: - - Bad Request - message: - type: string - example: 'error validating action type config: [index]: expected value of type [string] but got [undefined]' - statusCode: - type: integer - example: 400 - enum: - - 400 - authorization_error: - type: object - title: Unauthorized response - properties: - error: - type: string - example: Unauthorized - enum: - - Unauthorized - message: - type: string - statusCode: - type: integer - example: 401 - enum: - - 401 - object_not_found_error: - type: object - title: Not found response - properties: - error: - type: string - example: Not Found - enum: - - Not Found - message: - type: string - example: Saved object [action/baf33fc0-920c-11ed-b36a-874bd1548a00] not found - statusCode: - type: integer - example: 404 - enum: - - 404 - examples: - create_index_connector_request: - summary: Create an index connector. - value: - name: my-connector - connector_type_id: .index - config: - index: test-index - create_index_connector_response: - summary: A new index connector. - value: - id: c55b6eb0-6bad-11eb-9f3b-611eebc6c3ad - connector_type_id: .index - name: my-connector - config: - index: test-index - refresh: false - executionTimeField: null - is_preconfigured: false - is_deprecated: false - is_missing_secrets: false - get_connector_response: - summary: A list of connector types - value: - id: df770e30-8b8b-11ed-a780-3b746c987a81 - name: my_server_log_connector - config: {} - connector_type_id: .server-log - is_preconfigured: false - is_deprecated: false - is_missing_secrets: false - update_index_connector_request: - summary: Update an index connector. - value: - name: updated-connector - config: - index: updated-index - get_connectors_response: - summary: A list of connectors - value: - - id: preconfigured-email-connector - name: my-preconfigured-email-notification - connector_type_id: .email - is_preconfigured: true - is_deprecated: false - referenced_by_count: 0 - - id: e07d0c80-8b8b-11ed-a780-3b746c987a81 - name: my-index-connector - config: - index: test-index - refresh: false - executionTimeField: null - connector_type_id: .index - is_preconfigured: false - is_deprecated: false - referenced_by_count: 2 - is_missing_secrets: false - get_connector_types_response: - summary: A list of connector types - value: - - id: .swimlane - name: Swimlane - enabled: true - enabled_in_config: true - enabled_in_license: true - minimum_license_required: gold - supported_feature_ids: - - alerting - - cases - - siem - - id: .index - name: Index - enabled: true - enabled_in_config: true - enabled_in_license: true - minimum_license_required: basic - supported_feature_ids: - - alerting - - uptime - - siem - - id: .server-log - name: Server log - enabled: true - enabled_in_config: true - enabled_in_license: true - minimum_license_required: basic - supported_feature_ids: - - alerting - - uptime - run_index_connector_request: - summary: Run an index connector. - value: - params: - documents: - - id: my_doc_id - name: my_doc_name - message: hello, world - run_jira_connector_request: - summary: Run a Jira connector to retrieve the list of issue types. - value: - params: - subAction: issueTypes - run_server_log_connector_request: - summary: Run a server log connector. - value: - params: - level: warn - message: Test warning message. - run_servicenow_itom_connector_request: - summary: Run a ServiceNow ITOM connector to retrieve the list of choices. 
- value: - params: - subAction: getChoices - subActionParams: - fields: - - severity - - urgency - run_swimlane_connector_request: - summary: Run a Swimlane connector to create an incident. - value: - params: - subAction: pushToService - subActionParams: - comments: - - commentId: 1 - comment: A comment about the incident. - incident: - caseId: '1000' - caseName: Case name - description: Description of the incident. - run_index_connector_response: - summary: Response from running an index connector. - value: - connector_id: fd38c600-96a5-11ed-bb79-353b74189cba - data: - errors: false - items: - - create: - _id: 4JtvwYUBrcyxt2NnfW3y - _index: my-index - _primary_term: 1 - _seq_no: 0 - _shards: - failed: 0 - successful: 1 - total: 2 - _version: 1 - result: created - status: 201 - took: 135 - status: ok - run_jira_connector_response: - summary: Response from retrieving the list of issue types for a Jira connector. - value: - connector_id: b3aad810-edbe-11ec-82d1-11348ecbf4a6 - data: - - id: 10024 - name: Improvement - - id: 10006 - name: Task - - id: 10007 - name: Sub-task - - id: 10025 - name: New Feature - - id: 10023 - name: Bug - - id: 10000 - name: Epic - status: ok - run_server_log_connector_response: - summary: Response from running a server log connector. - value: - connector_id: 7fc7b9a0-ecc9-11ec-8736-e7d63118c907 - status: ok - run_servicenow_itom_connector_response: - summary: Response from retrieving the list of choices for a ServiceNow ITOM connector. - value: - connector_id: 9d9be270-2fd2-11ed-b0e0-87533c532698 - data: - - dependent_value: '' - element: severity - label: Critical - value: 1 - - dependent_value: '' - element: severity - label: Major - value: 2 - - dependent_value: '' - element: severity - label: Minor - value: 3 - - dependent_value: '' - element: severity - label: Warning - value: 4 - - dependent_value: '' - element: severity - label: OK - value: 5 - - dependent_value: '' - element: severity - label: Clear - value: 0 - - dependent_value: '' - element: urgency - label: 1 - High - value: 1 - - dependent_value: '' - element: urgency - label: 2 - Medium - value: 2 - - dependent_value: '' - element: urgency - label: 3 - Low - value: 3 - status: ok - run_swimlane_connector_response: - summary: Response from creating a Swimlane incident. - value: - connector_id: a4746470-2f94-11ed-b0e0-87533c532698 - data: - id: aKPmBHWzmdRQtx6Mx - title: TEST-457 - url: https://elastic.swimlane.url.us/record/aNcL2xniGHGpa2AHb/aKPmBHWzmdRQtx6Mx - pushedDate: '2022-09-08T16:52:27.866Z' - comments: - - commentId: 1 - pushedDate: '2022-09-08T16:52:27.865Z' - status: ok - responses: - 200_actions: - description: Indicates a successful call. - content: - application/json: - schema: - $ref: '#/components/schemas/action_response_properties' -security: - - basicAuth: [] - - apiKeyAuth: [] diff --git a/generated/connectors/connectors.gen.go b/generated/connectors/connectors.gen.go deleted file mode 100644 index b059ea87e..000000000 --- a/generated/connectors/connectors.gen.go +++ /dev/null @@ -1,5720 +0,0 @@ -// Package connectors provides primitives to interact with the openapi HTTP API. -// -// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.4.1 DO NOT EDIT. -package connectors - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "net/url" - "strings" - - "github.com/oapi-codegen/runtime" -) - -const ( - ApiKeyAuthScopes = "apiKeyAuth.Scopes" - BasicAuthScopes = "basicAuth.Scopes" -) - -// Defines values for AuthorizationErrorError. 
-const ( - Unauthorized AuthorizationErrorError = "Unauthorized" -) - -// Defines values for AuthorizationErrorStatusCode. -const ( - N401 AuthorizationErrorStatusCode = 401 -) - -// Defines values for BadRequestErrorError. -const ( - BadRequest BadRequestErrorError = "Bad Request" -) - -// Defines values for BadRequestErrorStatusCode. -const ( - N400 BadRequestErrorStatusCode = 400 -) - -// Defines values for ConfigPropertiesCasesWebhookCreateCommentMethod. -const ( - ConfigPropertiesCasesWebhookCreateCommentMethodPatch ConfigPropertiesCasesWebhookCreateCommentMethod = "patch" - ConfigPropertiesCasesWebhookCreateCommentMethodPost ConfigPropertiesCasesWebhookCreateCommentMethod = "post" - ConfigPropertiesCasesWebhookCreateCommentMethodPut ConfigPropertiesCasesWebhookCreateCommentMethod = "put" -) - -// Defines values for ConfigPropertiesCasesWebhookCreateIncidentMethod. -const ( - ConfigPropertiesCasesWebhookCreateIncidentMethodPatch ConfigPropertiesCasesWebhookCreateIncidentMethod = "patch" - ConfigPropertiesCasesWebhookCreateIncidentMethodPost ConfigPropertiesCasesWebhookCreateIncidentMethod = "post" - ConfigPropertiesCasesWebhookCreateIncidentMethodPut ConfigPropertiesCasesWebhookCreateIncidentMethod = "put" -) - -// Defines values for ConfigPropertiesCasesWebhookUpdateIncidentMethod. -const ( - ConfigPropertiesCasesWebhookUpdateIncidentMethodPatch ConfigPropertiesCasesWebhookUpdateIncidentMethod = "patch" - ConfigPropertiesCasesWebhookUpdateIncidentMethodPost ConfigPropertiesCasesWebhookUpdateIncidentMethod = "post" - ConfigPropertiesCasesWebhookUpdateIncidentMethodPut ConfigPropertiesCasesWebhookUpdateIncidentMethod = "put" -) - -// Defines values for ConfigPropertiesSwimlaneConnectorType. -const ( - ConfigPropertiesSwimlaneConnectorTypeAlerts ConfigPropertiesSwimlaneConnectorType = "alerts" - ConfigPropertiesSwimlaneConnectorTypeAll ConfigPropertiesSwimlaneConnectorType = "all" - ConfigPropertiesSwimlaneConnectorTypeCases ConfigPropertiesSwimlaneConnectorType = "cases" -) - -// Defines values for ConfigPropertiesWebhookMethod. -const ( - ConfigPropertiesWebhookMethodPatch ConfigPropertiesWebhookMethod = "patch" - ConfigPropertiesWebhookMethodPost ConfigPropertiesWebhookMethod = "post" - ConfigPropertiesWebhookMethodPut ConfigPropertiesWebhookMethod = "put" -) - -// Defines values for ConnectorResponsePropertiesCasesWebhookConnectorTypeId. -const ( - ConnectorResponsePropertiesCasesWebhookConnectorTypeIdDotCasesWebhook ConnectorResponsePropertiesCasesWebhookConnectorTypeId = ".cases-webhook" -) - -// Defines values for ConnectorResponsePropertiesEmailConnectorTypeId. -const ( - ConnectorResponsePropertiesEmailConnectorTypeIdDotEmail ConnectorResponsePropertiesEmailConnectorTypeId = ".email" -) - -// Defines values for ConnectorResponsePropertiesGeminiConnectorTypeId. -const ( - ConnectorResponsePropertiesGeminiConnectorTypeIdDotGemini ConnectorResponsePropertiesGeminiConnectorTypeId = ".gemini" -) - -// Defines values for ConnectorResponsePropertiesIndexConnectorTypeId. -const ( - ConnectorResponsePropertiesIndexConnectorTypeIdDotIndex ConnectorResponsePropertiesIndexConnectorTypeId = ".index" -) - -// Defines values for ConnectorResponsePropertiesJiraConnectorTypeId. -const ( - ConnectorResponsePropertiesJiraConnectorTypeIdDotJira ConnectorResponsePropertiesJiraConnectorTypeId = ".jira" -) - -// Defines values for ConnectorResponsePropertiesOpsgenieConnectorTypeId. 
-const ( - ConnectorResponsePropertiesOpsgenieConnectorTypeIdDotOpsgenie ConnectorResponsePropertiesOpsgenieConnectorTypeId = ".opsgenie" -) - -// Defines values for ConnectorResponsePropertiesPagerdutyConnectorTypeId. -const ( - ConnectorResponsePropertiesPagerdutyConnectorTypeIdDotPagerduty ConnectorResponsePropertiesPagerdutyConnectorTypeId = ".pagerduty" -) - -// Defines values for ConnectorResponsePropertiesResilientConnectorTypeId. -const ( - ConnectorResponsePropertiesResilientConnectorTypeIdDotResilient ConnectorResponsePropertiesResilientConnectorTypeId = ".resilient" -) - -// Defines values for ConnectorResponsePropertiesServerlogConnectorTypeId. -const ( - ConnectorResponsePropertiesServerlogConnectorTypeIdDotServerLog ConnectorResponsePropertiesServerlogConnectorTypeId = ".server-log" -) - -// Defines values for ConnectorResponsePropertiesServicenowConnectorTypeId. -const ( - ConnectorResponsePropertiesServicenowConnectorTypeIdDotServicenow ConnectorResponsePropertiesServicenowConnectorTypeId = ".servicenow" -) - -// Defines values for ConnectorResponsePropertiesServicenowItomConnectorTypeId. -const ( - ConnectorResponsePropertiesServicenowItomConnectorTypeIdDotServicenowItom ConnectorResponsePropertiesServicenowItomConnectorTypeId = ".servicenow-itom" -) - -// Defines values for ConnectorResponsePropertiesServicenowSirConnectorTypeId. -const ( - ConnectorResponsePropertiesServicenowSirConnectorTypeIdDotServicenowSir ConnectorResponsePropertiesServicenowSirConnectorTypeId = ".servicenow-sir" -) - -// Defines values for ConnectorResponsePropertiesSlackConnectorTypeId. -const ( - ConnectorResponsePropertiesSlackConnectorTypeIdDotSlack ConnectorResponsePropertiesSlackConnectorTypeId = ".slack" -) - -// Defines values for ConnectorResponsePropertiesSlackApiConnectorTypeId. -const ( - ConnectorResponsePropertiesSlackApiConnectorTypeIdDotSlackApi ConnectorResponsePropertiesSlackApiConnectorTypeId = ".slack_api" -) - -// Defines values for ConnectorResponsePropertiesSwimlaneConnectorTypeId. -const ( - ConnectorResponsePropertiesSwimlaneConnectorTypeIdDotSwimlane ConnectorResponsePropertiesSwimlaneConnectorTypeId = ".swimlane" -) - -// Defines values for ConnectorResponsePropertiesTeamsConnectorTypeId. -const ( - ConnectorResponsePropertiesTeamsConnectorTypeIdDotTeams ConnectorResponsePropertiesTeamsConnectorTypeId = ".teams" -) - -// Defines values for ConnectorResponsePropertiesTinesConnectorTypeId. -const ( - ConnectorResponsePropertiesTinesConnectorTypeIdDotTines ConnectorResponsePropertiesTinesConnectorTypeId = ".tines" -) - -// Defines values for ConnectorResponsePropertiesWebhookConnectorTypeId. -const ( - ConnectorResponsePropertiesWebhookConnectorTypeIdDotWebhook ConnectorResponsePropertiesWebhookConnectorTypeId = ".webhook" -) - -// Defines values for ConnectorResponsePropertiesXmattersConnectorTypeId. -const ( - ConnectorResponsePropertiesXmattersConnectorTypeIdDotXmatters ConnectorResponsePropertiesXmattersConnectorTypeId = ".xmatters" -) - -// Defines values for ConnectorTypes. 
-const ( - ConnectorTypesDotCasesWebhook ConnectorTypes = ".cases-webhook" - ConnectorTypesDotEmail ConnectorTypes = ".email" - ConnectorTypesDotGemini ConnectorTypes = ".gemini" - ConnectorTypesDotIndex ConnectorTypes = ".index" - ConnectorTypesDotJira ConnectorTypes = ".jira" - ConnectorTypesDotOpsgenie ConnectorTypes = ".opsgenie" - ConnectorTypesDotPagerduty ConnectorTypes = ".pagerduty" - ConnectorTypesDotResilient ConnectorTypes = ".resilient" - ConnectorTypesDotServerLog ConnectorTypes = ".server-log" - ConnectorTypesDotServicenow ConnectorTypes = ".servicenow" - ConnectorTypesDotServicenowItom ConnectorTypes = ".servicenow-itom" - ConnectorTypesDotServicenowSir ConnectorTypes = ".servicenow-sir" - ConnectorTypesDotSlack ConnectorTypes = ".slack" - ConnectorTypesDotSlackApi ConnectorTypes = ".slack_api" - ConnectorTypesDotSwimlane ConnectorTypes = ".swimlane" - ConnectorTypesDotTeams ConnectorTypes = ".teams" - ConnectorTypesDotTines ConnectorTypes = ".tines" - ConnectorTypesDotWebhook ConnectorTypes = ".webhook" - ConnectorTypesDotXmatters ConnectorTypes = ".xmatters" -) - -// Defines values for CreateConnectorRequestCasesWebhookConnectorTypeId. -const ( - DotCasesWebhook CreateConnectorRequestCasesWebhookConnectorTypeId = ".cases-webhook" -) - -// Defines values for CreateConnectorRequestEmailConnectorTypeId. -const ( - CreateConnectorRequestEmailConnectorTypeIdDotEmail CreateConnectorRequestEmailConnectorTypeId = ".email" -) - -// Defines values for CreateConnectorRequestGeminiConnectorTypeId. -const ( - CreateConnectorRequestGeminiConnectorTypeIdDotGemini CreateConnectorRequestGeminiConnectorTypeId = ".gemini" -) - -// Defines values for CreateConnectorRequestIndexConnectorTypeId. -const ( - CreateConnectorRequestIndexConnectorTypeIdDotIndex CreateConnectorRequestIndexConnectorTypeId = ".index" -) - -// Defines values for CreateConnectorRequestJiraConnectorTypeId. -const ( - CreateConnectorRequestJiraConnectorTypeIdDotJira CreateConnectorRequestJiraConnectorTypeId = ".jira" -) - -// Defines values for CreateConnectorRequestOpsgenieConnectorTypeId. -const ( - CreateConnectorRequestOpsgenieConnectorTypeIdDotOpsgenie CreateConnectorRequestOpsgenieConnectorTypeId = ".opsgenie" -) - -// Defines values for CreateConnectorRequestPagerdutyConnectorTypeId. -const ( - CreateConnectorRequestPagerdutyConnectorTypeIdDotPagerduty CreateConnectorRequestPagerdutyConnectorTypeId = ".pagerduty" -) - -// Defines values for CreateConnectorRequestResilientConnectorTypeId. -const ( - CreateConnectorRequestResilientConnectorTypeIdDotResilient CreateConnectorRequestResilientConnectorTypeId = ".resilient" -) - -// Defines values for CreateConnectorRequestServerlogConnectorTypeId. -const ( - CreateConnectorRequestServerlogConnectorTypeIdDotServerLog CreateConnectorRequestServerlogConnectorTypeId = ".server-log" -) - -// Defines values for CreateConnectorRequestServicenowConnectorTypeId. -const ( - CreateConnectorRequestServicenowConnectorTypeIdDotServicenow CreateConnectorRequestServicenowConnectorTypeId = ".servicenow" -) - -// Defines values for CreateConnectorRequestServicenowItomConnectorTypeId. -const ( - CreateConnectorRequestServicenowItomConnectorTypeIdDotServicenowItom CreateConnectorRequestServicenowItomConnectorTypeId = ".servicenow-itom" -) - -// Defines values for CreateConnectorRequestServicenowSirConnectorTypeId. 
-const ( - CreateConnectorRequestServicenowSirConnectorTypeIdDotServicenowSir CreateConnectorRequestServicenowSirConnectorTypeId = ".servicenow-sir" -) - -// Defines values for CreateConnectorRequestSlackConnectorTypeId. -const ( - CreateConnectorRequestSlackConnectorTypeIdDotSlack CreateConnectorRequestSlackConnectorTypeId = ".slack" -) - -// Defines values for CreateConnectorRequestSlackApiConnectorTypeId. -const ( - CreateConnectorRequestSlackApiConnectorTypeIdDotSlackApi CreateConnectorRequestSlackApiConnectorTypeId = ".slack_api" -) - -// Defines values for CreateConnectorRequestSwimlaneConnectorTypeId. -const ( - CreateConnectorRequestSwimlaneConnectorTypeIdDotSwimlane CreateConnectorRequestSwimlaneConnectorTypeId = ".swimlane" -) - -// Defines values for CreateConnectorRequestTeamsConnectorTypeId. -const ( - CreateConnectorRequestTeamsConnectorTypeIdDotTeams CreateConnectorRequestTeamsConnectorTypeId = ".teams" -) - -// Defines values for CreateConnectorRequestTinesConnectorTypeId. -const ( - CreateConnectorRequestTinesConnectorTypeIdDotTines CreateConnectorRequestTinesConnectorTypeId = ".tines" -) - -// Defines values for CreateConnectorRequestWebhookConnectorTypeId. -const ( - CreateConnectorRequestWebhookConnectorTypeIdDotWebhook CreateConnectorRequestWebhookConnectorTypeId = ".webhook" -) - -// Defines values for CreateConnectorRequestXmattersConnectorTypeId. -const ( - CreateConnectorRequestXmattersConnectorTypeIdDotXmatters CreateConnectorRequestXmattersConnectorTypeId = ".xmatters" -) - -// Defines values for Features. -const ( - FeaturesAlerting Features = "alerting" - FeaturesCases Features = "cases" - FeaturesSiem Features = "siem" - FeaturesUptime Features = "uptime" -) - -// Defines values for ObjectNotFoundErrorError. -const ( - NotFound ObjectNotFoundErrorError = "Not Found" -) - -// Defines values for ObjectNotFoundErrorStatusCode. -const ( - N404 ObjectNotFoundErrorStatusCode = 404 -) - -// Defines values for RunConnectorGeneralResponseStatus. -const ( - RunConnectorGeneralResponseStatusError RunConnectorGeneralResponseStatus = "error" - RunConnectorGeneralResponseStatusOk RunConnectorGeneralResponseStatus = "ok" -) - -// Defines values for RunConnectorParamsLevelMessageLevel. -const ( - RunConnectorParamsLevelMessageLevelDebug RunConnectorParamsLevelMessageLevel = "debug" - RunConnectorParamsLevelMessageLevelError RunConnectorParamsLevelMessageLevel = "error" - RunConnectorParamsLevelMessageLevelFatal RunConnectorParamsLevelMessageLevel = "fatal" - RunConnectorParamsLevelMessageLevelInfo RunConnectorParamsLevelMessageLevel = "info" - RunConnectorParamsLevelMessageLevelTrace RunConnectorParamsLevelMessageLevel = "trace" - RunConnectorParamsLevelMessageLevelWarn RunConnectorParamsLevelMessageLevel = "warn" -) - -// Defines values for RunConnectorSubactionAddeventSubAction. -const ( - AddEvent RunConnectorSubactionAddeventSubAction = "addEvent" -) - -// Defines values for RunConnectorSubactionClosealertSubAction. -const ( - CloseAlert RunConnectorSubactionClosealertSubAction = "closeAlert" -) - -// Defines values for RunConnectorSubactionCreatealertSubAction. -const ( - CreateAlert RunConnectorSubactionCreatealertSubAction = "createAlert" -) - -// Defines values for RunConnectorSubactionCreatealertSubActionParamsPriority. 
-const ( - P1 RunConnectorSubactionCreatealertSubActionParamsPriority = "P1" - P2 RunConnectorSubactionCreatealertSubActionParamsPriority = "P2" - P3 RunConnectorSubactionCreatealertSubActionParamsPriority = "P3" - P4 RunConnectorSubactionCreatealertSubActionParamsPriority = "P4" - P5 RunConnectorSubactionCreatealertSubActionParamsPriority = "P5" -) - -// Defines values for RunConnectorSubactionCreatealertSubActionParamsRespondersType. -const ( - RunConnectorSubactionCreatealertSubActionParamsRespondersTypeEscalation RunConnectorSubactionCreatealertSubActionParamsRespondersType = "escalation" - RunConnectorSubactionCreatealertSubActionParamsRespondersTypeSchedule RunConnectorSubactionCreatealertSubActionParamsRespondersType = "schedule" - RunConnectorSubactionCreatealertSubActionParamsRespondersTypeTeam RunConnectorSubactionCreatealertSubActionParamsRespondersType = "team" - RunConnectorSubactionCreatealertSubActionParamsRespondersTypeUser RunConnectorSubactionCreatealertSubActionParamsRespondersType = "user" -) - -// Defines values for RunConnectorSubactionCreatealertSubActionParamsVisibleToType. -const ( - RunConnectorSubactionCreatealertSubActionParamsVisibleToTypeTeam RunConnectorSubactionCreatealertSubActionParamsVisibleToType = "team" - RunConnectorSubactionCreatealertSubActionParamsVisibleToTypeUser RunConnectorSubactionCreatealertSubActionParamsVisibleToType = "user" -) - -// Defines values for RunConnectorSubactionFieldsbyissuetypeSubAction. -const ( - FieldsByIssueType RunConnectorSubactionFieldsbyissuetypeSubAction = "fieldsByIssueType" -) - -// Defines values for RunConnectorSubactionGetchoicesSubAction. -const ( - GetChoices RunConnectorSubactionGetchoicesSubAction = "getChoices" -) - -// Defines values for RunConnectorSubactionGetfieldsSubAction. -const ( - GetFields RunConnectorSubactionGetfieldsSubAction = "getFields" -) - -// Defines values for RunConnectorSubactionGetincidentSubAction. -const ( - GetIncident RunConnectorSubactionGetincidentSubAction = "getIncident" -) - -// Defines values for RunConnectorSubactionIssueSubAction. -const ( - Issue RunConnectorSubactionIssueSubAction = "issue" -) - -// Defines values for RunConnectorSubactionIssuesSubAction. -const ( - Issues RunConnectorSubactionIssuesSubAction = "issues" -) - -// Defines values for RunConnectorSubactionIssuetypesSubAction. -const ( - IssueTypes RunConnectorSubactionIssuetypesSubAction = "issueTypes" -) - -// Defines values for RunConnectorSubactionPushtoserviceSubAction. -const ( - PushToService RunConnectorSubactionPushtoserviceSubAction = "pushToService" -) - -// ActionResponseProperties The properties vary depending on the action type. -type ActionResponseProperties struct { - ActionTypeId *string `json:"actionTypeId,omitempty"` - Config *map[string]interface{} `json:"config,omitempty"` - Id *string `json:"id,omitempty"` - - // IsDeprecated Indicates whether the action type is deprecated. - IsDeprecated *bool `json:"isDeprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the action. - IsMissingSecrets *bool `json:"isMissingSecrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured action. - IsPreconfigured *bool `json:"isPreconfigured,omitempty"` - Name *string `json:"name,omitempty"` -} - -// AuthorizationError defines model for authorization_error. 
-type AuthorizationError struct { - Error *AuthorizationErrorError `json:"error,omitempty"` - Message *string `json:"message,omitempty"` - StatusCode *AuthorizationErrorStatusCode `json:"statusCode,omitempty"` -} - -// AuthorizationErrorError defines model for AuthorizationError.Error. -type AuthorizationErrorError string - -// AuthorizationErrorStatusCode defines model for AuthorizationError.StatusCode. -type AuthorizationErrorStatusCode int - -// BadRequestError defines model for bad_request_error. -type BadRequestError struct { - Error *BadRequestErrorError `json:"error,omitempty"` - Message *string `json:"message,omitempty"` - StatusCode *BadRequestErrorStatusCode `json:"statusCode,omitempty"` -} - -// BadRequestErrorError defines model for BadRequestError.Error. -type BadRequestErrorError string - -// BadRequestErrorStatusCode defines model for BadRequestError.StatusCode. -type BadRequestErrorStatusCode int - -// ConfigPropertiesCasesWebhook Defines properties for connectors when type is `.cases-webhook`. -type ConfigPropertiesCasesWebhook struct { - // CreateCommentJson A JSON payload sent to the create comment URL to create a case comment. You can use variables to add Kibana Cases data to the payload. The required variable is `case.comment`. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated once the Mustache variables have been placed when the REST method runs. Manually ensure that the JSON is valid, disregarding the Mustache variables, so the later validation will pass. - CreateCommentJson *string `json:"createCommentJson,omitempty"` - - // CreateCommentMethod The REST API HTTP request method to create a case comment in the third-party system. Valid values are `patch`, `post`, and `put`. - CreateCommentMethod *ConfigPropertiesCasesWebhookCreateCommentMethod `json:"createCommentMethod,omitempty"` - - // CreateCommentUrl The REST API URL to create a case comment by ID in the third-party system. You can use a variable to add the external system ID to the URL. If you are using the `xpack.actions.allowedHosts setting`, add the hostname to the allowed hosts. - CreateCommentUrl *string `json:"createCommentUrl,omitempty"` - - // CreateIncidentJson A JSON payload sent to the create case URL to create a case. You can use variables to add case data to the payload. Required variables are `case.title` and `case.description`. Due to Mustache template variables (which is the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid to avoid future validation errors; disregard Mustache variables during your review. - CreateIncidentJson string `json:"createIncidentJson"` - - // CreateIncidentMethod The REST API HTTP request method to create a case in the third-party system. Valid values are `patch`, `post`, and `put`. - CreateIncidentMethod *ConfigPropertiesCasesWebhookCreateIncidentMethod `json:"createIncidentMethod,omitempty"` - - // CreateIncidentResponseKey The JSON key in the create case response that contains the external case ID. - CreateIncidentResponseKey string `json:"createIncidentResponseKey"` - - // CreateIncidentUrl The REST API URL to create a case in the third-party system. 
If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - CreateIncidentUrl string `json:"createIncidentUrl"` - - // GetIncidentResponseExternalTitleKey The JSON key in get case response that contains the external case title. - GetIncidentResponseExternalTitleKey string `json:"getIncidentResponseExternalTitleKey"` - - // GetIncidentUrl The REST API URL to get the case by ID from the third-party system. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. You can use a variable to add the external system ID to the URL. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid, disregarding the Mustache variables, so the later validation will pass. - GetIncidentUrl string `json:"getIncidentUrl"` - - // HasAuth If true, a username and password for login type authentication must be provided. - HasAuth *bool `json:"hasAuth,omitempty"` - - // Headers A set of key-value pairs sent as headers with the request URLs for the create case, update case, get case, and create comment methods. - Headers *string `json:"headers,omitempty"` - - // UpdateIncidentJson The JSON payload sent to the update case URL to update the case. You can use variables to add Kibana Cases data to the payload. Required variables are `case.title` and `case.description`. Due to Mustache template variables (which is the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid to avoid future validation errors; disregard Mustache variables during your review. - UpdateIncidentJson string `json:"updateIncidentJson"` - - // UpdateIncidentMethod The REST API HTTP request method to update the case in the third-party system. Valid values are `patch`, `post`, and `put`. - UpdateIncidentMethod *ConfigPropertiesCasesWebhookUpdateIncidentMethod `json:"updateIncidentMethod,omitempty"` - - // UpdateIncidentUrl The REST API URL to update the case by ID in the third-party system. You can use a variable to add the external system ID to the URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - UpdateIncidentUrl string `json:"updateIncidentUrl"` - - // ViewIncidentUrl The URL to view the case in the external system. You can use variables to add the external system ID or external system title to the URL. - ViewIncidentUrl string `json:"viewIncidentUrl"` -} - -// ConfigPropertiesCasesWebhookCreateCommentMethod The REST API HTTP request method to create a case comment in the third-party system. Valid values are `patch`, `post`, and `put`. -type ConfigPropertiesCasesWebhookCreateCommentMethod string - -// ConfigPropertiesCasesWebhookCreateIncidentMethod The REST API HTTP request method to create a case in the third-party system. Valid values are `patch`, `post`, and `put`. -type ConfigPropertiesCasesWebhookCreateIncidentMethod string - -// ConfigPropertiesCasesWebhookUpdateIncidentMethod The REST API HTTP request method to update the case in the third-party system. Valid values are `patch`, `post`, and `put`. 
-type ConfigPropertiesCasesWebhookUpdateIncidentMethod string - -// ConfigPropertiesEmail Defines properties for connectors when type is `.email`. -type ConfigPropertiesEmail struct { - ClientId *string `json:"clientId"` - From *string `json:"from,omitempty"` - HasAuth *bool `json:"hasAuth,omitempty"` - Host *string `json:"host,omitempty"` - OauthTokenUrl *string `json:"oauthTokenUrl"` - Port *int `json:"port,omitempty"` - Secure *bool `json:"secure"` - Service *string `json:"service,omitempty"` - TenantId *string `json:"tenantId"` -} - -// ConfigPropertiesGemini Defines properties for connectors when type is `.gemini`. -type ConfigPropertiesGemini struct { - // ApiUrl The Google Gemini request URL. - ApiUrl string `json:"apiUrl"` - - // DefaultModel The generative artificial intelligence model for Google Gemini to use. - DefaultModel *string `json:"defaultModel,omitempty"` - - // GcpProjectID The Google ProjectID that has Vertex AI endpoint enabled. - GcpProjectID string `json:"gcpProjectID"` - - // GcpRegion The GCP region where the Vertex AI endpoint enabled. - GcpRegion string `json:"gcpRegion"` -} - -// ConfigPropertiesIndex Defines properties for connectors when type is `.index`. -type ConfigPropertiesIndex struct { - // ExecutionTimeField Specifies a field that will contain the time the alert condition was detected. - ExecutionTimeField *string `json:"executionTimeField"` - - // Index The Elasticsearch index to be written to. - Index string `json:"index"` - - // Refresh The refresh policy for the write request, which affects when changes are made visible to search. Refer to the refresh setting for Elasticsearch document APIs. - Refresh *bool `json:"refresh,omitempty"` -} - -// ConfigPropertiesJira Defines properties for connectors when type is `.jira`. -type ConfigPropertiesJira struct { - // ApiUrl The Jira instance URL. - ApiUrl string `json:"apiUrl"` - - // ProjectKey The Jira project key. - ProjectKey string `json:"projectKey"` -} - -// ConfigPropertiesOpsgenie Defines properties for connectors when type is `.opsgenie`. -type ConfigPropertiesOpsgenie struct { - // ApiUrl The Opsgenie URL. For example, `https://api.opsgenie.com` or `https://api.eu.opsgenie.com`. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - ApiUrl string `json:"apiUrl"` -} - -// ConfigPropertiesPagerduty Defines properties for connectors when type is `.pagerduty`. -type ConfigPropertiesPagerduty struct { - ApiUrl *string `json:"apiUrl"` -} - -// ConfigPropertiesResilient Defines properties for connectors when type is `.resilient`. -type ConfigPropertiesResilient struct { - // ApiUrl The IBM Resilient instance URL. - ApiUrl string `json:"apiUrl"` - - // OrgId The IBM Resilient organization ID. - OrgId string `json:"orgId"` -} - -// ConfigPropertiesServicenow Defines properties for connectors when type is `.servicenow`. -type ConfigPropertiesServicenow struct { - // ApiUrl The ServiceNow instance URL. - ApiUrl string `json:"apiUrl"` - - // ClientId The client ID assigned to your OAuth application. This property is required when `isOAuth` is `true`. - ClientId *string `json:"clientId,omitempty"` - - // IsOAuth The type of authentication to use. The default value is false, which means basic authentication is used instead of open authorization (OAuth). - IsOAuth *bool `json:"isOAuth,omitempty"` - - // JwtKeyId The key identifier assigned to the JWT verifier map of your OAuth application. This property is required when `isOAuth` is `true`. 
- JwtKeyId *string `json:"jwtKeyId,omitempty"` - - // UserIdentifierValue The identifier to use for OAuth authentication. This identifier should be the user field you selected when you created an OAuth JWT API endpoint for external clients in your ServiceNow instance. For example, if the selected user field is `Email`, the user identifier should be the user's email address. This property is required when `isOAuth` is `true`. - UserIdentifierValue *string `json:"userIdentifierValue,omitempty"` - - // UsesTableApi Determines whether the connector uses the Table API or the Import Set API. This property is supported only for ServiceNow ITSM and ServiceNow SecOps connectors. NOTE: If this property is set to `false`, the Elastic application should be installed in ServiceNow. - UsesTableApi *bool `json:"usesTableApi,omitempty"` -} - -// ConfigPropertiesServicenowItom Defines properties for connectors when type is `.servicenow`. -type ConfigPropertiesServicenowItom struct { - // ApiUrl The ServiceNow instance URL. - ApiUrl string `json:"apiUrl"` - - // ClientId The client ID assigned to your OAuth application. This property is required when `isOAuth` is `true`. - ClientId *string `json:"clientId,omitempty"` - - // IsOAuth The type of authentication to use. The default value is false, which means basic authentication is used instead of open authorization (OAuth). - IsOAuth *bool `json:"isOAuth,omitempty"` - - // JwtKeyId The key identifier assigned to the JWT verifier map of your OAuth application. This property is required when `isOAuth` is `true`. - JwtKeyId *string `json:"jwtKeyId,omitempty"` - - // UserIdentifierValue The identifier to use for OAuth authentication. This identifier should be the user field you selected when you created an OAuth JWT API endpoint for external clients in your ServiceNow instance. For example, if the selected user field is `Email`, the user identifier should be the user's email address. This property is required when `isOAuth` is `true`. - UserIdentifierValue *string `json:"userIdentifierValue,omitempty"` -} - -// ConfigPropertiesSwimlane Defines properties for connectors when type is `.swimlane`. -type ConfigPropertiesSwimlane struct { - // ApiUrl The Swimlane instance URL. - ApiUrl string `json:"apiUrl"` - - // AppId The Swimlane application ID. - AppId string `json:"appId"` - - // ConnectorType The type of connector. Valid values are `all`, `alerts`, and `cases`. - ConnectorType ConfigPropertiesSwimlaneConnectorType `json:"connectorType"` - - // Mappings The field mapping. - Mappings *ConfigPropertiesSwimlaneMappings `json:"mappings,omitempty"` -} - -// ConfigPropertiesSwimlaneConnectorType The type of connector. Valid values are `all`, `alerts`, and `cases`. -type ConfigPropertiesSwimlaneConnectorType string - -// ConfigPropertiesSwimlaneMappings The field mapping. -type ConfigPropertiesSwimlaneMappings struct { - // AlertIdConfig Mapping for the alert ID. - AlertIdConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` - - // Id The identifier for the field in Swimlane. - Id string `json:"id"` - - // Key The key for the field in Swimlane. - Key string `json:"key"` - - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"alertIdConfig,omitempty"` - - // CaseIdConfig Mapping for the case ID. - CaseIdConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` - - // Id The identifier for the field in Swimlane. 
- Id string `json:"id"` - - // Key The key for the field in Swimlane. - Key string `json:"key"` - - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"caseIdConfig,omitempty"` - - // CaseNameConfig Mapping for the case name. - CaseNameConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` - - // Id The identifier for the field in Swimlane. - Id string `json:"id"` - - // Key The key for the field in Swimlane. - Key string `json:"key"` - - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"caseNameConfig,omitempty"` - - // CommentsConfig Mapping for the case comments. - CommentsConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` - - // Id The identifier for the field in Swimlane. - Id string `json:"id"` - - // Key The key for the field in Swimlane. - Key string `json:"key"` - - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"commentsConfig,omitempty"` - - // DescriptionConfig Mapping for the case description. - DescriptionConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` - - // Id The identifier for the field in Swimlane. - Id string `json:"id"` - - // Key The key for the field in Swimlane. - Key string `json:"key"` - - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"descriptionConfig,omitempty"` - - // RuleNameConfig Mapping for the name of the alert's rule. - RuleNameConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` - - // Id The identifier for the field in Swimlane. - Id string `json:"id"` - - // Key The key for the field in Swimlane. - Key string `json:"key"` - - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"ruleNameConfig,omitempty"` - - // SeverityConfig Mapping for the severity. - SeverityConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` - - // Id The identifier for the field in Swimlane. - Id string `json:"id"` - - // Key The key for the field in Swimlane. - Key string `json:"key"` - - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"severityConfig,omitempty"` -} - -// ConfigPropertiesTines Defines properties for connectors when type is `.tines`. -type ConfigPropertiesTines struct { - Url string `json:"url"` -} - -// ConfigPropertiesWebhook Defines properties for connectors when type is `.webhook`. -type ConfigPropertiesWebhook struct { - HasAuth *bool `json:"hasAuth,omitempty"` - Headers *map[string]interface{} `json:"headers,omitempty"` - Method *ConfigPropertiesWebhookMethod `json:"method,omitempty"` - Url string `json:"url"` -} - -// ConfigPropertiesWebhookMethod defines model for ConfigPropertiesWebhook.Method. -type ConfigPropertiesWebhookMethod string - -// ConfigPropertiesXmatters Defines properties for connectors when type is `.xmatters`. -type ConfigPropertiesXmatters struct { - ConfigUrl *string `json:"configUrl"` - UsesBasic *bool `json:"usesBasic,omitempty"` -} - -// ConnectorResponseProperties The properties vary depending on the connector type. -type ConnectorResponseProperties struct { - union json.RawMessage -} - -// ConnectorResponsePropertiesCasesWebhook defines model for connector_response_properties_cases_webhook. 
-type ConnectorResponsePropertiesCasesWebhook struct { - // Config Defines properties for connectors when type is `.cases-webhook`. - Config ConfigPropertiesCasesWebhook `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesCasesWebhookConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesCasesWebhookConnectorTypeId The type of connector. -type ConnectorResponsePropertiesCasesWebhookConnectorTypeId string - -// ConnectorResponsePropertiesEmail defines model for connector_response_properties_email. -type ConnectorResponsePropertiesEmail struct { - // Config Defines properties for connectors when type is `.email`. - Config ConfigPropertiesEmail `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesEmailConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesEmailConnectorTypeId The type of connector. -type ConnectorResponsePropertiesEmailConnectorTypeId string - -// ConnectorResponsePropertiesGemini defines model for connector_response_properties_gemini. -type ConnectorResponsePropertiesGemini struct { - // Config Defines properties for connectors when type is `.gemini`. - Config ConfigPropertiesGemini `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesGeminiConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. 
- IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesGeminiConnectorTypeId The type of connector. -type ConnectorResponsePropertiesGeminiConnectorTypeId string - -// ConnectorResponsePropertiesIndex defines model for connector_response_properties_index. -type ConnectorResponsePropertiesIndex struct { - // Config Defines properties for connectors when type is `.index`. - Config ConfigPropertiesIndex `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesIndexConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesIndexConnectorTypeId The type of connector. -type ConnectorResponsePropertiesIndexConnectorTypeId string - -// ConnectorResponsePropertiesJira defines model for connector_response_properties_jira. -type ConnectorResponsePropertiesJira struct { - // Config Defines properties for connectors when type is `.jira`. - Config ConfigPropertiesJira `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesJiraConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesJiraConnectorTypeId The type of connector. -type ConnectorResponsePropertiesJiraConnectorTypeId string - -// ConnectorResponsePropertiesOpsgenie defines model for connector_response_properties_opsgenie. -type ConnectorResponsePropertiesOpsgenie struct { - // Config Defines properties for connectors when type is `.opsgenie`. - Config ConfigPropertiesOpsgenie `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesOpsgenieConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. 
Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesOpsgenieConnectorTypeId The type of connector. -type ConnectorResponsePropertiesOpsgenieConnectorTypeId string - -// ConnectorResponsePropertiesPagerduty defines model for connector_response_properties_pagerduty. -type ConnectorResponsePropertiesPagerduty struct { - // Config Defines properties for connectors when type is `.pagerduty`. - Config ConfigPropertiesPagerduty `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesPagerdutyConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesPagerdutyConnectorTypeId The type of connector. -type ConnectorResponsePropertiesPagerdutyConnectorTypeId string - -// ConnectorResponsePropertiesResilient defines model for connector_response_properties_resilient. -type ConnectorResponsePropertiesResilient struct { - // Config Defines properties for connectors when type is `.resilient`. - Config ConfigPropertiesResilient `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesResilientConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesResilientConnectorTypeId The type of connector. -type ConnectorResponsePropertiesResilientConnectorTypeId string - -// ConnectorResponsePropertiesServerlog defines model for connector_response_properties_serverlog. -type ConnectorResponsePropertiesServerlog struct { - Config *map[string]interface{} `json:"config"` - - // ConnectorTypeId The type of connector. 
- ConnectorTypeId ConnectorResponsePropertiesServerlogConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesServerlogConnectorTypeId The type of connector. -type ConnectorResponsePropertiesServerlogConnectorTypeId string - -// ConnectorResponsePropertiesServicenow defines model for connector_response_properties_servicenow. -type ConnectorResponsePropertiesServicenow struct { - // Config Defines properties for connectors when type is `.servicenow`. - Config ConfigPropertiesServicenow `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesServicenowConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesServicenowConnectorTypeId The type of connector. -type ConnectorResponsePropertiesServicenowConnectorTypeId string - -// ConnectorResponsePropertiesServicenowItom defines model for connector_response_properties_servicenow_itom. -type ConnectorResponsePropertiesServicenowItom struct { - // Config Defines properties for connectors when type is `.servicenow`. - Config ConfigPropertiesServicenowItom `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesServicenowItomConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesServicenowItomConnectorTypeId The type of connector. 
-type ConnectorResponsePropertiesServicenowItomConnectorTypeId string - -// ConnectorResponsePropertiesServicenowSir defines model for connector_response_properties_servicenow_sir. -type ConnectorResponsePropertiesServicenowSir struct { - // Config Defines properties for connectors when type is `.servicenow`. - Config ConfigPropertiesServicenow `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesServicenowSirConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesServicenowSirConnectorTypeId The type of connector. -type ConnectorResponsePropertiesServicenowSirConnectorTypeId string - -// ConnectorResponsePropertiesSlack defines model for connector_response_properties_slack. -type ConnectorResponsePropertiesSlack struct { - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesSlackConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesSlackConnectorTypeId The type of connector. -type ConnectorResponsePropertiesSlackConnectorTypeId string - -// ConnectorResponsePropertiesSlackApi defines model for connector_response_properties_slack_api. -type ConnectorResponsePropertiesSlackApi struct { - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesSlackApiConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. 
- Name string `json:"name"` -} - -// ConnectorResponsePropertiesSlackApiConnectorTypeId The type of connector. -type ConnectorResponsePropertiesSlackApiConnectorTypeId string - -// ConnectorResponsePropertiesSwimlane defines model for connector_response_properties_swimlane. -type ConnectorResponsePropertiesSwimlane struct { - // Config Defines properties for connectors when type is `.swimlane`. - Config ConfigPropertiesSwimlane `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesSwimlaneConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesSwimlaneConnectorTypeId The type of connector. -type ConnectorResponsePropertiesSwimlaneConnectorTypeId string - -// ConnectorResponsePropertiesTeams defines model for connector_response_properties_teams. -type ConnectorResponsePropertiesTeams struct { - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesTeamsConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesTeamsConnectorTypeId The type of connector. -type ConnectorResponsePropertiesTeamsConnectorTypeId string - -// ConnectorResponsePropertiesTines defines model for connector_response_properties_tines. -type ConnectorResponsePropertiesTines struct { - // Config Defines properties for connectors when type is `.tines`. - Config ConfigPropertiesTines `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesTinesConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. 
If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesTinesConnectorTypeId The type of connector. -type ConnectorResponsePropertiesTinesConnectorTypeId string - -// ConnectorResponsePropertiesWebhook defines model for connector_response_properties_webhook. -type ConnectorResponsePropertiesWebhook struct { - // Config Defines properties for connectors when type is `.webhook`. - Config ConfigPropertiesWebhook `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesWebhookConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesWebhookConnectorTypeId The type of connector. -type ConnectorResponsePropertiesWebhookConnectorTypeId string - -// ConnectorResponsePropertiesXmatters defines model for connector_response_properties_xmatters. -type ConnectorResponsePropertiesXmatters struct { - // Config Defines properties for connectors when type is `.xmatters`. - Config ConfigPropertiesXmatters `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId ConnectorResponsePropertiesXmattersConnectorTypeId `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// ConnectorResponsePropertiesXmattersConnectorTypeId The type of connector. -type ConnectorResponsePropertiesXmattersConnectorTypeId string - -// ConnectorTypes The type of connector. For example, `.email`, `.index`, `.jira`, `.opsgenie`, or `.server-log`. -type ConnectorTypes string - -// CreateConnectorRequestCasesWebhook The Webhook - Case Management connector uses axios to send POST, PUT, and GET requests to a case management RESTful API web service. -type CreateConnectorRequestCasesWebhook struct { - // Config Defines properties for connectors when type is `.cases-webhook`. - Config ConfigPropertiesCasesWebhook `json:"config"` - - // ConnectorTypeId The type of connector. 
- ConnectorTypeId CreateConnectorRequestCasesWebhookConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - Secrets *SecretsPropertiesCasesWebhook `json:"secrets,omitempty"` -} - -// CreateConnectorRequestCasesWebhookConnectorTypeId The type of connector. -type CreateConnectorRequestCasesWebhookConnectorTypeId string - -// CreateConnectorRequestEmail The email connector uses the SMTP protocol to send mail messages, using an integration of Nodemailer. An exception is Microsoft Exchange, which uses HTTP protocol for sending emails, Send mail. Email message text is sent as both plain text and html text. -type CreateConnectorRequestEmail struct { - // Config Defines properties for connectors when type is `.email`. - Config ConfigPropertiesEmail `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestEmailConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.email`. - Secrets SecretsPropertiesEmail `json:"secrets"` -} - -// CreateConnectorRequestEmailConnectorTypeId The type of connector. -type CreateConnectorRequestEmailConnectorTypeId string - -// CreateConnectorRequestGemini The Google Gemini connector uses axios to send a POST request to Google Gemini. -type CreateConnectorRequestGemini struct { - // Config Defines properties for connectors when type is `.gemini`. - Config ConfigPropertiesGemini `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestGeminiConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.gemini`. - Secrets SecretsPropertiesGemini `json:"secrets"` -} - -// CreateConnectorRequestGeminiConnectorTypeId The type of connector. -type CreateConnectorRequestGeminiConnectorTypeId string - -// CreateConnectorRequestIndex The index connector indexes a document into Elasticsearch. -type CreateConnectorRequestIndex struct { - // Config Defines properties for connectors when type is `.index`. - Config ConfigPropertiesIndex `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestIndexConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// CreateConnectorRequestIndexConnectorTypeId The type of connector. -type CreateConnectorRequestIndexConnectorTypeId string - -// CreateConnectorRequestJira The Jira connector uses the REST API v2 to create Jira issues. -type CreateConnectorRequestJira struct { - // Config Defines properties for connectors when type is `.jira`. - Config ConfigPropertiesJira `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestJiraConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.jira`. - Secrets SecretsPropertiesJira `json:"secrets"` -} - -// CreateConnectorRequestJiraConnectorTypeId The type of connector. -type CreateConnectorRequestJiraConnectorTypeId string - -// CreateConnectorRequestOpsgenie The Opsgenie connector uses the Opsgenie alert API. -type CreateConnectorRequestOpsgenie struct { - // Config Defines properties for connectors when type is `.opsgenie`. 
- Config ConfigPropertiesOpsgenie `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestOpsgenieConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.opsgenie`. - Secrets SecretsPropertiesOpsgenie `json:"secrets"` -} - -// CreateConnectorRequestOpsgenieConnectorTypeId The type of connector. -type CreateConnectorRequestOpsgenieConnectorTypeId string - -// CreateConnectorRequestPagerduty The PagerDuty connector uses the v2 Events API to trigger, acknowledge, and resolve PagerDuty alerts. -type CreateConnectorRequestPagerduty struct { - // Config Defines properties for connectors when type is `.pagerduty`. - Config ConfigPropertiesPagerduty `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestPagerdutyConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.pagerduty`. - Secrets SecretsPropertiesPagerduty `json:"secrets"` -} - -// CreateConnectorRequestPagerdutyConnectorTypeId The type of connector. -type CreateConnectorRequestPagerdutyConnectorTypeId string - -// CreateConnectorRequestResilient The IBM Resilient connector uses the RESILIENT REST v2 to create IBM Resilient incidents. -type CreateConnectorRequestResilient struct { - // Config Defines properties for connectors when type is `.resilient`. - Config ConfigPropertiesResilient `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestResilientConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.resilient`. - Secrets SecretsPropertiesResilient `json:"secrets"` -} - -// CreateConnectorRequestResilientConnectorTypeId The type of connector. -type CreateConnectorRequestResilientConnectorTypeId string - -// CreateConnectorRequestServerlog This connector writes an entry to the Kibana server log. -type CreateConnectorRequestServerlog struct { - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestServerlogConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// CreateConnectorRequestServerlogConnectorTypeId The type of connector. -type CreateConnectorRequestServerlogConnectorTypeId string - -// CreateConnectorRequestServicenow The ServiceNow ITSM connector uses the import set API to create ServiceNow incidents. You can use the connector for rule actions and cases. -type CreateConnectorRequestServicenow struct { - // Config Defines properties for connectors when type is `.servicenow`. - Config ConfigPropertiesServicenow `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestServicenowConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. - Secrets SecretsPropertiesServicenow `json:"secrets"` -} - -// CreateConnectorRequestServicenowConnectorTypeId The type of connector. 
-type CreateConnectorRequestServicenowConnectorTypeId string - -// CreateConnectorRequestServicenowItom The ServiceNow ITOM connector uses the event API to create ServiceNow events. You can use the connector for rule actions. -type CreateConnectorRequestServicenowItom struct { - // Config Defines properties for connectors when type is `.servicenow`. - Config ConfigPropertiesServicenowItom `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestServicenowItomConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. - Secrets SecretsPropertiesServicenow `json:"secrets"` -} - -// CreateConnectorRequestServicenowItomConnectorTypeId The type of connector. -type CreateConnectorRequestServicenowItomConnectorTypeId string - -// CreateConnectorRequestServicenowSir The ServiceNow SecOps connector uses the import set API to create ServiceNow security incidents. You can use the connector for rule actions and cases. -type CreateConnectorRequestServicenowSir struct { - // Config Defines properties for connectors when type is `.servicenow`. - Config ConfigPropertiesServicenow `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestServicenowSirConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. - Secrets SecretsPropertiesServicenow `json:"secrets"` -} - -// CreateConnectorRequestServicenowSirConnectorTypeId The type of connector. -type CreateConnectorRequestServicenowSirConnectorTypeId string - -// CreateConnectorRequestSlack The Slack connector uses Slack Incoming Webhooks. -type CreateConnectorRequestSlack struct { - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestSlackConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.slack`. - Secrets SecretsPropertiesSlack `json:"secrets"` -} - -// CreateConnectorRequestSlackConnectorTypeId The type of connector. -type CreateConnectorRequestSlackConnectorTypeId string - -// CreateConnectorRequestSlackApi The Slack connector uses Slack Incoming Webhooks. -type CreateConnectorRequestSlackApi struct { - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestSlackApiConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.slack`. - Secrets SecretsPropertiesSlackApi `json:"secrets"` -} - -// CreateConnectorRequestSlackApiConnectorTypeId The type of connector. -type CreateConnectorRequestSlackApiConnectorTypeId string - -// CreateConnectorRequestSwimlane The Swimlane connector uses the Swimlane REST API to create Swimlane records. -type CreateConnectorRequestSwimlane struct { - // Config Defines properties for connectors when type is `.swimlane`. - Config ConfigPropertiesSwimlane `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestSwimlaneConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. 
- Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.swimlane`. - Secrets SecretsPropertiesSwimlane `json:"secrets"` -} - -// CreateConnectorRequestSwimlaneConnectorTypeId The type of connector. -type CreateConnectorRequestSwimlaneConnectorTypeId string - -// CreateConnectorRequestTeams The Microsoft Teams connector uses Incoming Webhooks. -type CreateConnectorRequestTeams struct { - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestTeamsConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.teams`. - Secrets SecretsPropertiesTeams `json:"secrets"` -} - -// CreateConnectorRequestTeamsConnectorTypeId The type of connector. -type CreateConnectorRequestTeamsConnectorTypeId string - -// CreateConnectorRequestTines The Tines connector uses Tines Webhook actions to send events via POST request. -type CreateConnectorRequestTines struct { - // Config Defines properties for connectors when type is `.tines`. - Config ConfigPropertiesTines `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestTinesConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.tines`. - Secrets SecretsPropertiesTines `json:"secrets"` -} - -// CreateConnectorRequestTinesConnectorTypeId The type of connector. -type CreateConnectorRequestTinesConnectorTypeId string - -// CreateConnectorRequestWebhook The Webhook connector uses axios to send a POST or PUT request to a web service. -type CreateConnectorRequestWebhook struct { - // Config Defines properties for connectors when type is `.webhook`. - Config ConfigPropertiesWebhook `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestWebhookConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.webhook`. - Secrets SecretsPropertiesWebhook `json:"secrets"` -} - -// CreateConnectorRequestWebhookConnectorTypeId The type of connector. -type CreateConnectorRequestWebhookConnectorTypeId string - -// CreateConnectorRequestXmatters The xMatters connector uses the xMatters Workflow for Elastic to send actionable alerts to on-call xMatters resources. -type CreateConnectorRequestXmatters struct { - // Config Defines properties for connectors when type is `.xmatters`. - Config ConfigPropertiesXmatters `json:"config"` - - // ConnectorTypeId The type of connector. - ConnectorTypeId CreateConnectorRequestXmattersConnectorTypeId `json:"connector_type_id"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.xmatters`. - Secrets SecretsPropertiesXmatters `json:"secrets"` -} - -// CreateConnectorRequestXmattersConnectorTypeId The type of connector. -type CreateConnectorRequestXmattersConnectorTypeId string - -// Features The feature that uses the connector. Valid values are `alerting`, `cases`, `uptime`, and `siem`. -type Features string - -// IsDeprecated Indicates whether the connector type is deprecated. -type IsDeprecated = bool - -// IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. 
-type IsMissingSecrets = bool - -// IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. -type IsPreconfigured = bool - -// LegacyRunConnectorGeneralResponse defines model for legacy_run_connector_general_response. -type LegacyRunConnectorGeneralResponse struct { - ActionId *string `json:"actionId,omitempty"` - Data *LegacyRunConnectorGeneralResponse_Data `json:"data,omitempty"` - - // Status The status of the action. - Status *string `json:"status,omitempty"` -} - -// LegacyRunConnectorGeneralResponseData0 Information returned from the action. -type LegacyRunConnectorGeneralResponseData0 map[string]interface{} - -// LegacyRunConnectorGeneralResponseData1 An array of information returned from the action. -type LegacyRunConnectorGeneralResponseData1 = []map[string]interface{} - -// LegacyRunConnectorGeneralResponse_Data defines model for LegacyRunConnectorGeneralResponse.Data. -type LegacyRunConnectorGeneralResponse_Data struct { - union json.RawMessage -} - -// ObjectNotFoundError defines model for object_not_found_error. -type ObjectNotFoundError struct { - Error *ObjectNotFoundErrorError `json:"error,omitempty"` - Message *string `json:"message,omitempty"` - StatusCode *ObjectNotFoundErrorStatusCode `json:"statusCode,omitempty"` -} - -// ObjectNotFoundErrorError defines model for ObjectNotFoundError.Error. -type ObjectNotFoundErrorError string - -// ObjectNotFoundErrorStatusCode defines model for ObjectNotFoundError.StatusCode. -type ObjectNotFoundErrorStatusCode int - -// RunConnectorGeneralResponse defines model for run_connector_general_response. -type RunConnectorGeneralResponse struct { - // ConnectorId The identifier for the connector. - ConnectorId string `json:"connector_id"` - Data *RunConnectorGeneralResponse_Data `json:"data,omitempty"` - - // Status The status of the action. - Status RunConnectorGeneralResponseStatus `json:"status"` -} - -// RunConnectorGeneralResponseData0 Information returned from the action. -type RunConnectorGeneralResponseData0 map[string]interface{} - -// RunConnectorGeneralResponseData1 An array of information returned from the action. -type RunConnectorGeneralResponseData1 = []map[string]interface{} - -// RunConnectorGeneralResponse_Data defines model for RunConnectorGeneralResponse.Data. -type RunConnectorGeneralResponse_Data struct { - union json.RawMessage -} - -// RunConnectorGeneralResponseStatus The status of the action. -type RunConnectorGeneralResponseStatus string - -// RunConnectorParamsDocuments Test an action that indexes a document into Elasticsearch. -type RunConnectorParamsDocuments struct { - // Documents The documents in JSON format for index connectors. - Documents []map[string]interface{} `json:"documents"` -} - -// RunConnectorParamsLevelMessage Test an action that writes an entry to the Kibana server log. -type RunConnectorParamsLevelMessage struct { - // Level The log level of the message for server log connectors. - Level *RunConnectorParamsLevelMessageLevel `json:"level,omitempty"` - - // Message The message for server log connectors. - Message string `json:"message"` -} - -// RunConnectorParamsLevelMessageLevel The log level of the message for server log connectors. -type RunConnectorParamsLevelMessageLevel string - -// RunConnectorSubactionAddevent The `addEvent` subaction for ServiceNow ITOM connectors. -type RunConnectorSubactionAddevent struct { - // SubAction The action to test. 
- SubAction RunConnectorSubactionAddeventSubAction `json:"subAction"` - - // SubActionParams The set of configuration properties for the action. - SubActionParams *struct { - // AdditionalInfo Additional information about the event. - AdditionalInfo *string `json:"additional_info,omitempty"` - - // Description The details about the event. - Description *string `json:"description,omitempty"` - - // EventClass A specific instance of the source. - EventClass *string `json:"event_class,omitempty"` - - // MessageKey All actions sharing this key are associated with the same ServiceNow alert. The default value is `:`. - MessageKey *string `json:"message_key,omitempty"` - - // MetricName The name of the metric. - MetricName *string `json:"metric_name,omitempty"` - - // Node The host that the event was triggered for. - Node *string `json:"node,omitempty"` - - // Resource The name of the resource. - Resource *string `json:"resource,omitempty"` - - // Severity The severity of the event. - Severity *string `json:"severity,omitempty"` - - // Source The name of the event source type. - Source *string `json:"source,omitempty"` - - // TimeOfEvent The time of the event. - TimeOfEvent *string `json:"time_of_event,omitempty"` - - // Type The type of event. - Type *string `json:"type,omitempty"` - } `json:"subActionParams,omitempty"` -} - -// RunConnectorSubactionAddeventSubAction The action to test. -type RunConnectorSubactionAddeventSubAction string - -// RunConnectorSubactionClosealert The `closeAlert` subaction for Opsgenie connectors. -type RunConnectorSubactionClosealert struct { - // SubAction The action to test. - SubAction RunConnectorSubactionClosealertSubAction `json:"subAction"` - SubActionParams struct { - // Alias The unique identifier used for alert deduplication in Opsgenie. The alias must match the value used when creating the alert. - Alias string `json:"alias"` - - // Note Additional information for the alert. - Note *string `json:"note,omitempty"` - - // Source The display name for the source of the alert. - Source *string `json:"source,omitempty"` - - // User The display name for the owner. - User *string `json:"user,omitempty"` - } `json:"subActionParams"` -} - -// RunConnectorSubactionClosealertSubAction The action to test. -type RunConnectorSubactionClosealertSubAction string - -// RunConnectorSubactionCreatealert The `createAlert` subaction for Opsgenie connectors. -type RunConnectorSubactionCreatealert struct { - // SubAction The action to test. - SubAction RunConnectorSubactionCreatealertSubAction `json:"subAction"` - SubActionParams struct { - // Actions The custom actions available to the alert. - Actions *[]string `json:"actions,omitempty"` - - // Alias The unique identifier used for alert deduplication in Opsgenie. - Alias *string `json:"alias,omitempty"` - - // Description A description that provides detailed information about the alert. - Description *string `json:"description,omitempty"` - - // Details The custom properties of the alert. - Details *map[string]interface{} `json:"details,omitempty"` - - // Entity The domain of the alert. For example, the application or server name. - Entity *string `json:"entity,omitempty"` - - // Message The alert message. - Message string `json:"message"` - - // Note Additional information for the alert. - Note *string `json:"note,omitempty"` - - // Priority The priority level for the alert. 
- Priority *RunConnectorSubactionCreatealertSubActionParamsPriority `json:"priority,omitempty"` - - // Responders The entities to receive notifications about the alert. If `type` is `user`, either `id` or `username` is required. If `type` is `team`, either `id` or `name` is required. - Responders *[]struct { - // Id The identifier for the entity. - Id *string `json:"id,omitempty"` - - // Name The name of the entity. - Name *string `json:"name,omitempty"` - - // Type The type of responders, in this case `escalation`. - Type *RunConnectorSubactionCreatealertSubActionParamsRespondersType `json:"type,omitempty"` - - // Username A valid email address for the user. - Username *string `json:"username,omitempty"` - } `json:"responders,omitempty"` - - // Source The display name for the source of the alert. - Source *string `json:"source,omitempty"` - - // Tags The tags for the alert. - Tags *[]string `json:"tags,omitempty"` - - // User The display name for the owner. - User *string `json:"user,omitempty"` - - // VisibleTo The teams and users that the alert will be visible to without sending a notification. Only one of `id`, `name`, or `username` is required. - VisibleTo *[]struct { - // Id The identifier for the entity. - Id *string `json:"id,omitempty"` - - // Name The name of the entity. - Name *string `json:"name,omitempty"` - - // Type Valid values are `team` and `user`. - Type RunConnectorSubactionCreatealertSubActionParamsVisibleToType `json:"type"` - - // Username The user name. This property is required only when the `type` is `user`. - Username *string `json:"username,omitempty"` - } `json:"visibleTo,omitempty"` - } `json:"subActionParams"` -} - -// RunConnectorSubactionCreatealertSubAction The action to test. -type RunConnectorSubactionCreatealertSubAction string - -// RunConnectorSubactionCreatealertSubActionParamsPriority The priority level for the alert. -type RunConnectorSubactionCreatealertSubActionParamsPriority string - -// RunConnectorSubactionCreatealertSubActionParamsRespondersType The type of responders, in this case `escalation`. -type RunConnectorSubactionCreatealertSubActionParamsRespondersType string - -// RunConnectorSubactionCreatealertSubActionParamsVisibleToType Valid values are `team` and `user`. -type RunConnectorSubactionCreatealertSubActionParamsVisibleToType string - -// RunConnectorSubactionFieldsbyissuetype The `fieldsByIssueType` subaction for Jira connectors. -type RunConnectorSubactionFieldsbyissuetype struct { - // SubAction The action to test. - SubAction RunConnectorSubactionFieldsbyissuetypeSubAction `json:"subAction"` - SubActionParams struct { - // Id The Jira issue type identifier. - Id string `json:"id"` - } `json:"subActionParams"` -} - -// RunConnectorSubactionFieldsbyissuetypeSubAction The action to test. -type RunConnectorSubactionFieldsbyissuetypeSubAction string - -// RunConnectorSubactionGetchoices The `getChoices` subaction for ServiceNow ITOM, ServiceNow ITSM, and ServiceNow SecOps connectors. -type RunConnectorSubactionGetchoices struct { - // SubAction The action to test. - SubAction RunConnectorSubactionGetchoicesSubAction `json:"subAction"` - - // SubActionParams The set of configuration properties for the action. - SubActionParams struct { - // Fields An array of fields. - Fields []string `json:"fields"` - } `json:"subActionParams"` -} - -// RunConnectorSubactionGetchoicesSubAction The action to test. 
-type RunConnectorSubactionGetchoicesSubAction string - -// RunConnectorSubactionGetfields The `getFields` subaction for Jira, ServiceNow ITSM, and ServiceNow SecOps connectors. -type RunConnectorSubactionGetfields struct { - // SubAction The action to test. - SubAction RunConnectorSubactionGetfieldsSubAction `json:"subAction"` -} - -// RunConnectorSubactionGetfieldsSubAction The action to test. -type RunConnectorSubactionGetfieldsSubAction string - -// RunConnectorSubactionGetincident The `getIncident` subaction for Jira, ServiceNow ITSM, and ServiceNow SecOps connectors. -type RunConnectorSubactionGetincident struct { - // SubAction The action to test. - SubAction RunConnectorSubactionGetincidentSubAction `json:"subAction"` - SubActionParams struct { - // ExternalId The Jira, ServiceNow ITSM, or ServiceNow SecOps issue identifier. - ExternalId string `json:"externalId"` - } `json:"subActionParams"` -} - -// RunConnectorSubactionGetincidentSubAction The action to test. -type RunConnectorSubactionGetincidentSubAction string - -// RunConnectorSubactionIssue The `issue` subaction for Jira connectors. -type RunConnectorSubactionIssue struct { - // SubAction The action to test. - SubAction RunConnectorSubactionIssueSubAction `json:"subAction"` - SubActionParams *struct { - // Id The Jira issue identifier. - Id string `json:"id"` - } `json:"subActionParams,omitempty"` -} - -// RunConnectorSubactionIssueSubAction The action to test. -type RunConnectorSubactionIssueSubAction string - -// RunConnectorSubactionIssues The `issues` subaction for Jira connectors. -type RunConnectorSubactionIssues struct { - // SubAction The action to test. - SubAction RunConnectorSubactionIssuesSubAction `json:"subAction"` - SubActionParams struct { - // Title The title of the Jira issue. - Title string `json:"title"` - } `json:"subActionParams"` -} - -// RunConnectorSubactionIssuesSubAction The action to test. -type RunConnectorSubactionIssuesSubAction string - -// RunConnectorSubactionIssuetypes The `issueTypes` subaction for Jira connectors. -type RunConnectorSubactionIssuetypes struct { - // SubAction The action to test. - SubAction RunConnectorSubactionIssuetypesSubAction `json:"subAction"` -} - -// RunConnectorSubactionIssuetypesSubAction The action to test. -type RunConnectorSubactionIssuetypesSubAction string - -// RunConnectorSubactionPushtoservice The `pushToService` subaction for Jira, ServiceNow ITSM, ServiceNow SecOps, and Swimlane connectors. -type RunConnectorSubactionPushtoservice struct { - // SubAction The action to test. - SubAction RunConnectorSubactionPushtoserviceSubAction `json:"subAction"` - - // SubActionParams The set of configuration properties for the action. - SubActionParams struct { - // Comments Additional information that is sent to Jira, ServiceNow ITSM, ServiceNow SecOps, or Swimlane. - Comments *[]struct { - // Comment A comment related to the incident. For example, describe how to troubleshoot the issue. - Comment *string `json:"comment,omitempty"` - - // CommentId A unique identifier for the comment. - CommentId *int `json:"commentId,omitempty"` - } `json:"comments,omitempty"` - - // Incident Information necessary to create or update a Jira, ServiceNow ITSM, ServiveNow SecOps, or Swimlane incident. - Incident *struct { - // AlertId The alert identifier for Swimlane connectors. - AlertId *string `json:"alertId,omitempty"` - - // CaseId The case identifier for the incident for Swimlane connectors. 
- CaseId *string `json:"caseId,omitempty"` - - // CaseName The case name for the incident for Swimlane connectors. - CaseName *string `json:"caseName,omitempty"` - - // Category The category of the incident for ServiceNow ITSM and ServiceNow SecOps connectors. - Category *string `json:"category,omitempty"` - - // CorrelationDisplay A descriptive label of the alert for correlation purposes for ServiceNow ITSM and ServiceNow SecOps connectors. - CorrelationDisplay *string `json:"correlation_display,omitempty"` - - // CorrelationId The correlation identifier for the security incident for ServiceNow ITSM and ServiveNow SecOps connectors. Connectors using the same correlation ID are associated with the same ServiceNow incident. This value determines whether a new ServiceNow incident is created or an existing one is updated. Modifying this value is optional; if not modified, the rule ID and alert ID are combined as `{{ruleID}}:{{alert ID}}` to form the correlation ID value in ServiceNow. The maximum character length for this value is 100 characters. NOTE: Using the default configuration of `{{ruleID}}:{{alert ID}}` ensures that ServiceNow creates a separate incident record for every generated alert that uses a unique alert ID. If the rule generates multiple alerts that use the same alert IDs, ServiceNow creates and continually updates a single incident record for the alert. - CorrelationId *string `json:"correlation_id,omitempty"` - - // Description The description of the incident for Jira, ServiceNow ITSM, ServiceNow SecOps, and Swimlane connectors. - Description *string `json:"description,omitempty"` - - // DestIp A list of destination IP addresses related to the security incident for ServiceNow SecOps connectors. The IPs are added as observables to the security incident. - DestIp *RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp `json:"dest_ip,omitempty"` - - // ExternalId The Jira, ServiceNow ITSM, or ServiceNow SecOps issue identifier. If present, the incident is updated. Otherwise, a new incident is created. - ExternalId *string `json:"externalId,omitempty"` - - // Impact The impact of the incident for ServiceNow ITSM connectors. - Impact *string `json:"impact,omitempty"` - - // IssueType The type of incident for Jira connectors. For example, 10006. To obtain the list of valid values, set `subAction` to `issueTypes`. - IssueType *int `json:"issueType,omitempty"` - - // Labels The labels for the incident for Jira connectors. NOTE: Labels cannot contain spaces. - Labels *[]string `json:"labels,omitempty"` - - // MalwareHash A list of malware hashes related to the security incident for ServiceNow SecOps connectors. The hashes are added as observables to the security incident. - MalwareHash *RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash `json:"malware_hash,omitempty"` - - // MalwareUrl A list of malware URLs related to the security incident for ServiceNow SecOps connectors. The URLs are added as observables to the security incident. - MalwareUrl *string `json:"malware_url,omitempty"` - - // Parent The ID or key of the parent issue for Jira connectors. Applies only to `Sub-task` types of issues. - Parent *string `json:"parent,omitempty"` - - // Priority The priority of the incident in Jira and ServiceNow SecOps connectors. - Priority *string `json:"priority,omitempty"` - - // RuleName The rule name for Swimlane connectors. 
- RuleName *string `json:"ruleName,omitempty"` - - // Severity The severity of the incident for ServiceNow ITSM and Swimlane connectors. - Severity *string `json:"severity,omitempty"` - - // ShortDescription A short description of the incident for ServiceNow ITSM and ServiceNow SecOps connectors. It is used for searching the contents of the knowledge base. - ShortDescription *string `json:"short_description,omitempty"` - - // SourceIp A list of source IP addresses related to the security incident for ServiceNow SecOps connectors. The IPs are added as observables to the security incident. - SourceIp *RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp `json:"source_ip,omitempty"` - - // Subcategory The subcategory of the incident for ServiceNow ITSM and ServiceNow SecOps connectors. - Subcategory *string `json:"subcategory,omitempty"` - - // Summary A summary of the incident for Jira connectors. - Summary *string `json:"summary,omitempty"` - - // Title A title for the incident for Jira connectors. It is used for searching the contents of the knowledge base. - Title *string `json:"title,omitempty"` - - // Urgency The urgency of the incident for ServiceNow ITSM connectors. - Urgency *string `json:"urgency,omitempty"` - } `json:"incident,omitempty"` - } `json:"subActionParams"` -} - -// RunConnectorSubactionPushtoserviceSubAction The action to test. -type RunConnectorSubactionPushtoserviceSubAction string - -// RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0 defines model for . -type RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0 = string - -// RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1 defines model for . -type RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1 = []string - -// RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp A list of destination IP addresses related to the security incident for ServiceNow SecOps connectors. The IPs are added as observables to the security incident. -type RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp struct { - union json.RawMessage -} - -// RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0 defines model for . -type RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0 = string - -// RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1 defines model for . -type RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1 = []string - -// RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash A list of malware hashes related to the security incident for ServiceNow SecOps connectors. The hashes are added as observables to the security incident. -type RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash struct { - union json.RawMessage -} - -// RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0 defines model for . -type RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0 = string - -// RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1 defines model for . -type RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1 = []string - -// RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp A list of source IP addresses related to the security incident for ServiceNow SecOps connectors. The IPs are added as observables to the security incident. 
-type RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp struct { - union json.RawMessage -} - -// SecretsPropertiesCasesWebhook defines model for secrets_properties_cases_webhook. -type SecretsPropertiesCasesWebhook struct { - // Password The password for HTTP basic authentication. If `hasAuth` is set to `true`, this property is required. - Password *string `json:"password,omitempty"` - - // User The username for HTTP basic authentication. If `hasAuth` is set to `true`, this property is required. - User *string `json:"user,omitempty"` -} - -// SecretsPropertiesEmail Defines secrets for connectors when type is `.email`. -type SecretsPropertiesEmail map[string]interface{} - -// SecretsPropertiesGemini Defines secrets for connectors when type is `.gemini`. -type SecretsPropertiesGemini struct { - // CredentialsJson The service account credentials JSON file. The service account should have Vertex AI user IAM role assigned to it. - CredentialsJson string `json:"credentialsJson"` -} - -// SecretsPropertiesJira Defines secrets for connectors when type is `.jira`. -type SecretsPropertiesJira struct { - // ApiToken The Jira API authentication token for HTTP basic authentication. - ApiToken string `json:"apiToken"` - - // Email The account email for HTTP Basic authentication. - Email string `json:"email"` -} - -// SecretsPropertiesOpsgenie Defines secrets for connectors when type is `.opsgenie`. -type SecretsPropertiesOpsgenie struct { - // ApiKey The Opsgenie API authentication key for HTTP Basic authentication. - ApiKey string `json:"apiKey"` -} - -// SecretsPropertiesPagerduty Defines secrets for connectors when type is `.pagerduty`. -type SecretsPropertiesPagerduty map[string]interface{} - -// SecretsPropertiesResilient Defines secrets for connectors when type is `.resilient`. -type SecretsPropertiesResilient struct { - // ApiKeyId The authentication key ID for HTTP Basic authentication. - ApiKeyId string `json:"apiKeyId"` - - // ApiKeySecret The authentication key secret for HTTP Basic authentication. - ApiKeySecret string `json:"apiKeySecret"` -} - -// SecretsPropertiesServicenow Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. -type SecretsPropertiesServicenow struct { - // ClientSecret The client secret assigned to your OAuth application. This property is required when `isOAuth` is `true`. - ClientSecret *string `json:"clientSecret,omitempty"` - - // Password The password for HTTP basic authentication. This property is required when `isOAuth` is `false`. - Password *string `json:"password,omitempty"` - - // PrivateKey The RSA private key that you created for use in ServiceNow. This property is required when `isOAuth` is `true`. - PrivateKey *string `json:"privateKey,omitempty"` - - // PrivateKeyPassword The password for the RSA private key. This property is required when `isOAuth` is `true` and you set a password on your private key. - PrivateKeyPassword *string `json:"privateKeyPassword,omitempty"` - - // Username The username for HTTP basic authentication. This property is required when `isOAuth` is `false`. - Username *string `json:"username,omitempty"` -} - -// SecretsPropertiesSlack Defines secrets for connectors when type is `.slack`. -type SecretsPropertiesSlack map[string]interface{} - -// SecretsPropertiesSlackApi Defines secrets for connectors when type is `.slack`. -type SecretsPropertiesSlackApi struct { - // Token Slack bot user OAuth token. 
- Token string `json:"token"` -} - -// SecretsPropertiesSwimlane Defines secrets for connectors when type is `.swimlane`. -type SecretsPropertiesSwimlane struct { - // ApiToken Swimlane API authentication token. - ApiToken *string `json:"apiToken,omitempty"` -} - -// SecretsPropertiesTeams Defines secrets for connectors when type is `.teams`. -type SecretsPropertiesTeams map[string]interface{} - -// SecretsPropertiesTines Defines secrets for connectors when type is `.tines`. -type SecretsPropertiesTines map[string]interface{} - -// SecretsPropertiesWebhook Defines secrets for connectors when type is `.webhook`. -type SecretsPropertiesWebhook map[string]interface{} - -// SecretsPropertiesXmatters Defines secrets for connectors when type is `.xmatters`. -type SecretsPropertiesXmatters map[string]interface{} - -// UpdateConnectorRequestCasesWebhook defines model for update_connector_request_cases_webhook. -type UpdateConnectorRequestCasesWebhook struct { - // Config Defines properties for connectors when type is `.cases-webhook`. - Config ConfigPropertiesCasesWebhook `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - Secrets *SecretsPropertiesCasesWebhook `json:"secrets,omitempty"` -} - -// UpdateConnectorRequestEmail defines model for update_connector_request_email. -type UpdateConnectorRequestEmail struct { - // Config Defines properties for connectors when type is `.email`. - Config ConfigPropertiesEmail `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.email`. - Secrets SecretsPropertiesEmail `json:"secrets"` -} - -// UpdateConnectorRequestGemini defines model for update_connector_request_gemini. -type UpdateConnectorRequestGemini struct { - // Config Defines properties for connectors when type is `.gemini`. - Config ConfigPropertiesGemini `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.gemini`. - Secrets *SecretsPropertiesGemini `json:"secrets,omitempty"` -} - -// UpdateConnectorRequestIndex defines model for update_connector_request_index. -type UpdateConnectorRequestIndex struct { - // Config Defines properties for connectors when type is `.index`. - Config ConfigPropertiesIndex `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` -} - -// UpdateConnectorRequestJira defines model for update_connector_request_jira. -type UpdateConnectorRequestJira struct { - // Config Defines properties for connectors when type is `.jira`. - Config ConfigPropertiesJira `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.jira`. - Secrets SecretsPropertiesJira `json:"secrets"` -} - -// UpdateConnectorRequestOpsgenie defines model for update_connector_request_opsgenie. -type UpdateConnectorRequestOpsgenie struct { - // Config Defines properties for connectors when type is `.opsgenie`. - Config ConfigPropertiesOpsgenie `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.opsgenie`. - Secrets SecretsPropertiesOpsgenie `json:"secrets"` -} - -// UpdateConnectorRequestPagerduty The PagerDuty connector uses the v2 Events API to trigger, acknowledge, and resolve PagerDuty alerts. 
-type UpdateConnectorRequestPagerduty struct { - // Config Defines properties for connectors when type is `.pagerduty`. - Config ConfigPropertiesPagerduty `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.pagerduty`. - Secrets SecretsPropertiesPagerduty `json:"secrets"` -} - -// UpdateConnectorRequestResilient defines model for update_connector_request_resilient. -type UpdateConnectorRequestResilient struct { - // Config Defines properties for connectors when type is `.resilient`. - Config ConfigPropertiesResilient `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.resilient`. - Secrets SecretsPropertiesResilient `json:"secrets"` -} - -// UpdateConnectorRequestServerlog defines model for update_connector_request_serverlog. -type UpdateConnectorRequestServerlog struct { - // Name The display name for the connector. - Name string `json:"name"` -} - -// UpdateConnectorRequestServicenow defines model for update_connector_request_servicenow. -type UpdateConnectorRequestServicenow struct { - // Config Defines properties for connectors when type is `.servicenow`. - Config ConfigPropertiesServicenow `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. - Secrets SecretsPropertiesServicenow `json:"secrets"` -} - -// UpdateConnectorRequestServicenowItom defines model for update_connector_request_servicenow_itom. -type UpdateConnectorRequestServicenowItom struct { - // Config Defines properties for connectors when type is `.servicenow`. - Config ConfigPropertiesServicenowItom `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. - Secrets SecretsPropertiesServicenow `json:"secrets"` -} - -// UpdateConnectorRequestServicenowSir The ServiceNow SecOps connector uses the import set API to create ServiceNow security incidents. You can use the connector for rule actions and cases. -type UpdateConnectorRequestServicenowSir struct { - // Config Defines properties for connectors when type is `.servicenow`. - Config ConfigPropertiesServicenow `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. - Secrets SecretsPropertiesServicenow `json:"secrets"` -} - -// UpdateConnectorRequestSlack The Slack connector uses Slack Incoming Webhooks. -type UpdateConnectorRequestSlack struct { - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.slack`. - Secrets SecretsPropertiesSlack `json:"secrets"` -} - -// UpdateConnectorRequestSlackApi defines model for update_connector_request_slack_api. -type UpdateConnectorRequestSlackApi struct { - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.slack`. - Secrets SecretsPropertiesSlackApi `json:"secrets"` -} - -// UpdateConnectorRequestSwimlane defines model for update_connector_request_swimlane. 
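// --- Illustrative sketch, not part of this patch or the generated file ---
// Update request bodies pair a display name with the type-specific config and
// secrets structs above. For the Slack API connector only the name and bot
// token are needed; exampleSlackApiUpdate is a hypothetical helper name.
func exampleSlackApiUpdate(token string) UpdateConnectorRequestSlackApi {
	return UpdateConnectorRequestSlackApi{
		Name:    "security-notifications",
		Secrets: SecretsPropertiesSlackApi{Token: token},
	}
}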
-type UpdateConnectorRequestSwimlane struct { - // Config Defines properties for connectors when type is `.swimlane`. - Config ConfigPropertiesSwimlane `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.swimlane`. - Secrets SecretsPropertiesSwimlane `json:"secrets"` -} - -// UpdateConnectorRequestTeams The Microsoft Teams connector uses Incoming Webhooks. -type UpdateConnectorRequestTeams struct { - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.teams`. - Secrets SecretsPropertiesTeams `json:"secrets"` -} - -// UpdateConnectorRequestTines The Tines connector uses Tines Webhook actions to send events via POST request. -type UpdateConnectorRequestTines struct { - // Config Defines properties for connectors when type is `.tines`. - Config ConfigPropertiesTines `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.tines`. - Secrets SecretsPropertiesTines `json:"secrets"` -} - -// UpdateConnectorRequestWebhook The Webhook connector uses axios to send a POST or PUT request to a web service. -type UpdateConnectorRequestWebhook struct { - // Config Defines properties for connectors when type is `.webhook`. - Config ConfigPropertiesWebhook `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.webhook`. - Secrets SecretsPropertiesWebhook `json:"secrets"` -} - -// UpdateConnectorRequestXmatters The xMatters connector uses the xMatters Workflow for Elastic to send actionable alerts to on-call xMatters resources. -type UpdateConnectorRequestXmatters struct { - // Config Defines properties for connectors when type is `.xmatters`. - Config ConfigPropertiesXmatters `json:"config"` - - // Name The display name for the connector. - Name string `json:"name"` - - // Secrets Defines secrets for connectors when type is `.xmatters`. - Secrets SecretsPropertiesXmatters `json:"secrets"` -} - -// ActionId defines model for action_id. -type ActionId = string - -// ConnectorId defines model for connector_id. -type ConnectorId = string - -// KbnXsrf defines model for kbn_xsrf. -type KbnXsrf = string - -// SpaceId defines model for space_id. -type SpaceId = string - -// N200Actions The properties vary depending on the action type. -type N200Actions = ActionResponseProperties - -// LegacyCreateConnectorJSONBody defines parameters for LegacyCreateConnector. -type LegacyCreateConnectorJSONBody struct { - // ActionTypeId The connector type identifier. - ActionTypeId *string `json:"actionTypeId,omitempty"` - - // Config The configuration for the connector. Configuration properties vary depending on the connector type. - Config *map[string]interface{} `json:"config,omitempty"` - - // Name The display name for the connector. - Name *string `json:"name,omitempty"` - - // Secrets The secrets configuration for the connector. Secrets configuration properties vary depending on the connector type. NOTE: Remember these values. You must provide them each time you update the connector. - Secrets *map[string]interface{} `json:"secrets,omitempty"` -} - -// LegacyCreateConnectorParams defines parameters for LegacyCreateConnector. 
-type LegacyCreateConnectorParams struct { - // KbnXsrf Cross-site request forgery protection - KbnXsrf KbnXsrf `json:"kbn-xsrf"` -} - -// LegacyDeleteConnectorParams defines parameters for LegacyDeleteConnector. -type LegacyDeleteConnectorParams struct { - // KbnXsrf Cross-site request forgery protection - KbnXsrf KbnXsrf `json:"kbn-xsrf"` -} - -// LegacyUpdateConnectorJSONBody defines parameters for LegacyUpdateConnector. -type LegacyUpdateConnectorJSONBody struct { - // Config The new connector configuration. Configuration properties vary depending on the connector type. - Config *map[string]interface{} `json:"config,omitempty"` - - // Name The new name for the connector. - Name *string `json:"name,omitempty"` - - // Secrets The updated secrets configuration for the connector. Secrets properties vary depending on the connector type. - Secrets *map[string]interface{} `json:"secrets,omitempty"` -} - -// LegacyUpdateConnectorParams defines parameters for LegacyUpdateConnector. -type LegacyUpdateConnectorParams struct { - // KbnXsrf Cross-site request forgery protection - KbnXsrf KbnXsrf `json:"kbn-xsrf"` -} - -// LegacyRunConnectorJSONBody defines parameters for LegacyRunConnector. -type LegacyRunConnectorJSONBody struct { - // Params The parameters of the connector. Parameter properties vary depending on the connector type. - Params map[string]interface{} `json:"params"` -} - -// LegacyRunConnectorParams defines parameters for LegacyRunConnector. -type LegacyRunConnectorParams struct { - // KbnXsrf Cross-site request forgery protection - KbnXsrf KbnXsrf `json:"kbn-xsrf"` -} - -// CreateConnectorJSONBody defines parameters for CreateConnector. -type CreateConnectorJSONBody struct { - union json.RawMessage -} - -// CreateConnectorParams defines parameters for CreateConnector. -type CreateConnectorParams struct { - // KbnXsrf Cross-site request forgery protection - KbnXsrf KbnXsrf `json:"kbn-xsrf"` -} - -// DeleteConnectorParams defines parameters for DeleteConnector. -type DeleteConnectorParams struct { - // KbnXsrf Cross-site request forgery protection - KbnXsrf KbnXsrf `json:"kbn-xsrf"` -} - -// UpdateConnectorJSONBody defines parameters for UpdateConnector. -type UpdateConnectorJSONBody struct { - union json.RawMessage -} - -// UpdateConnectorParams defines parameters for UpdateConnector. -type UpdateConnectorParams struct { - // KbnXsrf Cross-site request forgery protection - KbnXsrf KbnXsrf `json:"kbn-xsrf"` -} - -// RunConnectorJSONBody defines parameters for RunConnector. -type RunConnectorJSONBody struct { - Params RunConnectorJSONBody_Params `json:"params"` -} - -// RunConnectorParams defines parameters for RunConnector. -type RunConnectorParams struct { - // KbnXsrf Cross-site request forgery protection - KbnXsrf KbnXsrf `json:"kbn-xsrf"` -} - -// RunConnectorJSONBodyParams2 defines parameters for RunConnector. -type RunConnectorJSONBodyParams2 struct { - union json.RawMessage -} - -// RunConnectorJSONBody_Params defines parameters for RunConnector. -type RunConnectorJSONBody_Params struct { - union json.RawMessage -} - -// GetConnectorTypesParams defines parameters for GetConnectorTypes. -type GetConnectorTypesParams struct { - // FeatureId A filter to limit the retrieved connector types to those that support a specific feature (such as alerting or cases). - FeatureId *Features `form:"feature_id,omitempty" json:"feature_id,omitempty"` -} - -// LegacyCreateConnectorJSONRequestBody defines body for LegacyCreateConnector for application/json ContentType. 
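// --- Illustrative sketch, not part of this patch or the generated file ---
// The legacy create body above takes optional pointer fields plus free-form
// config and secrets maps; the accompanying params struct carries the
// mandatory kbn-xsrf header. strPtr and exampleLegacyCreateBody are
// hypothetical helpers for this example only.
func strPtr(s string) *string { return &s }

func exampleLegacyCreateBody() LegacyCreateConnectorJSONBody {
	return LegacyCreateConnectorJSONBody{
		ActionTypeId: strPtr(".index"),
		Name:         strPtr("example-index-connector"),
		Config:       &map[string]interface{}{"index": "my-index"},
		// Per the field comment above, secrets must be supplied again on every update.
		Secrets: &map[string]interface{}{},
	}
}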
-type LegacyCreateConnectorJSONRequestBody LegacyCreateConnectorJSONBody - -// LegacyUpdateConnectorJSONRequestBody defines body for LegacyUpdateConnector for application/json ContentType. -type LegacyUpdateConnectorJSONRequestBody LegacyUpdateConnectorJSONBody - -// LegacyRunConnectorJSONRequestBody defines body for LegacyRunConnector for application/json ContentType. -type LegacyRunConnectorJSONRequestBody LegacyRunConnectorJSONBody - -// CreateConnectorJSONRequestBody defines body for CreateConnector for application/json ContentType. -type CreateConnectorJSONRequestBody CreateConnectorJSONBody - -// UpdateConnectorJSONRequestBody defines body for UpdateConnector for application/json ContentType. -type UpdateConnectorJSONRequestBody UpdateConnectorJSONBody - -// RunConnectorJSONRequestBody defines body for RunConnector for application/json ContentType. -type RunConnectorJSONRequestBody RunConnectorJSONBody - -// AsConnectorResponsePropertiesCasesWebhook returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesCasesWebhook -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesCasesWebhook() (ConnectorResponsePropertiesCasesWebhook, error) { - var body ConnectorResponsePropertiesCasesWebhook - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesCasesWebhook overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesCasesWebhook -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesCasesWebhook(v ConnectorResponsePropertiesCasesWebhook) error { - v.ConnectorTypeId = ".cases-webhook" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesCasesWebhook performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesCasesWebhook -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesCasesWebhook(v ConnectorResponsePropertiesCasesWebhook) error { - v.ConnectorTypeId = ".cases-webhook" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesEmail returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesEmail -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesEmail() (ConnectorResponsePropertiesEmail, error) { - var body ConnectorResponsePropertiesEmail - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesEmail overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesEmail -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesEmail(v ConnectorResponsePropertiesEmail) error { - v.ConnectorTypeId = ".email" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesEmail performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesEmail -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesEmail(v ConnectorResponsePropertiesEmail) error { - v.ConnectorTypeId = ".email" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesGemini returns the union data inside the ConnectorResponseProperties as a 
ConnectorResponsePropertiesGemini -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesGemini() (ConnectorResponsePropertiesGemini, error) { - var body ConnectorResponsePropertiesGemini - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesGemini overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesGemini -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesGemini(v ConnectorResponsePropertiesGemini) error { - v.ConnectorTypeId = ".gemini" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesGemini performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesGemini -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesGemini(v ConnectorResponsePropertiesGemini) error { - v.ConnectorTypeId = ".gemini" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesIndex returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesIndex -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesIndex() (ConnectorResponsePropertiesIndex, error) { - var body ConnectorResponsePropertiesIndex - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesIndex overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesIndex -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesIndex(v ConnectorResponsePropertiesIndex) error { - v.ConnectorTypeId = ".index" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesIndex performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesIndex -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesIndex(v ConnectorResponsePropertiesIndex) error { - v.ConnectorTypeId = ".index" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesJira returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesJira -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesJira() (ConnectorResponsePropertiesJira, error) { - var body ConnectorResponsePropertiesJira - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesJira overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesJira -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesJira(v ConnectorResponsePropertiesJira) error { - v.ConnectorTypeId = ".jira" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesJira performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesJira -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesJira(v ConnectorResponsePropertiesJira) error { - v.ConnectorTypeId = ".jira" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesOpsgenie returns 
the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesOpsgenie -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesOpsgenie() (ConnectorResponsePropertiesOpsgenie, error) { - var body ConnectorResponsePropertiesOpsgenie - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesOpsgenie overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesOpsgenie -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesOpsgenie(v ConnectorResponsePropertiesOpsgenie) error { - v.ConnectorTypeId = ".opsgenie" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesOpsgenie performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesOpsgenie -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesOpsgenie(v ConnectorResponsePropertiesOpsgenie) error { - v.ConnectorTypeId = ".opsgenie" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesPagerduty returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesPagerduty -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesPagerduty() (ConnectorResponsePropertiesPagerduty, error) { - var body ConnectorResponsePropertiesPagerduty - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesPagerduty overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesPagerduty -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesPagerduty(v ConnectorResponsePropertiesPagerduty) error { - v.ConnectorTypeId = ".pagerduty" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesPagerduty performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesPagerduty -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesPagerduty(v ConnectorResponsePropertiesPagerduty) error { - v.ConnectorTypeId = ".pagerduty" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesResilient returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesResilient -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesResilient() (ConnectorResponsePropertiesResilient, error) { - var body ConnectorResponsePropertiesResilient - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesResilient overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesResilient -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesResilient(v ConnectorResponsePropertiesResilient) error { - v.ConnectorTypeId = ".resilient" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesResilient performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesResilient -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesResilient(v ConnectorResponsePropertiesResilient) error { - 
v.ConnectorTypeId = ".resilient" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesServerlog returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesServerlog -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesServerlog() (ConnectorResponsePropertiesServerlog, error) { - var body ConnectorResponsePropertiesServerlog - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesServerlog overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesServerlog -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesServerlog(v ConnectorResponsePropertiesServerlog) error { - v.ConnectorTypeId = ".server-log" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesServerlog performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesServerlog -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesServerlog(v ConnectorResponsePropertiesServerlog) error { - v.ConnectorTypeId = ".server-log" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesServicenow returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesServicenow -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesServicenow() (ConnectorResponsePropertiesServicenow, error) { - var body ConnectorResponsePropertiesServicenow - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesServicenow overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesServicenow -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesServicenow(v ConnectorResponsePropertiesServicenow) error { - v.ConnectorTypeId = ".servicenow" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesServicenow performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesServicenow -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesServicenow(v ConnectorResponsePropertiesServicenow) error { - v.ConnectorTypeId = ".servicenow" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesServicenowItom returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesServicenowItom -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesServicenowItom() (ConnectorResponsePropertiesServicenowItom, error) { - var body ConnectorResponsePropertiesServicenowItom - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesServicenowItom overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesServicenowItom -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesServicenowItom(v ConnectorResponsePropertiesServicenowItom) error { - v.ConnectorTypeId = ".servicenow-itom" - b, err := json.Marshal(v) - t.union = b - return err -} - -// 
MergeConnectorResponsePropertiesServicenowItom performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesServicenowItom -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesServicenowItom(v ConnectorResponsePropertiesServicenowItom) error { - v.ConnectorTypeId = ".servicenow-itom" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesServicenowSir returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesServicenowSir -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesServicenowSir() (ConnectorResponsePropertiesServicenowSir, error) { - var body ConnectorResponsePropertiesServicenowSir - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesServicenowSir overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesServicenowSir -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesServicenowSir(v ConnectorResponsePropertiesServicenowSir) error { - v.ConnectorTypeId = ".servicenow-sir" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesServicenowSir performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesServicenowSir -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesServicenowSir(v ConnectorResponsePropertiesServicenowSir) error { - v.ConnectorTypeId = ".servicenow-sir" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesSlack returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesSlack -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesSlack() (ConnectorResponsePropertiesSlack, error) { - var body ConnectorResponsePropertiesSlack - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesSlack overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesSlack -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesSlack(v ConnectorResponsePropertiesSlack) error { - v.ConnectorTypeId = ".slack" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesSlack performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesSlack -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesSlack(v ConnectorResponsePropertiesSlack) error { - v.ConnectorTypeId = ".slack" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesSlackApi returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesSlackApi -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesSlackApi() (ConnectorResponsePropertiesSlackApi, error) { - var body ConnectorResponsePropertiesSlackApi - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesSlackApi overwrites any union data inside the ConnectorResponseProperties as the provided 
ConnectorResponsePropertiesSlackApi -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesSlackApi(v ConnectorResponsePropertiesSlackApi) error { - v.ConnectorTypeId = ".slack_api" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesSlackApi performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesSlackApi -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesSlackApi(v ConnectorResponsePropertiesSlackApi) error { - v.ConnectorTypeId = ".slack_api" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesSwimlane returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesSwimlane -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesSwimlane() (ConnectorResponsePropertiesSwimlane, error) { - var body ConnectorResponsePropertiesSwimlane - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesSwimlane overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesSwimlane -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesSwimlane(v ConnectorResponsePropertiesSwimlane) error { - v.ConnectorTypeId = ".swimlane" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesSwimlane performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesSwimlane -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesSwimlane(v ConnectorResponsePropertiesSwimlane) error { - v.ConnectorTypeId = ".swimlane" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesTeams returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesTeams -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesTeams() (ConnectorResponsePropertiesTeams, error) { - var body ConnectorResponsePropertiesTeams - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesTeams overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesTeams -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesTeams(v ConnectorResponsePropertiesTeams) error { - v.ConnectorTypeId = ".teams" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesTeams performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesTeams -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesTeams(v ConnectorResponsePropertiesTeams) error { - v.ConnectorTypeId = ".teams" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesTines returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesTines -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesTines() (ConnectorResponsePropertiesTines, error) { - var body ConnectorResponsePropertiesTines - err := json.Unmarshal(t.union, &body) - return body, 
err -} - -// FromConnectorResponsePropertiesTines overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesTines -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesTines(v ConnectorResponsePropertiesTines) error { - v.ConnectorTypeId = ".tines" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesTines performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesTines -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesTines(v ConnectorResponsePropertiesTines) error { - v.ConnectorTypeId = ".tines" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesWebhook returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesWebhook -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesWebhook() (ConnectorResponsePropertiesWebhook, error) { - var body ConnectorResponsePropertiesWebhook - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesWebhook overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesWebhook -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesWebhook(v ConnectorResponsePropertiesWebhook) error { - v.ConnectorTypeId = ".webhook" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesWebhook performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesWebhook -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesWebhook(v ConnectorResponsePropertiesWebhook) error { - v.ConnectorTypeId = ".webhook" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsConnectorResponsePropertiesXmatters returns the union data inside the ConnectorResponseProperties as a ConnectorResponsePropertiesXmatters -func (t ConnectorResponseProperties) AsConnectorResponsePropertiesXmatters() (ConnectorResponsePropertiesXmatters, error) { - var body ConnectorResponsePropertiesXmatters - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromConnectorResponsePropertiesXmatters overwrites any union data inside the ConnectorResponseProperties as the provided ConnectorResponsePropertiesXmatters -func (t *ConnectorResponseProperties) FromConnectorResponsePropertiesXmatters(v ConnectorResponsePropertiesXmatters) error { - v.ConnectorTypeId = ".xmatters" - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeConnectorResponsePropertiesXmatters performs a merge with any union data inside the ConnectorResponseProperties, using the provided ConnectorResponsePropertiesXmatters -func (t *ConnectorResponseProperties) MergeConnectorResponsePropertiesXmatters(v ConnectorResponsePropertiesXmatters) error { - v.ConnectorTypeId = ".xmatters" - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t ConnectorResponseProperties) Discriminator() (string, error) { - var discriminator struct { - Discriminator string `json:"connector_type_id"` - } - err := json.Unmarshal(t.union, &discriminator) - return 
discriminator.Discriminator, err -} - -func (t ConnectorResponseProperties) ValueByDiscriminator() (interface{}, error) { - discriminator, err := t.Discriminator() - if err != nil { - return nil, err - } - switch discriminator { - case ".cases-webhook": - return t.AsConnectorResponsePropertiesCasesWebhook() - case ".email": - return t.AsConnectorResponsePropertiesEmail() - case ".gemini": - return t.AsConnectorResponsePropertiesGemini() - case ".index": - return t.AsConnectorResponsePropertiesIndex() - case ".jira": - return t.AsConnectorResponsePropertiesJira() - case ".opsgenie": - return t.AsConnectorResponsePropertiesOpsgenie() - case ".pagerduty": - return t.AsConnectorResponsePropertiesPagerduty() - case ".resilient": - return t.AsConnectorResponsePropertiesResilient() - case ".server-log": - return t.AsConnectorResponsePropertiesServerlog() - case ".servicenow": - return t.AsConnectorResponsePropertiesServicenow() - case ".servicenow-itom": - return t.AsConnectorResponsePropertiesServicenowItom() - case ".servicenow-sir": - return t.AsConnectorResponsePropertiesServicenowSir() - case ".slack": - return t.AsConnectorResponsePropertiesSlack() - case ".slack_api": - return t.AsConnectorResponsePropertiesSlackApi() - case ".swimlane": - return t.AsConnectorResponsePropertiesSwimlane() - case ".teams": - return t.AsConnectorResponsePropertiesTeams() - case ".tines": - return t.AsConnectorResponsePropertiesTines() - case ".webhook": - return t.AsConnectorResponsePropertiesWebhook() - case ".xmatters": - return t.AsConnectorResponsePropertiesXmatters() - default: - return nil, errors.New("unknown discriminator value: " + discriminator) - } -} - -func (t ConnectorResponseProperties) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *ConnectorResponseProperties) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsLegacyRunConnectorGeneralResponseData0 returns the union data inside the LegacyRunConnectorGeneralResponse_Data as a LegacyRunConnectorGeneralResponseData0 -func (t LegacyRunConnectorGeneralResponse_Data) AsLegacyRunConnectorGeneralResponseData0() (LegacyRunConnectorGeneralResponseData0, error) { - var body LegacyRunConnectorGeneralResponseData0 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromLegacyRunConnectorGeneralResponseData0 overwrites any union data inside the LegacyRunConnectorGeneralResponse_Data as the provided LegacyRunConnectorGeneralResponseData0 -func (t *LegacyRunConnectorGeneralResponse_Data) FromLegacyRunConnectorGeneralResponseData0(v LegacyRunConnectorGeneralResponseData0) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeLegacyRunConnectorGeneralResponseData0 performs a merge with any union data inside the LegacyRunConnectorGeneralResponse_Data, using the provided LegacyRunConnectorGeneralResponseData0 -func (t *LegacyRunConnectorGeneralResponse_Data) MergeLegacyRunConnectorGeneralResponseData0(v LegacyRunConnectorGeneralResponseData0) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsLegacyRunConnectorGeneralResponseData1 returns the union data inside the LegacyRunConnectorGeneralResponse_Data as a LegacyRunConnectorGeneralResponseData1 -func (t LegacyRunConnectorGeneralResponse_Data) AsLegacyRunConnectorGeneralResponseData1() (LegacyRunConnectorGeneralResponseData1, error) { - var body 
LegacyRunConnectorGeneralResponseData1 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromLegacyRunConnectorGeneralResponseData1 overwrites any union data inside the LegacyRunConnectorGeneralResponse_Data as the provided LegacyRunConnectorGeneralResponseData1 -func (t *LegacyRunConnectorGeneralResponse_Data) FromLegacyRunConnectorGeneralResponseData1(v LegacyRunConnectorGeneralResponseData1) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeLegacyRunConnectorGeneralResponseData1 performs a merge with any union data inside the LegacyRunConnectorGeneralResponse_Data, using the provided LegacyRunConnectorGeneralResponseData1 -func (t *LegacyRunConnectorGeneralResponse_Data) MergeLegacyRunConnectorGeneralResponseData1(v LegacyRunConnectorGeneralResponseData1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t LegacyRunConnectorGeneralResponse_Data) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *LegacyRunConnectorGeneralResponse_Data) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsRunConnectorGeneralResponseData0 returns the union data inside the RunConnectorGeneralResponse_Data as a RunConnectorGeneralResponseData0 -func (t RunConnectorGeneralResponse_Data) AsRunConnectorGeneralResponseData0() (RunConnectorGeneralResponseData0, error) { - var body RunConnectorGeneralResponseData0 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromRunConnectorGeneralResponseData0 overwrites any union data inside the RunConnectorGeneralResponse_Data as the provided RunConnectorGeneralResponseData0 -func (t *RunConnectorGeneralResponse_Data) FromRunConnectorGeneralResponseData0(v RunConnectorGeneralResponseData0) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeRunConnectorGeneralResponseData0 performs a merge with any union data inside the RunConnectorGeneralResponse_Data, using the provided RunConnectorGeneralResponseData0 -func (t *RunConnectorGeneralResponse_Data) MergeRunConnectorGeneralResponseData0(v RunConnectorGeneralResponseData0) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsRunConnectorGeneralResponseData1 returns the union data inside the RunConnectorGeneralResponse_Data as a RunConnectorGeneralResponseData1 -func (t RunConnectorGeneralResponse_Data) AsRunConnectorGeneralResponseData1() (RunConnectorGeneralResponseData1, error) { - var body RunConnectorGeneralResponseData1 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromRunConnectorGeneralResponseData1 overwrites any union data inside the RunConnectorGeneralResponse_Data as the provided RunConnectorGeneralResponseData1 -func (t *RunConnectorGeneralResponse_Data) FromRunConnectorGeneralResponseData1(v RunConnectorGeneralResponseData1) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeRunConnectorGeneralResponseData1 performs a merge with any union data inside the RunConnectorGeneralResponse_Data, using the provided RunConnectorGeneralResponseData1 -func (t *RunConnectorGeneralResponse_Data) MergeRunConnectorGeneralResponseData1(v RunConnectorGeneralResponseData1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - 
t.union = merged - return err -} - -func (t RunConnectorGeneralResponse_Data) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *RunConnectorGeneralResponse_Data) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0 returns the union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp as a RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0 -func (t RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp) AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0() (RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0, error) { - var body RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0 overwrites any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp as the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0 -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp) FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0 performs a merge with any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp, using the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0 -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp) MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp0) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1 returns the union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp as a RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1 -func (t RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp) AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1() (RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1, error) { - var body RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1 overwrites any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp as the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1 -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp) FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1 performs a merge with any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp, using the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1 -func (t 
*RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp) MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentDestIp1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_DestIp) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0 returns the union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash as a RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0 -func (t RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash) AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0() (RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0, error) { - var body RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0 overwrites any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash as the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0 -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash) FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0 performs a merge with any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash, using the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0 -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash) MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash0) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1 returns the union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash as a RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1 -func (t RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash) AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1() (RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1, error) { - var body RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1 overwrites any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash as the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1 -func (t 
*RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash) FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1 performs a merge with any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash, using the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1 -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash) MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentMalwareHash1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_MalwareHash) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0 returns the union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp as a RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0 -func (t RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp) AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0() (RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0, error) { - var body RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0 overwrites any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp as the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0 -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp) FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0 performs a merge with any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp, using the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0 -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp) MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp0) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1 returns the union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp as a RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1 -func (t RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp) AsRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1() 
(RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1, error) { - var body RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1 overwrites any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp as the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1 -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp) FromRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1) error { - b, err := json.Marshal(v) - t.union = b - return err -} - -// MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1 performs a merge with any union data inside the RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp, using the provided RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1 -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp) MergeRunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1(v RunConnectorSubactionPushtoserviceSubActionParamsIncidentSourceIp1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } - - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *RunConnectorSubactionPushtoservice_SubActionParams_Incident_SourceIp) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// RequestEditorFn is the function signature for the RequestEditor callback function -type RequestEditorFn func(ctx context.Context, req *http.Request) error - -// Doer performs HTTP requests. -// -// The standard http.Client implements this interface. -type HttpRequestDoer interface { - Do(req *http.Request) (*http.Response, error) -} - -// Client which conforms to the OpenAPI3 specification for this service. -type Client struct { - // The endpoint of the server conforming to this interface, with scheme, - // https://api.deepmap.com for example. This can contain a path relative - // to the server, such as https://api.deepmap.com/dev-test, and all the - // paths in the swagger spec will be appended to the server. - Server string - - // Doer for performing requests, typically a *http.Client with any - // customized settings, such as certificate chains. - Client HttpRequestDoer - - // A list of callbacks for modifying requests which are generated before sending over - // the network. 
-	RequestEditors []RequestEditorFn
-}
-
-// ClientOption allows setting custom parameters during construction
-type ClientOption func(*Client) error
-
-// Creates a new Client, with reasonable defaults
-func NewClient(server string, opts ...ClientOption) (*Client, error) {
-	// create a client with sane default values
-	client := Client{
-		Server: server,
-	}
-	// mutate client and add all optional params
-	for _, o := range opts {
-		if err := o(&client); err != nil {
-			return nil, err
-		}
-	}
-	// ensure the server URL always has a trailing slash
-	if !strings.HasSuffix(client.Server, "/") {
-		client.Server += "/"
-	}
-	// create httpClient, if not already present
-	if client.Client == nil {
-		client.Client = &http.Client{}
-	}
-	return &client, nil
-}
-
-// WithHTTPClient allows overriding the default Doer, which is
-// automatically created using http.Client. This is useful for tests.
-func WithHTTPClient(doer HttpRequestDoer) ClientOption {
-	return func(c *Client) error {
-		c.Client = doer
-		return nil
-	}
-}
-
-// WithRequestEditorFn allows setting up a callback function, which will be
-// called right before sending the request. This can be used to mutate the request.
-func WithRequestEditorFn(fn RequestEditorFn) ClientOption {
-	return func(c *Client) error {
-		c.RequestEditors = append(c.RequestEditors, fn)
-		return nil
-	}
-}
-
-// The interface specification for the client above.
-type ClientInterface interface {
-	// LegacyGetConnectors request
-	LegacyGetConnectors(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error)
-
-	// LegacyCreateConnectorWithBody request with any body
-	LegacyCreateConnectorWithBody(ctx context.Context, spaceId SpaceId, params *LegacyCreateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
-
-	LegacyCreateConnector(ctx context.Context, spaceId SpaceId, params *LegacyCreateConnectorParams, body LegacyCreateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)
-
-	// LegacyDeleteConnector request
-	LegacyDeleteConnector(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyDeleteConnectorParams, reqEditors ...RequestEditorFn) (*http.Response, error)
-
-	// LegacyGetConnector request
-	LegacyGetConnector(ctx context.Context, spaceId SpaceId, actionId ActionId, reqEditors ...RequestEditorFn) (*http.Response, error)
-
-	// LegacyUpdateConnectorWithBody request with any body
-	LegacyUpdateConnectorWithBody(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyUpdateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
-
-	LegacyUpdateConnector(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyUpdateConnectorParams, body LegacyUpdateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)
-
-	// LegacyRunConnectorWithBody request with any body
-	LegacyRunConnectorWithBody(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyRunConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error)
-
-	LegacyRunConnector(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyRunConnectorParams, body LegacyRunConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error)
-
-	// CreateConnectorWithBody request with any body
-	CreateConnectorWithBody(ctx context.Context, spaceId SpaceId, params *CreateConnectorParams,
contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - - CreateConnector(ctx context.Context, spaceId SpaceId, params *CreateConnectorParams, body CreateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - - // DeleteConnector request - DeleteConnector(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *DeleteConnectorParams, reqEditors ...RequestEditorFn) (*http.Response, error) - - // GetConnector request - GetConnector(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, reqEditors ...RequestEditorFn) (*http.Response, error) - - // UpdateConnectorWithBody request with any body - UpdateConnectorWithBody(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *UpdateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - - UpdateConnector(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *UpdateConnectorParams, body UpdateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - - // RunConnectorWithBody request with any body - RunConnectorWithBody(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *RunConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) - - RunConnector(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *RunConnectorParams, body RunConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) - - // GetConnectorTypes request - GetConnectorTypes(ctx context.Context, spaceId SpaceId, params *GetConnectorTypesParams, reqEditors ...RequestEditorFn) (*http.Response, error) - - // GetConnectors request - GetConnectors(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) - - // LegacyGetConnectorTypes request - LegacyGetConnectorTypes(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) -} - -func (c *Client) LegacyGetConnectors(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewLegacyGetConnectorsRequest(c.Server, spaceId) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) LegacyCreateConnectorWithBody(ctx context.Context, spaceId SpaceId, params *LegacyCreateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewLegacyCreateConnectorRequestWithBody(c.Server, spaceId, params, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) LegacyCreateConnector(ctx context.Context, spaceId SpaceId, params *LegacyCreateConnectorParams, body LegacyCreateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewLegacyCreateConnectorRequest(c.Server, spaceId, params, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) LegacyDeleteConnector(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyDeleteConnectorParams, reqEditors 
...RequestEditorFn) (*http.Response, error) { - req, err := NewLegacyDeleteConnectorRequest(c.Server, spaceId, actionId, params) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) LegacyGetConnector(ctx context.Context, spaceId SpaceId, actionId ActionId, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewLegacyGetConnectorRequest(c.Server, spaceId, actionId) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) LegacyUpdateConnectorWithBody(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyUpdateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewLegacyUpdateConnectorRequestWithBody(c.Server, spaceId, actionId, params, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) LegacyUpdateConnector(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyUpdateConnectorParams, body LegacyUpdateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewLegacyUpdateConnectorRequest(c.Server, spaceId, actionId, params, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) LegacyRunConnectorWithBody(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyRunConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewLegacyRunConnectorRequestWithBody(c.Server, spaceId, actionId, params, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) LegacyRunConnector(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyRunConnectorParams, body LegacyRunConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewLegacyRunConnectorRequest(c.Server, spaceId, actionId, params, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) CreateConnectorWithBody(ctx context.Context, spaceId SpaceId, params *CreateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateConnectorRequestWithBody(c.Server, spaceId, params, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) CreateConnector(ctx context.Context, spaceId SpaceId, params *CreateConnectorParams, body CreateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateConnectorRequest(c.Server, spaceId, params, body) - if err != nil { - 
return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) DeleteConnector(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *DeleteConnectorParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteConnectorRequest(c.Server, spaceId, connectorId, params) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) GetConnector(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetConnectorRequest(c.Server, spaceId, connectorId) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) UpdateConnectorWithBody(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *UpdateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateConnectorRequestWithBody(c.Server, spaceId, connectorId, params, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) UpdateConnector(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *UpdateConnectorParams, body UpdateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateConnectorRequest(c.Server, spaceId, connectorId, params, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) RunConnectorWithBody(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *RunConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewRunConnectorRequestWithBody(c.Server, spaceId, connectorId, params, contentType, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) RunConnector(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *RunConnectorParams, body RunConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewRunConnectorRequest(c.Server, spaceId, connectorId, params, body) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) GetConnectorTypes(ctx context.Context, spaceId SpaceId, params *GetConnectorTypesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetConnectorTypesRequest(c.Server, spaceId, params) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) GetConnectors(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) 
(*http.Response, error) { - req, err := NewGetConnectorsRequest(c.Server, spaceId) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -func (c *Client) LegacyGetConnectorTypes(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewLegacyGetConnectorTypesRequest(c.Server, spaceId) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} - -// NewLegacyGetConnectorsRequest generates requests for LegacyGetConnectors -func NewLegacyGetConnectorsRequest(server string, spaceId SpaceId) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewLegacyCreateConnectorRequest calls the generic LegacyCreateConnector builder with application/json body -func NewLegacyCreateConnectorRequest(server string, spaceId SpaceId, params *LegacyCreateConnectorParams, body LegacyCreateConnectorJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewLegacyCreateConnectorRequestWithBody(server, spaceId, params, "application/json", bodyReader) -} - -// NewLegacyCreateConnectorRequestWithBody generates requests for LegacyCreateConnector with any type of body -func NewLegacyCreateConnectorRequestWithBody(server string, spaceId SpaceId, params *LegacyCreateConnectorParams, contentType string, body io.Reader) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - if params != nil { - - var headerParam0 string - - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "kbn-xsrf", runtime.ParamLocationHeader, params.KbnXsrf) - if err != nil { - return nil, err - } - - req.Header.Set("kbn-xsrf", headerParam0) - - } - - return req, nil -} - -// NewLegacyDeleteConnectorRequest generates requests for LegacyDeleteConnector -func NewLegacyDeleteConnectorRequest(server string, spaceId SpaceId, actionId ActionId, params *LegacyDeleteConnectorParams) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "actionId", runtime.ParamLocationPath, actionId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/action/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err - } - - if params != nil { - - var headerParam0 string - - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "kbn-xsrf", runtime.ParamLocationHeader, params.KbnXsrf) - if err != nil { - return nil, err - } - - req.Header.Set("kbn-xsrf", headerParam0) - - } - - return req, nil -} - -// NewLegacyGetConnectorRequest generates requests for LegacyGetConnector -func NewLegacyGetConnectorRequest(server string, spaceId SpaceId, actionId ActionId) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "actionId", runtime.ParamLocationPath, actionId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/action/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewLegacyUpdateConnectorRequest calls the generic LegacyUpdateConnector builder with application/json body -func NewLegacyUpdateConnectorRequest(server string, spaceId SpaceId, actionId ActionId, params *LegacyUpdateConnectorParams, body LegacyUpdateConnectorJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewLegacyUpdateConnectorRequestWithBody(server, spaceId, actionId, params, "application/json", bodyReader) -} - -// NewLegacyUpdateConnectorRequestWithBody generates requests for LegacyUpdateConnector with any type of body -func NewLegacyUpdateConnectorRequestWithBody(server string, spaceId SpaceId, actionId ActionId, params *LegacyUpdateConnectorParams, contentType string, body io.Reader) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "actionId", runtime.ParamLocationPath, actionId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/action/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("PUT", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - if params != nil { - - var headerParam0 string - - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "kbn-xsrf", runtime.ParamLocationHeader, params.KbnXsrf) - if err != nil { - return nil, err - } - - req.Header.Set("kbn-xsrf", headerParam0) - - } - - return req, nil -} - -// NewLegacyRunConnectorRequest calls the generic LegacyRunConnector builder with application/json body -func NewLegacyRunConnectorRequest(server string, spaceId SpaceId, actionId ActionId, params *LegacyRunConnectorParams, body LegacyRunConnectorJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewLegacyRunConnectorRequestWithBody(server, spaceId, actionId, params, "application/json", bodyReader) -} - -// NewLegacyRunConnectorRequestWithBody generates requests for LegacyRunConnector with any type of body -func NewLegacyRunConnectorRequestWithBody(server string, spaceId SpaceId, actionId ActionId, params *LegacyRunConnectorParams, contentType string, body io.Reader) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "actionId", runtime.ParamLocationPath, actionId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - 
return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/action/%s/_execute", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - if params != nil { - - var headerParam0 string - - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "kbn-xsrf", runtime.ParamLocationHeader, params.KbnXsrf) - if err != nil { - return nil, err - } - - req.Header.Set("kbn-xsrf", headerParam0) - - } - - return req, nil -} - -// NewCreateConnectorRequest calls the generic CreateConnector builder with application/json body -func NewCreateConnectorRequest(server string, spaceId SpaceId, params *CreateConnectorParams, body CreateConnectorJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewCreateConnectorRequestWithBody(server, spaceId, params, "application/json", bodyReader) -} - -// NewCreateConnectorRequestWithBody generates requests for CreateConnector with any type of body -func NewCreateConnectorRequestWithBody(server string, spaceId SpaceId, params *CreateConnectorParams, contentType string, body io.Reader) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/connector", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - if params != nil { - - var headerParam0 string - - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "kbn-xsrf", runtime.ParamLocationHeader, params.KbnXsrf) - if err != nil { - return nil, err - } - - req.Header.Set("kbn-xsrf", headerParam0) - - } - - return req, nil -} - -// NewDeleteConnectorRequest generates requests for DeleteConnector -func NewDeleteConnectorRequest(server string, spaceId SpaceId, connectorId ConnectorId, params *DeleteConnectorParams) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "connectorId", runtime.ParamLocationPath, connectorId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err - } - - if params != nil { - - var headerParam0 string - - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "kbn-xsrf", runtime.ParamLocationHeader, params.KbnXsrf) - if err != nil { - return nil, err - } - - req.Header.Set("kbn-xsrf", headerParam0) - - } - - return req, nil -} - -// NewGetConnectorRequest generates requests for GetConnector -func NewGetConnectorRequest(server string, spaceId SpaceId, connectorId ConnectorId) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "connectorId", runtime.ParamLocationPath, connectorId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewUpdateConnectorRequest calls the generic UpdateConnector builder with application/json body -func NewUpdateConnectorRequest(server string, spaceId SpaceId, connectorId ConnectorId, params *UpdateConnectorParams, body UpdateConnectorJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewUpdateConnectorRequestWithBody(server, spaceId, connectorId, params, "application/json", bodyReader) -} - -// NewUpdateConnectorRequestWithBody generates requests for UpdateConnector with any type of body -func NewUpdateConnectorRequestWithBody(server string, spaceId SpaceId, connectorId ConnectorId, params *UpdateConnectorParams, contentType string, body io.Reader) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "connectorId", runtime.ParamLocationPath, connectorId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("PUT", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - if params != nil { - - var headerParam0 string - - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "kbn-xsrf", runtime.ParamLocationHeader, params.KbnXsrf) - if err != nil { - return nil, err - } - - req.Header.Set("kbn-xsrf", headerParam0) - - } - - return req, nil -} - -// NewRunConnectorRequest calls the generic RunConnector builder with application/json body -func NewRunConnectorRequest(server string, spaceId SpaceId, connectorId ConnectorId, params *RunConnectorParams, body RunConnectorJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewRunConnectorRequestWithBody(server, spaceId, connectorId, params, "application/json", bodyReader) -} - -// NewRunConnectorRequestWithBody generates requests for RunConnector with any type of body -func NewRunConnectorRequestWithBody(server string, spaceId SpaceId, connectorId ConnectorId, params *RunConnectorParams, contentType string, body io.Reader) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - var pathParam1 string - - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "connectorId", runtime.ParamLocationPath, connectorId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s/_execute", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", contentType) - - if params != nil { - - var headerParam0 string - - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "kbn-xsrf", runtime.ParamLocationHeader, params.KbnXsrf) - if err != nil { - return nil, err - } - - req.Header.Set("kbn-xsrf", headerParam0) - - } - - return req, nil -} - -// NewGetConnectorTypesRequest generates requests for GetConnectorTypes -func NewGetConnectorTypesRequest(server string, spaceId SpaceId, params *GetConnectorTypesParams) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/connector_types", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - if params != nil { - queryValues := queryURL.Query() - - if params.FeatureId != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "feature_id", runtime.ParamLocationQuery, *params.FeatureId); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - - } - - queryURL.RawQuery = queryValues.Encode() - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewGetConnectorsRequest generates requests for GetConnectors -func NewGetConnectorsRequest(server string, spaceId SpaceId) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/connectors", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -// NewLegacyGetConnectorTypesRequest generates requests for LegacyGetConnectorTypes -func NewLegacyGetConnectorTypesRequest(server string, spaceId SpaceId) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } - - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } - - operationPath := fmt.Sprintf("/s/%s/api/actions/list_action_types", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } - - return req, nil -} - -func (c *Client) applyEditors(ctx context.Context, req *http.Request, additionalEditors []RequestEditorFn) error { - for _, r := range c.RequestEditors { - if err := r(ctx, req); err != nil { - return err - } - } - for _, r := range additionalEditors { - if err := r(ctx, req); err != nil { - return err - } - } - return nil -} - -// ClientWithResponses builds on ClientInterface to offer response payloads -type ClientWithResponses struct { - ClientInterface -} - -// NewClientWithResponses creates a new ClientWithResponses, which wraps -// Client with return type handling -func NewClientWithResponses(server string, opts ...ClientOption) (*ClientWithResponses, error) { - client, err := NewClient(server, opts...) - if err != nil { - return nil, err - } - return &ClientWithResponses{client}, nil -} - -// WithBaseURL overrides the baseURL. -func WithBaseURL(baseURL string) ClientOption { - return func(c *Client) error { - newBaseURL, err := url.Parse(baseURL) - if err != nil { - return err - } - c.Server = newBaseURL.String() - return nil - } -} - -// ClientWithResponsesInterface is the interface specification for the client with responses above. 
-type ClientWithResponsesInterface interface {
-	// LegacyGetConnectorsWithResponse request
-	LegacyGetConnectorsWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*LegacyGetConnectorsResponse, error)
-
-	// LegacyCreateConnectorWithBodyWithResponse request with any body
-	LegacyCreateConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, params *LegacyCreateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*LegacyCreateConnectorResponse, error)
-
-	LegacyCreateConnectorWithResponse(ctx context.Context, spaceId SpaceId, params *LegacyCreateConnectorParams, body LegacyCreateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*LegacyCreateConnectorResponse, error)
-
-	// LegacyDeleteConnectorWithResponse request
-	LegacyDeleteConnectorWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyDeleteConnectorParams, reqEditors ...RequestEditorFn) (*LegacyDeleteConnectorResponse, error)
-
-	// LegacyGetConnectorWithResponse request
-	LegacyGetConnectorWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, reqEditors ...RequestEditorFn) (*LegacyGetConnectorResponse, error)
-
-	// LegacyUpdateConnectorWithBodyWithResponse request with any body
-	LegacyUpdateConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyUpdateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*LegacyUpdateConnectorResponse, error)
-
-	LegacyUpdateConnectorWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyUpdateConnectorParams, body LegacyUpdateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*LegacyUpdateConnectorResponse, error)
-
-	// LegacyRunConnectorWithBodyWithResponse request with any body
-	LegacyRunConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyRunConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*LegacyRunConnectorResponse, error)
-
-	LegacyRunConnectorWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyRunConnectorParams, body LegacyRunConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*LegacyRunConnectorResponse, error)
-
-	// CreateConnectorWithBodyWithResponse request with any body
-	CreateConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, params *CreateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateConnectorResponse, error)
-
-	CreateConnectorWithResponse(ctx context.Context, spaceId SpaceId, params *CreateConnectorParams, body CreateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateConnectorResponse, error)
-
-	// DeleteConnectorWithResponse request
-	DeleteConnectorWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *DeleteConnectorParams, reqEditors ...RequestEditorFn) (*DeleteConnectorResponse, error)
-
-	// GetConnectorWithResponse request
-	GetConnectorWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, reqEditors ...RequestEditorFn) (*GetConnectorResponse, error)
-
-	// UpdateConnectorWithBodyWithResponse request with any body
-	UpdateConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *UpdateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateConnectorResponse, error)
-
-	UpdateConnectorWithResponse(ctx
context.Context, spaceId SpaceId, connectorId ConnectorId, params *UpdateConnectorParams, body UpdateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateConnectorResponse, error) - - // RunConnectorWithBodyWithResponse request with any body - RunConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *RunConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RunConnectorResponse, error) - - RunConnectorWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *RunConnectorParams, body RunConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*RunConnectorResponse, error) - - // GetConnectorTypesWithResponse request - GetConnectorTypesWithResponse(ctx context.Context, spaceId SpaceId, params *GetConnectorTypesParams, reqEditors ...RequestEditorFn) (*GetConnectorTypesResponse, error) - - // GetConnectorsWithResponse request - GetConnectorsWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetConnectorsResponse, error) - - // LegacyGetConnectorTypesWithResponse request - LegacyGetConnectorTypesWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*LegacyGetConnectorTypesResponse, error) -} - -type LegacyGetConnectorsResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *[]ActionResponseProperties - JSON401 *AuthorizationError -} - -// Status returns HTTPResponse.Status -func (r LegacyGetConnectorsResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r LegacyGetConnectorsResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type LegacyCreateConnectorResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *N200Actions - JSON401 *AuthorizationError -} - -// Status returns HTTPResponse.Status -func (r LegacyCreateConnectorResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r LegacyCreateConnectorResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type LegacyDeleteConnectorResponse struct { - Body []byte - HTTPResponse *http.Response - JSON401 *AuthorizationError -} - -// Status returns HTTPResponse.Status -func (r LegacyDeleteConnectorResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r LegacyDeleteConnectorResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type LegacyGetConnectorResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *N200Actions - JSON401 *AuthorizationError -} - -// Status returns HTTPResponse.Status -func (r LegacyGetConnectorResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r LegacyGetConnectorResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type LegacyUpdateConnectorResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *N200Actions - JSON404 *ObjectNotFoundError -} - -// Status returns 
HTTPResponse.Status -func (r LegacyUpdateConnectorResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r LegacyUpdateConnectorResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type LegacyRunConnectorResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *LegacyRunConnectorGeneralResponse - JSON401 *AuthorizationError -} - -// Status returns HTTPResponse.Status -func (r LegacyRunConnectorResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r LegacyRunConnectorResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type CreateConnectorResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *ConnectorResponseProperties - JSON400 *BadRequestError - JSON401 *AuthorizationError -} - -// Status returns HTTPResponse.Status -func (r CreateConnectorResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r CreateConnectorResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type DeleteConnectorResponse struct { - Body []byte - HTTPResponse *http.Response - JSON401 *AuthorizationError - JSON404 *struct { - Error *string `json:"error,omitempty"` - Message *string `json:"message,omitempty"` - StatusCode *int `json:"statusCode,omitempty"` - } -} - -// Status returns HTTPResponse.Status -func (r DeleteConnectorResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteConnectorResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type GetConnectorResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *ConnectorResponseProperties - JSON401 *AuthorizationError - JSON404 *struct { - Error *string `json:"error,omitempty"` - Message *string `json:"message,omitempty"` - StatusCode *int `json:"statusCode,omitempty"` - } -} - -// Status returns HTTPResponse.Status -func (r GetConnectorResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r GetConnectorResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type UpdateConnectorResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *ConnectorResponseProperties - JSON400 *BadRequestError - JSON401 *AuthorizationError - JSON404 *ObjectNotFoundError -} - -// Status returns HTTPResponse.Status -func (r UpdateConnectorResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r UpdateConnectorResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type RunConnectorResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *RunConnectorGeneralResponse - JSON401 *AuthorizationError -} - -// Status returns 
HTTPResponse.Status -func (r RunConnectorResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r RunConnectorResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type GetConnectorTypesResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *[]struct { - // Enabled Indicates whether the connector type is enabled in Kibana. - Enabled *bool `json:"enabled,omitempty"` - - // EnabledInConfig Indicates whether the connector type is enabled in the Kibana `.yml` file. - EnabledInConfig *bool `json:"enabled_in_config,omitempty"` - - // EnabledInLicense Indicates whether the connector is enabled in the license. - EnabledInLicense *bool `json:"enabled_in_license,omitempty"` - - // Id The type of connector. For example, `.email`, `.index`, `.jira`, `.opsgenie`, or `.server-log`. - Id *ConnectorTypes `json:"id,omitempty"` - - // MinimumLicenseRequired The license that is required to use the connector type. - MinimumLicenseRequired *string `json:"minimum_license_required,omitempty"` - - // Name The name of the connector type. - Name *string `json:"name,omitempty"` - - // SupportedFeatureIds The Kibana features that are supported by the connector type. - SupportedFeatureIds *[]Features `json:"supported_feature_ids,omitempty"` - } - JSON401 *AuthorizationError -} - -// Status returns HTTPResponse.Status -func (r GetConnectorTypesResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r GetConnectorTypesResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type GetConnectorsResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *[]struct { - // Config The configuration for the connector. Configuration properties vary depending on the connector type. - Config *map[string]interface{} `json:"config"` - - // ConnectorTypeId The type of connector. For example, `.email`, `.index`, `.jira`, `.opsgenie`, or `.server-log`. - ConnectorTypeId ConnectorTypes `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` - - // ReferencedByCount Indicates the number of saved objects that reference the connector. If `is_preconfigured` is true, this value is not calculated. 
- ReferencedByCount int `json:"referenced_by_count"` - } - JSON401 *AuthorizationError -} - -// Status returns HTTPResponse.Status -func (r GetConnectorsResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r GetConnectorsResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -type LegacyGetConnectorTypesResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *[]struct { - // Enabled Indicates whether the connector type is enabled in Kibana. - Enabled *bool `json:"enabled,omitempty"` - - // EnabledInConfig Indicates whether the connector type is enabled in the Kibana `.yml` file. - EnabledInConfig *bool `json:"enabledInConfig,omitempty"` - - // EnabledInLicense Indicates whether the connector is enabled in the license. - EnabledInLicense *bool `json:"enabledInLicense,omitempty"` - - // Id The unique identifier for the connector type. - Id *string `json:"id,omitempty"` - - // MinimumLicenseRequired The license that is required to use the connector type. - MinimumLicenseRequired *string `json:"minimumLicenseRequired,omitempty"` - - // Name The name of the connector type. - Name *string `json:"name,omitempty"` - } - JSON401 *AuthorizationError -} - -// Status returns HTTPResponse.Status -func (r LegacyGetConnectorTypesResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} - -// StatusCode returns HTTPResponse.StatusCode -func (r LegacyGetConnectorTypesResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 -} - -// LegacyGetConnectorsWithResponse request returning *LegacyGetConnectorsResponse -func (c *ClientWithResponses) LegacyGetConnectorsWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*LegacyGetConnectorsResponse, error) { - rsp, err := c.LegacyGetConnectors(ctx, spaceId, reqEditors...) - if err != nil { - return nil, err - } - return ParseLegacyGetConnectorsResponse(rsp) -} - -// LegacyCreateConnectorWithBodyWithResponse request with arbitrary body returning *LegacyCreateConnectorResponse -func (c *ClientWithResponses) LegacyCreateConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, params *LegacyCreateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*LegacyCreateConnectorResponse, error) { - rsp, err := c.LegacyCreateConnectorWithBody(ctx, spaceId, params, contentType, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseLegacyCreateConnectorResponse(rsp) -} - -func (c *ClientWithResponses) LegacyCreateConnectorWithResponse(ctx context.Context, spaceId SpaceId, params *LegacyCreateConnectorParams, body LegacyCreateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*LegacyCreateConnectorResponse, error) { - rsp, err := c.LegacyCreateConnector(ctx, spaceId, params, body, reqEditors...) 
- if err != nil { - return nil, err - } - return ParseLegacyCreateConnectorResponse(rsp) -} - -// LegacyDeleteConnectorWithResponse request returning *LegacyDeleteConnectorResponse -func (c *ClientWithResponses) LegacyDeleteConnectorWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyDeleteConnectorParams, reqEditors ...RequestEditorFn) (*LegacyDeleteConnectorResponse, error) { - rsp, err := c.LegacyDeleteConnector(ctx, spaceId, actionId, params, reqEditors...) - if err != nil { - return nil, err - } - return ParseLegacyDeleteConnectorResponse(rsp) -} - -// LegacyGetConnectorWithResponse request returning *LegacyGetConnectorResponse -func (c *ClientWithResponses) LegacyGetConnectorWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, reqEditors ...RequestEditorFn) (*LegacyGetConnectorResponse, error) { - rsp, err := c.LegacyGetConnector(ctx, spaceId, actionId, reqEditors...) - if err != nil { - return nil, err - } - return ParseLegacyGetConnectorResponse(rsp) -} - -// LegacyUpdateConnectorWithBodyWithResponse request with arbitrary body returning *LegacyUpdateConnectorResponse -func (c *ClientWithResponses) LegacyUpdateConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyUpdateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*LegacyUpdateConnectorResponse, error) { - rsp, err := c.LegacyUpdateConnectorWithBody(ctx, spaceId, actionId, params, contentType, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseLegacyUpdateConnectorResponse(rsp) -} - -func (c *ClientWithResponses) LegacyUpdateConnectorWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyUpdateConnectorParams, body LegacyUpdateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*LegacyUpdateConnectorResponse, error) { - rsp, err := c.LegacyUpdateConnector(ctx, spaceId, actionId, params, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseLegacyUpdateConnectorResponse(rsp) -} - -// LegacyRunConnectorWithBodyWithResponse request with arbitrary body returning *LegacyRunConnectorResponse -func (c *ClientWithResponses) LegacyRunConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyRunConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*LegacyRunConnectorResponse, error) { - rsp, err := c.LegacyRunConnectorWithBody(ctx, spaceId, actionId, params, contentType, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseLegacyRunConnectorResponse(rsp) -} - -func (c *ClientWithResponses) LegacyRunConnectorWithResponse(ctx context.Context, spaceId SpaceId, actionId ActionId, params *LegacyRunConnectorParams, body LegacyRunConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*LegacyRunConnectorResponse, error) { - rsp, err := c.LegacyRunConnector(ctx, spaceId, actionId, params, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseLegacyRunConnectorResponse(rsp) -} - -// CreateConnectorWithBodyWithResponse request with arbitrary body returning *CreateConnectorResponse -func (c *ClientWithResponses) CreateConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, params *CreateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateConnectorResponse, error) { - rsp, err := c.CreateConnectorWithBody(ctx, spaceId, params, contentType, body, reqEditors...) 
- if err != nil { - return nil, err - } - return ParseCreateConnectorResponse(rsp) -} - -func (c *ClientWithResponses) CreateConnectorWithResponse(ctx context.Context, spaceId SpaceId, params *CreateConnectorParams, body CreateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateConnectorResponse, error) { - rsp, err := c.CreateConnector(ctx, spaceId, params, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseCreateConnectorResponse(rsp) -} - -// DeleteConnectorWithResponse request returning *DeleteConnectorResponse -func (c *ClientWithResponses) DeleteConnectorWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *DeleteConnectorParams, reqEditors ...RequestEditorFn) (*DeleteConnectorResponse, error) { - rsp, err := c.DeleteConnector(ctx, spaceId, connectorId, params, reqEditors...) - if err != nil { - return nil, err - } - return ParseDeleteConnectorResponse(rsp) -} - -// GetConnectorWithResponse request returning *GetConnectorResponse -func (c *ClientWithResponses) GetConnectorWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, reqEditors ...RequestEditorFn) (*GetConnectorResponse, error) { - rsp, err := c.GetConnector(ctx, spaceId, connectorId, reqEditors...) - if err != nil { - return nil, err - } - return ParseGetConnectorResponse(rsp) -} - -// UpdateConnectorWithBodyWithResponse request with arbitrary body returning *UpdateConnectorResponse -func (c *ClientWithResponses) UpdateConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *UpdateConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateConnectorResponse, error) { - rsp, err := c.UpdateConnectorWithBody(ctx, spaceId, connectorId, params, contentType, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseUpdateConnectorResponse(rsp) -} - -func (c *ClientWithResponses) UpdateConnectorWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *UpdateConnectorParams, body UpdateConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateConnectorResponse, error) { - rsp, err := c.UpdateConnector(ctx, spaceId, connectorId, params, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseUpdateConnectorResponse(rsp) -} - -// RunConnectorWithBodyWithResponse request with arbitrary body returning *RunConnectorResponse -func (c *ClientWithResponses) RunConnectorWithBodyWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *RunConnectorParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RunConnectorResponse, error) { - rsp, err := c.RunConnectorWithBody(ctx, spaceId, connectorId, params, contentType, body, reqEditors...) - if err != nil { - return nil, err - } - return ParseRunConnectorResponse(rsp) -} - -func (c *ClientWithResponses) RunConnectorWithResponse(ctx context.Context, spaceId SpaceId, connectorId ConnectorId, params *RunConnectorParams, body RunConnectorJSONRequestBody, reqEditors ...RequestEditorFn) (*RunConnectorResponse, error) { - rsp, err := c.RunConnector(ctx, spaceId, connectorId, params, body, reqEditors...) 
- if err != nil { - return nil, err - } - return ParseRunConnectorResponse(rsp) -} - -// GetConnectorTypesWithResponse request returning *GetConnectorTypesResponse -func (c *ClientWithResponses) GetConnectorTypesWithResponse(ctx context.Context, spaceId SpaceId, params *GetConnectorTypesParams, reqEditors ...RequestEditorFn) (*GetConnectorTypesResponse, error) { - rsp, err := c.GetConnectorTypes(ctx, spaceId, params, reqEditors...) - if err != nil { - return nil, err - } - return ParseGetConnectorTypesResponse(rsp) -} - -// GetConnectorsWithResponse request returning *GetConnectorsResponse -func (c *ClientWithResponses) GetConnectorsWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetConnectorsResponse, error) { - rsp, err := c.GetConnectors(ctx, spaceId, reqEditors...) - if err != nil { - return nil, err - } - return ParseGetConnectorsResponse(rsp) -} - -// LegacyGetConnectorTypesWithResponse request returning *LegacyGetConnectorTypesResponse -func (c *ClientWithResponses) LegacyGetConnectorTypesWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*LegacyGetConnectorTypesResponse, error) { - rsp, err := c.LegacyGetConnectorTypes(ctx, spaceId, reqEditors...) - if err != nil { - return nil, err - } - return ParseLegacyGetConnectorTypesResponse(rsp) -} - -// ParseLegacyGetConnectorsResponse parses an HTTP response from a LegacyGetConnectorsWithResponse call -func ParseLegacyGetConnectorsResponse(rsp *http.Response) (*LegacyGetConnectorsResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &LegacyGetConnectorsResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest []ActionResponseProperties - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - } - - return response, nil -} - -// ParseLegacyCreateConnectorResponse parses an HTTP response from a LegacyCreateConnectorWithResponse call -func ParseLegacyCreateConnectorResponse(rsp *http.Response) (*LegacyCreateConnectorResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &LegacyCreateConnectorResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest N200Actions - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - } - - return response, nil -} - -// ParseLegacyDeleteConnectorResponse parses an HTTP response from a LegacyDeleteConnectorWithResponse call -func ParseLegacyDeleteConnectorResponse(rsp *http.Response) (*LegacyDeleteConnectorResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := 
&LegacyDeleteConnectorResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - } - - return response, nil -} - -// ParseLegacyGetConnectorResponse parses an HTTP response from a LegacyGetConnectorWithResponse call -func ParseLegacyGetConnectorResponse(rsp *http.Response) (*LegacyGetConnectorResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &LegacyGetConnectorResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest N200Actions - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - } - - return response, nil -} - -// ParseLegacyUpdateConnectorResponse parses an HTTP response from a LegacyUpdateConnectorWithResponse call -func ParseLegacyUpdateConnectorResponse(rsp *http.Response) (*LegacyUpdateConnectorResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &LegacyUpdateConnectorResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest N200Actions - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest ObjectNotFoundError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - - } - - return response, nil -} - -// ParseLegacyRunConnectorResponse parses an HTTP response from a LegacyRunConnectorWithResponse call -func ParseLegacyRunConnectorResponse(rsp *http.Response) (*LegacyRunConnectorResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &LegacyRunConnectorResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest LegacyRunConnectorGeneralResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - } - - return response, nil -} - -// ParseCreateConnectorResponse parses an HTTP response from a CreateConnectorWithResponse call -func ParseCreateConnectorResponse(rsp *http.Response) (*CreateConnectorResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &CreateConnectorResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest ConnectorResponseProperties - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest BadRequestError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - } - - return response, nil -} - -// ParseDeleteConnectorResponse parses an HTTP response from a DeleteConnectorWithResponse call -func ParseDeleteConnectorResponse(rsp *http.Response) (*DeleteConnectorResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &DeleteConnectorResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest struct { - Error *string `json:"error,omitempty"` - Message *string `json:"message,omitempty"` - StatusCode *int `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - - } - - return response, nil -} - -// ParseGetConnectorResponse parses an HTTP response from a GetConnectorWithResponse call -func ParseGetConnectorResponse(rsp *http.Response) (*GetConnectorResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &GetConnectorResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest ConnectorResponseProperties - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest struct { - Error *string `json:"error,omitempty"` - Message *string `json:"message,omitempty"` - StatusCode *int `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - - } - - return response, nil -} - -// ParseUpdateConnectorResponse parses an HTTP response from a UpdateConnectorWithResponse call -func ParseUpdateConnectorResponse(rsp *http.Response) (*UpdateConnectorResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &UpdateConnectorResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest ConnectorResponseProperties - if 
err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest BadRequestError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest ObjectNotFoundError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - - } - - return response, nil -} - -// ParseRunConnectorResponse parses an HTTP response from a RunConnectorWithResponse call -func ParseRunConnectorResponse(rsp *http.Response) (*RunConnectorResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &RunConnectorResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest RunConnectorGeneralResponse - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - } - - return response, nil -} - -// ParseGetConnectorTypesResponse parses an HTTP response from a GetConnectorTypesWithResponse call -func ParseGetConnectorTypesResponse(rsp *http.Response) (*GetConnectorTypesResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &GetConnectorTypesResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest []struct { - // Enabled Indicates whether the connector type is enabled in Kibana. - Enabled *bool `json:"enabled,omitempty"` - - // EnabledInConfig Indicates whether the connector type is enabled in the Kibana `.yml` file. - EnabledInConfig *bool `json:"enabled_in_config,omitempty"` - - // EnabledInLicense Indicates whether the connector is enabled in the license. - EnabledInLicense *bool `json:"enabled_in_license,omitempty"` - - // Id The type of connector. For example, `.email`, `.index`, `.jira`, `.opsgenie`, or `.server-log`. - Id *ConnectorTypes `json:"id,omitempty"` - - // MinimumLicenseRequired The license that is required to use the connector type. - MinimumLicenseRequired *string `json:"minimum_license_required,omitempty"` - - // Name The name of the connector type. - Name *string `json:"name,omitempty"` - - // SupportedFeatureIds The Kibana features that are supported by the connector type. 
- SupportedFeatureIds *[]Features `json:"supported_feature_ids,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - } - - return response, nil -} - -// ParseGetConnectorsResponse parses an HTTP response from a GetConnectorsWithResponse call -func ParseGetConnectorsResponse(rsp *http.Response) (*GetConnectorsResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &GetConnectorsResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest []struct { - // Config The configuration for the connector. Configuration properties vary depending on the connector type. - Config *map[string]interface{} `json:"config"` - - // ConnectorTypeId The type of connector. For example, `.email`, `.index`, `.jira`, `.opsgenie`, or `.server-log`. - ConnectorTypeId ConnectorTypes `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector type is deprecated. - IsDeprecated *IsDeprecated `json:"is_deprecated,omitempty"` - - // IsMissingSecrets Indicates whether secrets are missing for the connector. Secrets configuration properties vary depending on the connector type. - IsMissingSecrets *IsMissingSecrets `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether it is a preconfigured connector. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured IsPreconfigured `json:"is_preconfigured"` - - // Name The display name for the connector. - Name string `json:"name"` - - // ReferencedByCount Indicates the number of saved objects that reference the connector. If `is_preconfigured` is true, this value is not calculated. - ReferencedByCount int `json:"referenced_by_count"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - } - - return response, nil -} - -// ParseLegacyGetConnectorTypesResponse parses an HTTP response from a LegacyGetConnectorTypesWithResponse call -func ParseLegacyGetConnectorTypesResponse(rsp *http.Response) (*LegacyGetConnectorTypesResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &LegacyGetConnectorTypesResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest []struct { - // Enabled Indicates whether the connector type is enabled in Kibana. - Enabled *bool `json:"enabled,omitempty"` - - // EnabledInConfig Indicates whether the connector type is enabled in the Kibana `.yml` file. - EnabledInConfig *bool `json:"enabledInConfig,omitempty"` - - // EnabledInLicense Indicates whether the connector is enabled in the license. 
- EnabledInLicense *bool `json:"enabledInLicense,omitempty"` - - // Id The unique identifier for the connector type. - Id *string `json:"id,omitempty"` - - // MinimumLicenseRequired The license that is required to use the connector type. - MinimumLicenseRequired *string `json:"minimumLicenseRequired,omitempty"` - - // Name The name of the connector type. - Name *string `json:"name,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest AuthorizationError - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - } - - return response, nil -} diff --git a/generated/connectors/connectors.go b/generated/connectors/connectors.go deleted file mode 100644 index 8b753538a..000000000 --- a/generated/connectors/connectors.go +++ /dev/null @@ -1,3 +0,0 @@ -package connectors - -//go:generate go tool github.com/oapi-codegen/oapi-codegen/v2/cmd/oapi-codegen -package connectors -o ./connectors.gen.go -generate "types,client" ./bundled.yaml diff --git a/generated/kbapi/kibana.gen.go b/generated/kbapi/kibana.gen.go index 52a36324e..5eff5b6fb 100644 --- a/generated/kbapi/kibana.gen.go +++ b/generated/kbapi/kibana.gen.go @@ -1,6 +1,6 @@ // Package kbapi provides primitives to interact with the openapi HTTP API. // -// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.4.1 DO NOT EDIT. +// Code generated by github.com/oapi-codegen/oapi-codegen/v2 version v2.5.0 DO NOT EDIT. package kbapi import ( @@ -52,6 +52,71 @@ const ( Inactive AgentPolicyStatus = "inactive" ) +// Defines values for AuthType. +const ( + WebhookAuthenticationBasic AuthType = "webhook-authentication-basic" + WebhookAuthenticationSsl AuthType = "webhook-authentication-ssl" +) + +// Defines values for CasesWebhookConfigCreateCommentMethod. +const ( + CasesWebhookConfigCreateCommentMethodPatch CasesWebhookConfigCreateCommentMethod = "patch" + CasesWebhookConfigCreateCommentMethodPost CasesWebhookConfigCreateCommentMethod = "post" + CasesWebhookConfigCreateCommentMethodPut CasesWebhookConfigCreateCommentMethod = "put" +) + +// Defines values for CasesWebhookConfigCreateIncidentMethod. +const ( + CasesWebhookConfigCreateIncidentMethodPatch CasesWebhookConfigCreateIncidentMethod = "patch" + CasesWebhookConfigCreateIncidentMethodPost CasesWebhookConfigCreateIncidentMethod = "post" + CasesWebhookConfigCreateIncidentMethodPut CasesWebhookConfigCreateIncidentMethod = "put" +) + +// Defines values for CasesWebhookConfigUpdateIncidentMethod. +const ( + CasesWebhookConfigUpdateIncidentMethodPatch CasesWebhookConfigUpdateIncidentMethod = "patch" + CasesWebhookConfigUpdateIncidentMethodPost CasesWebhookConfigUpdateIncidentMethod = "post" + CasesWebhookConfigUpdateIncidentMethodPut CasesWebhookConfigUpdateIncidentMethod = "put" +) + +// Defines values for CertType. +const ( + SslCrtKey CertType = "ssl-crt-key" + SslPfx CertType = "ssl-pfx" +) + +// Defines values for EmailConfigService. +const ( + EmailConfigServiceElasticCloud EmailConfigService = "elastic_cloud" + EmailConfigServiceExchangeServer EmailConfigService = "exchange_server" + EmailConfigServiceGmail EmailConfigService = "gmail" + EmailConfigServiceOther EmailConfigService = "other" + EmailConfigServiceOutlook365 EmailConfigService = "outlook365" + EmailConfigServiceSes EmailConfigService = "ses" +) + +// Defines values for GenaiAzureConfigApiProvider. 
+const ( + AzureOpenAI GenaiAzureConfigApiProvider = "Azure OpenAI" +) + +// Defines values for GenaiOpenaiConfigApiProvider. +const ( + OpenAI GenaiOpenaiConfigApiProvider = "OpenAI" +) + +// Defines values for GenaiOpenaiOtherConfigApiProvider. +const ( + GenaiOpenaiOtherConfigApiProviderOther GenaiOpenaiOtherConfigApiProvider = "Other" +) + +// Defines values for GenaiOpenaiOtherConfigVerificationMode. +const ( + GenaiOpenaiOtherConfigVerificationModeCertificate GenaiOpenaiOtherConfigVerificationMode = "certificate" + GenaiOpenaiOtherConfigVerificationModeFull GenaiOpenaiOtherConfigVerificationMode = "full" + GenaiOpenaiOtherConfigVerificationModeNone GenaiOpenaiOtherConfigVerificationMode = "none" +) + // Defines values for NewOutputElasticsearchPreset. const ( NewOutputElasticsearchPresetBalanced NewOutputElasticsearchPreset = "balanced" @@ -415,6 +480,13 @@ const ( ServerHostSslClientAuthRequired ServerHostSslClientAuth = "required" ) +// Defines values for SwimlaneConfigConnectorType. +const ( + Alerts SwimlaneConfigConnectorType = "alerts" + All SwimlaneConfigConnectorType = "all" + Cases SwimlaneConfigConnectorType = "cases" +) + // Defines values for UpdateOutputElasticsearchPreset. const ( UpdateOutputElasticsearchPresetBalanced UpdateOutputElasticsearchPreset = "balanced" @@ -498,6 +570,19 @@ const ( UpdateOutputSslVerificationModeStrict UpdateOutputSslVerificationMode = "strict" ) +// Defines values for VerificationMode. +const ( + VerificationModeCertificate VerificationMode = "certificate" + VerificationModeFull VerificationMode = "full" + VerificationModeNone VerificationMode = "none" +) + +// Defines values for WebhookConfigMethod. +const ( + WebhookConfigMethodPost WebhookConfigMethod = "post" + WebhookConfigMethodPut WebhookConfigMethod = "put" +) + // Defines values for APMUIElasticApiVersion. const ( APMUIElasticApiVersionN20231031 APMUIElasticApiVersion = "2023-10-31" @@ -811,7 +896,7 @@ type DataViewsDataViewResponseObjectInner struct { Title *DataViewsTitle `json:"title,omitempty"` // TypeMeta When you use rollup indices, contains the field list for the rollup data view API endpoints. - TypeMeta *DataViewsTypemetaResponse `json:"typeMeta"` + TypeMeta *DataViewsTypemetaResponse `json:"typeMeta,omitempty"` Version *string `json:"version,omitempty"` } @@ -1023,15 +1108,15 @@ type SyntheticsPostParameterResponse struct { // AgentPolicy defines model for agent_policy. 
type AgentPolicy struct { AdvancedSettings *struct { - AgentDownloadTargetDirectory *interface{} `json:"agent_download_target_directory"` - AgentDownloadTimeout *interface{} `json:"agent_download_timeout"` - AgentLimitsGoMaxProcs *interface{} `json:"agent_limits_go_max_procs"` - AgentLoggingFilesInterval *interface{} `json:"agent_logging_files_interval"` - AgentLoggingFilesKeepfiles *interface{} `json:"agent_logging_files_keepfiles"` - AgentLoggingFilesRotateeverybytes *interface{} `json:"agent_logging_files_rotateeverybytes"` - AgentLoggingLevel *interface{} `json:"agent_logging_level"` - AgentLoggingMetricsPeriod *interface{} `json:"agent_logging_metrics_period"` - AgentLoggingToFiles *interface{} `json:"agent_logging_to_files"` + AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory,omitempty"` + AgentDownloadTimeout interface{} `json:"agent_download_timeout,omitempty"` + AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs,omitempty"` + AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval,omitempty"` + AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles,omitempty"` + AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes,omitempty"` + AgentLoggingLevel interface{} `json:"agent_logging_level,omitempty"` + AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period,omitempty"` + AgentLoggingToFiles interface{} `json:"agent_logging_to_files,omitempty"` } `json:"advanced_settings,omitempty"` AgentFeatures *[]struct { Enabled bool `json:"enabled"` @@ -1050,10 +1135,10 @@ type AgentPolicy struct { } `json:"resources,omitempty"` } `json:"agentless,omitempty"` Agents *float32 `json:"agents,omitempty"` - DataOutputId *string `json:"data_output_id"` + DataOutputId *string `json:"data_output_id,omitempty"` Description *string `json:"description,omitempty"` - DownloadSourceId *string `json:"download_source_id"` - FleetServerHostId *string `json:"fleet_server_host_id"` + DownloadSourceId *string `json:"download_source_id,omitempty"` + FleetServerHostId *string `json:"fleet_server_host_id,omitempty"` // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. GlobalDataTags *[]AgentPolicyGlobalDataTagsItem `json:"global_data_tags,omitempty"` @@ -1069,7 +1154,7 @@ type AgentPolicy struct { IsProtected bool `json:"is_protected"` // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled - KeepMonitoringAlive *bool `json:"keep_monitoring_alive"` + KeepMonitoringAlive *bool `json:"keep_monitoring_alive,omitempty"` MonitoringDiagnostics *struct { Limit *struct { Burst *float32 `json:"burst,omitempty"` @@ -1090,13 +1175,13 @@ type AgentPolicy struct { Host *string `json:"host,omitempty"` Port *float32 `json:"port,omitempty"` } `json:"monitoring_http,omitempty"` - MonitoringOutputId *string `json:"monitoring_output_id"` + MonitoringOutputId *string `json:"monitoring_output_id,omitempty"` MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` Name string `json:"name"` Namespace string `json:"namespace"` // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. 
- Overrides *map[string]interface{} `json:"overrides"` + Overrides *map[string]interface{} `json:"overrides,omitempty"` PackagePolicies *AgentPolicy_PackagePolicies `json:"package_policies,omitempty"` RequiredVersions *[]struct { // Percentage Target percentage of agents to auto upgrade @@ -1104,14 +1189,14 @@ type AgentPolicy struct { // Version Target version for automatic agent upgrade Version string `json:"version"` - } `json:"required_versions"` + } `json:"required_versions,omitempty"` Revision float32 `json:"revision"` SchemaVersion *string `json:"schema_version,omitempty"` SpaceIds *[]string `json:"space_ids,omitempty"` Status AgentPolicyStatus `json:"status"` // SupportsAgentless Indicates whether the agent policy supports agentless integrations. - SupportsAgentless *bool `json:"supports_agentless"` + SupportsAgentless *bool `json:"supports_agentless,omitempty"` UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` UnprivilegedAgents *float32 `json:"unprivileged_agents,omitempty"` UpdatedAt string `json:"updated_at"` @@ -1128,7 +1213,7 @@ type AgentPolicyPackagePolicies0 = []string // AgentPolicyPackagePolicies1 This field is present only when retrieving a single agent policy, or when retrieving a list of agent policies with the ?full=true parameter type AgentPolicyPackagePolicies1 = []struct { // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. - AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions"` + AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions,omitempty"` Agents *float32 `json:"agents,omitempty"` CreatedAt string `json:"created_at"` CreatedBy string `json:"created_by"` @@ -1146,12 +1231,12 @@ type AgentPolicyPackagePolicies1 = []struct { // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. Namespace *string `json:"namespace,omitempty"` - OutputId *string `json:"output_id"` + OutputId *string `json:"output_id,omitempty"` // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. Overrides *struct { Inputs *map[string]interface{} `json:"inputs,omitempty"` - } `json:"overrides"` + } `json:"overrides,omitempty"` Package *struct { ExperimentalDataStreamFeatures *[]struct { DataStream string `json:"data_stream"` @@ -1173,8 +1258,8 @@ type AgentPolicyPackagePolicies1 = []struct { } `json:"package,omitempty"` // PolicyId Agent policy ID where that package policy will be added - // Deprecated: - PolicyId *string `json:"policy_id"` + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id,omitempty"` PolicyIds *[]string `json:"policy_ids,omitempty"` Revision float32 `json:"revision"` SecretReferences *[]struct { @@ -1183,7 +1268,7 @@ type AgentPolicyPackagePolicies1 = []struct { SpaceIds *[]string `json:"spaceIds,omitempty"` // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. 
- SupportsAgentless *bool `json:"supports_agentless"` + SupportsAgentless *bool `json:"supports_agentless,omitempty"` UpdatedAt string `json:"updated_at"` UpdatedBy string `json:"updated_by"` Vars *AgentPolicy_PackagePolicies_1_Vars `json:"vars,omitempty"` @@ -1402,6 +1487,158 @@ type AgentPolicyGlobalDataTagsItem_Value struct { union json.RawMessage } +// AuthType The type of authentication to use: basic, SSL, or none. +type AuthType string + +// BedrockConfig Defines properties for connectors when type is `.bedrock`. +type BedrockConfig struct { + // ApiUrl The Amazon Bedrock request URL. + ApiUrl string `json:"apiUrl"` + + // DefaultModel The generative artificial intelligence model for Amazon Bedrock to use. Current support is for the Anthropic Claude models. + DefaultModel *string `json:"defaultModel,omitempty"` +} + +// BedrockSecrets Defines secrets for connectors when type is `.bedrock`. +type BedrockSecrets struct { + // AccessKey The AWS access key for authentication. + AccessKey string `json:"accessKey"` + + // Secret The AWS secret for authentication. + Secret string `json:"secret"` +} + +// Ca A base64 encoded version of the certificate authority file that the connector can trust to sign and validate certificates. This option is available for all authentication types. +type Ca = string + +// CasesWebhookConfig Defines properties for connectors when type is `.cases-webhook`. +type CasesWebhookConfig struct { + // AuthType The type of authentication to use: basic, SSL, or none. + AuthType *AuthType `json:"authType,omitempty"` + + // Ca A base64 encoded version of the certificate authority file that the connector can trust to sign and validate certificates. This option is available for all authentication types. + Ca *Ca `json:"ca,omitempty"` + + // CertType If the `authType` is `webhook-authentication-ssl`, specifies whether the certificate authentication data is in a CRT and key file format or a PFX file format. + CertType *CertType `json:"certType,omitempty"` + + // CreateCommentJson A JSON payload sent to the create comment URL to create a case comment. You can use variables to add Kibana Cases data to the payload. The required variable is `case.comment`. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated once the Mustache variables have been placed when the REST method runs. Manually ensure that the JSON is valid, disregarding the Mustache variables, so the later validation will pass. + CreateCommentJson *string `json:"createCommentJson,omitempty"` + + // CreateCommentMethod The REST API HTTP request method to create a case comment in the third-party system. Valid values are `patch`, `post`, and `put`. + CreateCommentMethod *CasesWebhookConfigCreateCommentMethod `json:"createCommentMethod,omitempty"` + + // CreateCommentUrl The REST API URL to create a case comment by ID in the third-party system. You can use a variable to add the external system ID to the URL. If you are using the `xpack.actions.allowedHosts setting`, add the hostname to the allowed hosts. + CreateCommentUrl *string `json:"createCommentUrl,omitempty"` + + // CreateIncidentJson A JSON payload sent to the create case URL to create a case. You can use variables to add case data to the payload. Required variables are `case.title` and `case.description`. 
Due to Mustache template variables (which is the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid to avoid future validation errors; disregard Mustache variables during your review. + CreateIncidentJson string `json:"createIncidentJson"` + + // CreateIncidentMethod The REST API HTTP request method to create a case in the third-party system. Valid values are `patch`, `post`, and `put`. + CreateIncidentMethod *CasesWebhookConfigCreateIncidentMethod `json:"createIncidentMethod,omitempty"` + + // CreateIncidentResponseKey The JSON key in the create external case response that contains the case ID. + CreateIncidentResponseKey string `json:"createIncidentResponseKey"` + + // CreateIncidentUrl The REST API URL to create a case in the third-party system. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + CreateIncidentUrl string `json:"createIncidentUrl"` + + // GetIncidentResponseExternalTitleKey The JSON key in get external case response that contains the case title. + GetIncidentResponseExternalTitleKey string `json:"getIncidentResponseExternalTitleKey"` + + // GetIncidentUrl The REST API URL to get the case by ID from the third-party system. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. You can use a variable to add the external system ID to the URL. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid, disregarding the Mustache variables, so the later validation will pass. + GetIncidentUrl string `json:"getIncidentUrl"` + + // HasAuth If true, a username and password for login type authentication must be provided. + HasAuth *HasAuth `json:"hasAuth,omitempty"` + + // Headers A set of key-value pairs sent as headers with the request URLs for the create case, update case, get case, and create comment methods. + Headers *string `json:"headers,omitempty"` + + // UpdateIncidentJson The JSON payload sent to the update case URL to update the case. You can use variables to add Kibana Cases data to the payload. Required variables are `case.title` and `case.description`. Due to Mustache template variables (which is the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid to avoid future validation errors; disregard Mustache variables during your review. + UpdateIncidentJson string `json:"updateIncidentJson"` + + // UpdateIncidentMethod The REST API HTTP request method to update the case in the third-party system. Valid values are `patch`, `post`, and `put`. + UpdateIncidentMethod *CasesWebhookConfigUpdateIncidentMethod `json:"updateIncidentMethod,omitempty"` + + // UpdateIncidentUrl The REST API URL to update the case by ID in the third-party system. You can use a variable to add the external system ID to the URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. 
+ UpdateIncidentUrl string `json:"updateIncidentUrl"` + + // VerificationMode Controls the verification of certificates. Use `full` to validate that the certificate has an issue date within the `not_before` and `not_after` dates, chains to a trusted certificate authority (CA), and has a hostname or IP address that matches the names within the certificate. Use `certificate` to validate the certificate and verify that it is signed by a trusted authority; this option does not check the certificate hostname. Use `none` to skip certificate validation. + VerificationMode *VerificationMode `json:"verificationMode,omitempty"` + + // ViewIncidentUrl The URL to view the case in the external system. You can use variables to add the external system ID or external system title to the URL. + ViewIncidentUrl string `json:"viewIncidentUrl"` +} + +// CasesWebhookConfigCreateCommentMethod The REST API HTTP request method to create a case comment in the third-party system. Valid values are `patch`, `post`, and `put`. +type CasesWebhookConfigCreateCommentMethod string + +// CasesWebhookConfigCreateIncidentMethod The REST API HTTP request method to create a case in the third-party system. Valid values are `patch`, `post`, and `put`. +type CasesWebhookConfigCreateIncidentMethod string + +// CasesWebhookConfigUpdateIncidentMethod The REST API HTTP request method to update the case in the third-party system. Valid values are `patch`, `post`, and `put`. +type CasesWebhookConfigUpdateIncidentMethod string + +// CasesWebhookSecrets defines model for cases_webhook_secrets. +type CasesWebhookSecrets struct { + // Crt If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the CRT or CERT file. + Crt *Crt `json:"crt,omitempty"` + + // Key If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the KEY file. + Key *Key `json:"key,omitempty"` + + // Password The password for HTTP basic authentication. If `hasAuth` is set to `true` and and `authType` is `webhook-authentication-basic`, this property is required. + Password *string `json:"password,omitempty"` + + // Pfx If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-pfx`, it is a base64 encoded version of the PFX or P12 file. + Pfx *Pfx `json:"pfx,omitempty"` + + // User The username for HTTP basic authentication. If `hasAuth` is set to `true` and `authType` is `webhook-authentication-basic`, this property is required. + User *string `json:"user,omitempty"` +} + +// CertType If the `authType` is `webhook-authentication-ssl`, specifies whether the certificate authentication data is in a CRT and key file format or a PFX file format. +type CertType string + +// ConnectorResponse defines model for connector_response. +type ConnectorResponse struct { + Config *map[string]interface{} `json:"config,omitempty"` + + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` + + // Id The identifier for the connector. + Id string `json:"id"` + + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` + + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. 
+ IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. + IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. + Name string `json:"name"` +} + +// CreateConnectorConfig The connector configuration details. +type CreateConnectorConfig struct { + AdditionalProperties map[string]interface{} `json:"-"` + union json.RawMessage +} + +// CreateConnectorSecrets defines model for create_connector_secrets. +type CreateConnectorSecrets struct { + AdditionalProperties map[string]interface{} `json:"-"` + union json.RawMessage +} + // CreateParamResponse defines model for create_param_response. type CreateParamResponse struct { union json.RawMessage @@ -1410,6 +1647,103 @@ type CreateParamResponse struct { // CreateParamResponse0 defines model for . type CreateParamResponse0 = []SyntheticsPostParameterResponse +// CrowdstrikeConfig Defines config properties for connectors when type is `.crowdstrike`. +type CrowdstrikeConfig struct { + // Url The CrowdStrike tenant URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + Url string `json:"url"` +} + +// CrowdstrikeSecrets Defines secrets for connectors when type is `.crowdstrike`. +type CrowdstrikeSecrets struct { + // ClientId The CrowdStrike API client identifier. + ClientId string `json:"clientId"` + + // ClientSecret The CrowdStrike API client secret to authenticate the `clientId`. + ClientSecret string `json:"clientSecret"` +} + +// Crt If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the CRT or CERT file. +type Crt = string + +// D3securityConfig Defines properties for connectors when type is `.d3security`. +type D3securityConfig struct { + // Url The D3 Security API request URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + Url string `json:"url"` +} + +// D3securitySecrets Defines secrets for connectors when type is `.d3security`. +type D3securitySecrets struct { + // Token The D3 Security token. + Token string `json:"token"` +} + +// DefenderConfig Defines properties for connectors when type is `.microsoft_defender_endpoint`. +type DefenderConfig struct { + // ApiUrl The URL of the Microsoft Defender for Endpoint API. If you are using the `xpack.actions.allowedHosts` setting, make sure the hostname is added to the allowed hosts. + ApiUrl string `json:"apiUrl"` + + // ClientId The application (client) identifier for your app in the Azure portal. + ClientId *string `json:"clientId,omitempty"` + + // OAuthScope The OAuth scopes or permission sets for the Microsoft Defender for Endpoint API. + OAuthScope *string `json:"oAuthScope,omitempty"` + + // OAuthServerUrl The OAuth server URL where authentication is sent and received for the Microsoft Defender for Endpoint API. + OAuthServerUrl *string `json:"oAuthServerUrl,omitempty"` + + // TenantId The tenant identifier for your app in the Azure portal. + TenantId *string `json:"tenantId,omitempty"` +} + +// DefenderSecrets Defines secrets for connectors when type is `..microsoft_defender_endpoint`. +type DefenderSecrets struct { + // ClientSecret The client secret for your app in the Azure portal. + ClientSecret string `json:"clientSecret"` +} + +// EmailConfig Defines properties for connectors when type is `.email`. 
+type EmailConfig struct { + // ClientId The client identifier, which is a part of OAuth 2.0 client credentials authentication, in GUID format. If `service` is `exchange_server`, this property is required. + ClientId *string `json:"clientId,omitempty"` + + // From The from address for all emails sent by the connector. It must be specified in `user@host-name` format. + From string `json:"from"` + + // HasAuth Specifies whether a user and password are required inside the secrets configuration. + HasAuth *bool `json:"hasAuth,omitempty"` + + // Host The host name of the service provider. If the `service` is `elastic_cloud` (for Elastic Cloud notifications) or one of Nodemailer's well-known email service providers, this property is ignored. If `service` is `other`, this property must be defined. + Host *string `json:"host,omitempty"` + OauthTokenUrl *string `json:"oauthTokenUrl,omitempty"` + + // Port The port to connect to on the service provider. If the `service` is `elastic_cloud` (for Elastic Cloud notifications) or one of Nodemailer's well-known email service providers, this property is ignored. If `service` is `other`, this property must be defined. + Port *int `json:"port,omitempty"` + + // Secure Specifies whether the connection to the service provider will use TLS. If the `service` is `elastic_cloud` (for Elastic Cloud notifications) or one of Nodemailer's well-known email service providers, this property is ignored. + Secure *bool `json:"secure,omitempty"` + + // Service The name of the email service. + Service *EmailConfigService `json:"service,omitempty"` + + // TenantId The tenant identifier, which is part of OAuth 2.0 client credentials authentication, in GUID format. If `service` is `exchange_server`, this property is required. + TenantId *string `json:"tenantId,omitempty"` +} + +// EmailConfigService The name of the email service. +type EmailConfigService string + +// EmailSecrets Defines secrets for connectors when type is `.email`. +type EmailSecrets struct { + // ClientSecret The Microsoft Exchange Client secret for OAuth 2.0 client credentials authentication. It must be URL-encoded. If `service` is `exchange_server`, this property is required. + ClientSecret *string `json:"clientSecret,omitempty"` + + // Password The password for HTTP basic authentication. If `hasAuth` is set to `true`, this property is required. + Password *string `json:"password,omitempty"` + + // User The username for HTTP basic authentication. If `hasAuth` is set to `true`, this property is required. + User *string `json:"user,omitempty"` +} + // EnrollmentApiKey defines model for enrollment_api_key. type EnrollmentApiKey struct { // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. @@ -1431,6 +1765,102 @@ type EnrollmentApiKey struct { PolicyId *string `json:"policy_id,omitempty"` } +// GeminiConfig Defines properties for connectors when type is `.gemini`. +type GeminiConfig struct { + // ApiUrl The Google Gemini request URL. + ApiUrl string `json:"apiUrl"` + + // DefaultModel The generative artificial intelligence model for Google Gemini to use. + DefaultModel *string `json:"defaultModel,omitempty"` + + // GcpProjectID The Google ProjectID that has Vertex AI endpoint enabled. + GcpProjectID string `json:"gcpProjectID"` + + // GcpRegion The GCP region where the Vertex AI endpoint enabled. + GcpRegion string `json:"gcpRegion"` +} + +// GeminiSecrets Defines secrets for connectors when type is `.gemini`. 
+type GeminiSecrets struct { + // CredentialsJson The service account credentials JSON file. The service account should have Vertex AI user IAM role assigned to it. + CredentialsJson string `json:"credentialsJson"` +} + +// GenaiAzureConfig Defines properties for connectors when type is `.gen-ai` and the API provider is `Azure OpenAI`. +type GenaiAzureConfig struct { + // ApiProvider The OpenAI API provider. + ApiProvider GenaiAzureConfigApiProvider `json:"apiProvider"` + + // ApiUrl The OpenAI API endpoint. + ApiUrl string `json:"apiUrl"` +} + +// GenaiAzureConfigApiProvider The OpenAI API provider. +type GenaiAzureConfigApiProvider string + +// GenaiOpenaiConfig Defines properties for connectors when type is `.gen-ai` and the API provider is `OpenAI`. +type GenaiOpenaiConfig struct { + // ApiProvider The OpenAI API provider. + ApiProvider GenaiOpenaiConfigApiProvider `json:"apiProvider"` + + // ApiUrl The OpenAI API endpoint. + ApiUrl string `json:"apiUrl"` + + // DefaultModel The default model to use for requests. + DefaultModel *string `json:"defaultModel,omitempty"` +} + +// GenaiOpenaiConfigApiProvider The OpenAI API provider. +type GenaiOpenaiConfigApiProvider string + +// GenaiOpenaiOtherConfig Defines properties for connectors when type is `.gen-ai` and the API provider is `Other` (OpenAI-compatible service), including optional PKI authentication. +type GenaiOpenaiOtherConfig struct { + // ApiProvider The OpenAI API provider. + ApiProvider GenaiOpenaiOtherConfigApiProvider `json:"apiProvider"` + + // ApiUrl The OpenAI-compatible API endpoint. + ApiUrl string `json:"apiUrl"` + + // CaData PEM-encoded CA certificate content. + CaData *string `json:"caData,omitempty"` + + // CertificateData PEM-encoded certificate content. + CertificateData *string `json:"certificateData,omitempty"` + + // DefaultModel The default model to use for requests. + DefaultModel string `json:"defaultModel"` + + // Headers Custom headers to include in requests. + Headers *map[string]string `json:"headers,omitempty"` + + // PrivateKeyData PEM-encoded private key content. + PrivateKeyData *string `json:"privateKeyData,omitempty"` + + // VerificationMode SSL verification mode for PKI authentication. + VerificationMode *GenaiOpenaiOtherConfigVerificationMode `json:"verificationMode,omitempty"` +} + +// GenaiOpenaiOtherConfigApiProvider The OpenAI API provider. +type GenaiOpenaiOtherConfigApiProvider string + +// GenaiOpenaiOtherConfigVerificationMode SSL verification mode for PKI authentication. +type GenaiOpenaiOtherConfigVerificationMode string + +// GenaiSecrets Defines secrets for connectors when type is `.gen-ai`. Supports both API key authentication (OpenAI, Azure OpenAI, and `Other`) and PKI authentication (`Other` provider only). PKI fields must be base64-encoded PEM content. +type GenaiSecrets struct { + // ApiKey The API key for authentication. For OpenAI and Azure OpenAI providers, it is required. For the `Other` provider, it is required if you do not use PKI authentication. With PKI, you can also optionally include an API key if the OpenAI-compatible service supports or requires one. + ApiKey *string `json:"apiKey,omitempty"` + + // CaData Base64-encoded PEM CA certificate content for PKI authentication (Other provider only). Optional. + CaData *string `json:"caData,omitempty"` + + // CertificateData Base64-encoded PEM certificate content for PKI authentication (Other provider only). Required for PKI. 
+ CertificateData *string `json:"certificateData,omitempty"` + + // PrivateKeyData Base64-encoded PEM private key content for PKI authentication (Other provider only). Required for PKI. + PrivateKeyData *string `json:"privateKeyData,omitempty"` +} + // GetDataViewsResponseItem defines model for get_data_views_response_item. type GetDataViewsResponseItem struct { Id *string `json:"id,omitempty"` @@ -1440,12 +1870,48 @@ type GetDataViewsResponseItem struct { TypeMeta *map[string]interface{} `json:"typeMeta,omitempty"` } +// HasAuth If true, a username and password for login type authentication must be provided. +type HasAuth = bool + +// IndexConfig Defines properties for connectors when type is `.index`. +type IndexConfig struct { + // ExecutionTimeField A field that indicates when the document was indexed. + ExecutionTimeField *string `json:"executionTimeField,omitempty"` + + // Index The Elasticsearch index to be written to. + Index string `json:"index"` + + // Refresh The refresh policy for the write request, which affects when changes are made visible to search. Refer to the refresh setting for Elasticsearch document APIs. + Refresh *bool `json:"refresh,omitempty"` +} + +// JiraConfig Defines properties for connectors when type is `.jira`. +type JiraConfig struct { + // ApiUrl The Jira instance URL. + ApiUrl string `json:"apiUrl"` + + // ProjectKey The Jira project key. + ProjectKey string `json:"projectKey"` +} + +// JiraSecrets Defines secrets for connectors when type is `.jira`. +type JiraSecrets struct { + // ApiToken The Jira API authentication token for HTTP basic authentication. + ApiToken string `json:"apiToken"` + + // Email The account email for HTTP Basic authentication. + Email string `json:"email"` +} + +// Key If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the KEY file. +type Key = string + // NewOutputElasticsearch defines model for new_output_elasticsearch. 
type NewOutputElasticsearch struct { AllowEdit *[]string `json:"allow_edit,omitempty"` CaSha256 *string `json:"ca_sha256,omitempty"` CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml"` + ConfigYaml *string `json:"config_yaml,omitempty"` Hosts []string `json:"hosts"` Id *string `json:"id,omitempty"` IsDefault *bool `json:"is_default,omitempty"` @@ -1494,7 +1960,7 @@ type NewOutputKafka struct { ClientId *string `json:"client_id,omitempty"` Compression *NewOutputKafkaCompression `json:"compression,omitempty"` CompressionLevel interface{} `json:"compression_level"` - ConfigYaml *string `json:"config_yaml"` + ConfigYaml *string `json:"config_yaml,omitempty"` ConnectionType interface{} `json:"connection_type"` Hash *struct { Hash *string `json:"hash,omitempty"` @@ -1524,7 +1990,7 @@ type NewOutputKafka struct { } `json:"round_robin,omitempty"` Sasl *struct { Mechanism *NewOutputKafkaSaslMechanism `json:"mechanism,omitempty"` - } `json:"sasl"` + } `json:"sasl,omitempty"` Secrets *struct { Password *NewOutputKafka_Secrets_Password `json:"password,omitempty"` Ssl *struct { @@ -1589,7 +2055,7 @@ type NewOutputLogstash struct { AllowEdit *[]string `json:"allow_edit,omitempty"` CaSha256 *string `json:"ca_sha256,omitempty"` CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml"` + ConfigYaml *string `json:"config_yaml,omitempty"` Hosts []string `json:"hosts"` Id *string `json:"id,omitempty"` IsDefault *bool `json:"is_default,omitempty"` @@ -1629,15 +2095,15 @@ type NewOutputRemoteElasticsearch struct { AllowEdit *[]string `json:"allow_edit,omitempty"` CaSha256 *string `json:"ca_sha256,omitempty"` CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml"` + ConfigYaml *string `json:"config_yaml,omitempty"` Hosts []string `json:"hosts"` Id *string `json:"id,omitempty"` IsDefault *bool `json:"is_default,omitempty"` IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` IsInternal *bool `json:"is_internal,omitempty"` IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - KibanaApiKey *string `json:"kibana_api_key"` - KibanaUrl *string `json:"kibana_url"` + KibanaApiKey *string `json:"kibana_api_key,omitempty"` + KibanaUrl *string `json:"kibana_url,omitempty"` Name string `json:"name"` Preset *NewOutputRemoteElasticsearchPreset `json:"preset,omitempty"` ProxyId *string `json:"proxy_id,omitempty"` @@ -1647,7 +2113,7 @@ type NewOutputRemoteElasticsearch struct { Key *NewOutputRemoteElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` } `json:"ssl,omitempty"` } `json:"secrets,omitempty"` - ServiceToken *string `json:"service_token"` + ServiceToken *string `json:"service_token,omitempty"` Shipper *NewOutputShipper `json:"shipper,omitempty"` Ssl *NewOutputSsl `json:"ssl,omitempty"` SyncIntegrations *bool `json:"sync_integrations,omitempty"` @@ -1689,16 +2155,16 @@ type NewOutputRemoteElasticsearchType string // NewOutputShipper defines model for new_output_shipper. 
type NewOutputShipper struct { - CompressionLevel *float32 `json:"compression_level"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled"` - DiskQueueEnabled *bool `json:"disk_queue_enabled"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size"` - DiskQueuePath *string `json:"disk_queue_path"` - Loadbalance *bool `json:"loadbalance"` - MaxBatchBytes *float32 `json:"max_batch_bytes"` - MemQueueEvents *float32 `json:"mem_queue_events"` - QueueFlushTimeout *float32 `json:"queue_flush_timeout"` + CompressionLevel *float32 `json:"compression_level,omitempty"` + DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` + DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` + DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` + DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` + DiskQueuePath *string `json:"disk_queue_path,omitempty"` + Loadbalance *bool `json:"loadbalance,omitempty"` + MaxBatchBytes *float32 `json:"max_batch_bytes,omitempty"` + MemQueueEvents *float32 `json:"mem_queue_events,omitempty"` + QueueFlushTimeout *float32 `json:"queue_flush_timeout,omitempty"` } // NewOutputSsl defines model for new_output_ssl. @@ -1717,12 +2183,24 @@ type NewOutputUnion struct { union json.RawMessage } +// OpsgenieConfig Defines properties for connectors when type is `.opsgenie`. +type OpsgenieConfig struct { + // ApiUrl The Opsgenie URL. For example, `https://api.opsgenie.com` or `https://api.eu.opsgenie.com`. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + ApiUrl string `json:"apiUrl"` +} + +// OpsgenieSecrets Defines secrets for connectors when type is `.opsgenie`. +type OpsgenieSecrets struct { + // ApiKey The Opsgenie API authentication key for HTTP Basic authentication. + ApiKey string `json:"apiKey"` +} + // OutputElasticsearch defines model for output_elasticsearch. 
type OutputElasticsearch struct { AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint"` - ConfigYaml *string `json:"config_yaml"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` Hosts []string `json:"hosts"` Id *string `json:"id,omitempty"` IsDefault *bool `json:"is_default,omitempty"` @@ -1731,10 +2209,10 @@ type OutputElasticsearch struct { IsPreconfigured *bool `json:"is_preconfigured,omitempty"` Name string `json:"name"` Preset *OutputElasticsearchPreset `json:"preset,omitempty"` - ProxyId *string `json:"proxy_id"` + ProxyId *string `json:"proxy_id,omitempty"` Secrets *OutputElasticsearch_Secrets `json:"secrets,omitempty"` - Shipper *OutputShipper `json:"shipper"` - Ssl *OutputSsl `json:"ssl"` + Shipper *OutputShipper `json:"shipper,omitempty"` + Ssl *OutputSsl `json:"ssl,omitempty"` Type OutputElasticsearchType `json:"type"` AdditionalProperties map[string]interface{} `json:"-"` } @@ -1776,12 +2254,12 @@ type OutputKafka struct { AllowEdit *[]string `json:"allow_edit,omitempty"` AuthType OutputKafkaAuthType `json:"auth_type"` BrokerTimeout *float32 `json:"broker_timeout,omitempty"` - CaSha256 *string `json:"ca_sha256"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` ClientId *string `json:"client_id,omitempty"` Compression *OutputKafkaCompression `json:"compression,omitempty"` CompressionLevel interface{} `json:"compression_level"` - ConfigYaml *string `json:"config_yaml"` + ConfigYaml *string `json:"config_yaml,omitempty"` ConnectionType interface{} `json:"connection_type"` Hash *OutputKafka_Hash `json:"hash,omitempty"` Headers *[]OutputKafka_Headers_Item `json:"headers,omitempty"` @@ -1795,14 +2273,14 @@ type OutputKafka struct { Name string `json:"name"` Partition *OutputKafkaPartition `json:"partition,omitempty"` Password interface{} `json:"password"` - ProxyId *string `json:"proxy_id"` + ProxyId *string `json:"proxy_id,omitempty"` Random *OutputKafka_Random `json:"random,omitempty"` RequiredAcks *OutputKafkaRequiredAcks `json:"required_acks,omitempty"` RoundRobin *OutputKafka_RoundRobin `json:"round_robin,omitempty"` - Sasl *OutputKafka_Sasl `json:"sasl"` + Sasl *OutputKafka_Sasl `json:"sasl,omitempty"` Secrets *OutputKafka_Secrets `json:"secrets,omitempty"` - Shipper *OutputShipper `json:"shipper"` - Ssl *OutputSsl `json:"ssl"` + Shipper *OutputShipper `json:"shipper,omitempty"` + Ssl *OutputSsl `json:"ssl,omitempty"` Timeout *float32 `json:"timeout,omitempty"` Topic *string `json:"topic,omitempty"` Type OutputKafkaType `json:"type"` @@ -1905,9 +2383,9 @@ type OutputKafkaType string // OutputLogstash defines model for output_logstash. 
type OutputLogstash struct { AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint"` - ConfigYaml *string `json:"config_yaml"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` Hosts []string `json:"hosts"` Id *string `json:"id,omitempty"` IsDefault *bool `json:"is_default,omitempty"` @@ -1915,10 +2393,10 @@ type OutputLogstash struct { IsInternal *bool `json:"is_internal,omitempty"` IsPreconfigured *bool `json:"is_preconfigured,omitempty"` Name string `json:"name"` - ProxyId *string `json:"proxy_id"` + ProxyId *string `json:"proxy_id,omitempty"` Secrets *OutputLogstash_Secrets `json:"secrets,omitempty"` - Shipper *OutputShipper `json:"shipper"` - Ssl *OutputSsl `json:"ssl"` + Shipper *OutputShipper `json:"shipper,omitempty"` + Ssl *OutputSsl `json:"ssl,omitempty"` Type OutputLogstashType `json:"type"` AdditionalProperties map[string]interface{} `json:"-"` } @@ -1955,24 +2433,24 @@ type OutputLogstashType string // OutputRemoteElasticsearch defines model for output_remote_elasticsearch. type OutputRemoteElasticsearch struct { AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint"` - ConfigYaml *string `json:"config_yaml"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` Hosts []string `json:"hosts"` Id *string `json:"id,omitempty"` IsDefault *bool `json:"is_default,omitempty"` IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` IsInternal *bool `json:"is_internal,omitempty"` IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - KibanaApiKey *string `json:"kibana_api_key"` - KibanaUrl *string `json:"kibana_url"` + KibanaApiKey *string `json:"kibana_api_key,omitempty"` + KibanaUrl *string `json:"kibana_url,omitempty"` Name string `json:"name"` Preset *OutputRemoteElasticsearchPreset `json:"preset,omitempty"` - ProxyId *string `json:"proxy_id"` + ProxyId *string `json:"proxy_id,omitempty"` Secrets *OutputRemoteElasticsearch_Secrets `json:"secrets,omitempty"` - ServiceToken *string `json:"service_token"` - Shipper *OutputShipper `json:"shipper"` - Ssl *OutputSsl `json:"ssl"` + ServiceToken *string `json:"service_token,omitempty"` + Shipper *OutputShipper `json:"shipper,omitempty"` + Ssl *OutputSsl `json:"ssl,omitempty"` SyncIntegrations *bool `json:"sync_integrations,omitempty"` SyncUninstalledIntegrations *bool `json:"sync_uninstalled_integrations,omitempty"` Type OutputRemoteElasticsearchType `json:"type"` @@ -2028,16 +2506,16 @@ type OutputRemoteElasticsearchType string // OutputShipper defines model for output_shipper. 
type OutputShipper struct { - CompressionLevel *float32 `json:"compression_level"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled"` - DiskQueueEnabled *bool `json:"disk_queue_enabled"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size"` - DiskQueuePath *string `json:"disk_queue_path"` - Loadbalance *bool `json:"loadbalance"` - MaxBatchBytes *float32 `json:"max_batch_bytes"` - MemQueueEvents *float32 `json:"mem_queue_events"` - QueueFlushTimeout *float32 `json:"queue_flush_timeout"` + CompressionLevel *float32 `json:"compression_level,omitempty"` + DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` + DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` + DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` + DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` + DiskQueuePath *string `json:"disk_queue_path,omitempty"` + Loadbalance *bool `json:"loadbalance,omitempty"` + MaxBatchBytes *float32 `json:"max_batch_bytes,omitempty"` + MemQueueEvents *float32 `json:"mem_queue_events,omitempty"` + QueueFlushTimeout *float32 `json:"queue_flush_timeout,omitempty"` AdditionalProperties map[string]interface{} `json:"-"` } @@ -2270,7 +2748,7 @@ type PackageInfo_InstallationInfo struct { Namespaces *[]string `json:"namespaces,omitempty"` Type string `json:"type"` UpdatedAt *string `json:"updated_at,omitempty"` - VerificationKeyId *string `json:"verification_key_id"` + VerificationKeyId *string `json:"verification_key_id,omitempty"` VerificationStatus PackageInfoInstallationInfoVerificationStatus `json:"verification_status"` Version string `json:"version"` AdditionalProperties map[string]interface{} `json:"-"` @@ -2502,7 +2980,7 @@ type PackageListItem_InstallationInfo struct { Namespaces *[]string `json:"namespaces,omitempty"` Type string `json:"type"` UpdatedAt *string `json:"updated_at,omitempty"` - VerificationKeyId *string `json:"verification_key_id"` + VerificationKeyId *string `json:"verification_key_id,omitempty"` VerificationStatus PackageListItemInstallationInfoVerificationStatus `json:"verification_status"` Version string `json:"version"` AdditionalProperties map[string]interface{} `json:"-"` @@ -2547,7 +3025,7 @@ type PackageListItem_Type struct { // PackagePolicy defines model for package_policy. type PackagePolicy struct { // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. - AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions"` + AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions,omitempty"` Agents *float32 `json:"agents,omitempty"` CreatedAt string `json:"created_at"` CreatedBy string `json:"created_by"` @@ -2567,12 +3045,12 @@ type PackagePolicy struct { // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. Namespace *string `json:"namespace,omitempty"` - OutputId *string `json:"output_id"` + OutputId *string `json:"output_id,omitempty"` // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. 
Overrides *struct { Inputs *map[string]interface{} `json:"inputs,omitempty"` - } `json:"overrides"` + } `json:"overrides,omitempty"` Package *struct { ExperimentalDataStreamFeatures *[]struct { DataStream string `json:"data_stream"` @@ -2594,15 +3072,15 @@ type PackagePolicy struct { } `json:"package,omitempty"` // PolicyId Agent policy ID where that package policy will be added - // Deprecated: - PolicyId *string `json:"policy_id"` + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id,omitempty"` PolicyIds *[]string `json:"policy_ids,omitempty"` Revision float32 `json:"revision"` SecretReferences *[]PackagePolicySecretRef `json:"secret_references,omitempty"` SpaceIds *[]string `json:"spaceIds,omitempty"` // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. - SupportsAgentless *bool `json:"supports_agentless"` + SupportsAgentless *bool `json:"supports_agentless,omitempty"` UpdatedAt string `json:"updated_at"` UpdatedBy string `json:"updated_by"` Vars *map[string]interface{} `json:"vars,omitempty"` @@ -2652,7 +3130,7 @@ type PackagePolicyRequest struct { Namespace *string `json:"namespace,omitempty"` OutputId *string `json:"output_id,omitempty"` Package PackagePolicyRequestPackage `json:"package"` - PolicyId *string `json:"policy_id"` + PolicyId *string `json:"policy_id,omitempty"` PolicyIds *[]string `json:"policy_ids,omitempty"` // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. @@ -2703,6 +3181,51 @@ type PackagePolicySecretRef struct { Id string `json:"id"` } +// PagerdutyConfig Defines properties for connectors when type is `.pagerduty`. +type PagerdutyConfig struct { + // ApiUrl The PagerDuty event URL. + ApiUrl *string `json:"apiUrl,omitempty"` +} + +// PagerdutySecrets Defines secrets for connectors when type is `.pagerduty`. +type PagerdutySecrets struct { + // RoutingKey A 32 character PagerDuty Integration Key for an integration on a service. + RoutingKey string `json:"routingKey"` +} + +// Pfx If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-pfx`, it is a base64 encoded version of the PFX or P12 file. +type Pfx = string + +// ResilientConfig Defines properties for connectors when type is `.resilient`. +type ResilientConfig struct { + // ApiUrl The IBM Resilient instance URL. + ApiUrl string `json:"apiUrl"` + + // OrgId The IBM Resilient organization ID. + OrgId string `json:"orgId"` +} + +// ResilientSecrets Defines secrets for connectors when type is `.resilient`. +type ResilientSecrets struct { + // ApiKeyId The authentication key ID for HTTP Basic authentication. + ApiKeyId string `json:"apiKeyId"` + + // ApiKeySecret The authentication key secret for HTTP Basic authentication. + ApiKeySecret string `json:"apiKeySecret"` +} + +// SentineloneConfig Defines properties for connectors when type is `.sentinelone`. +type SentineloneConfig struct { + // Url The SentinelOne tenant URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + Url string `json:"url"` +} + +// SentineloneSecrets Defines secrets for connectors when type is `.sentinelone`. +type SentineloneSecrets struct { + // Token The A SentinelOne API token. + Token string `json:"token"` +} + // ServerHost defines model for server_host. 
type ServerHost struct { HostUrls []string `json:"host_urls"` @@ -2711,7 +3234,7 @@ type ServerHost struct { IsInternal *bool `json:"is_internal,omitempty"` IsPreconfigured *bool `json:"is_preconfigured,omitempty"` Name string `json:"name"` - ProxyId *string `json:"proxy_id"` + ProxyId *string `json:"proxy_id,omitempty"` Secrets *struct { Ssl *struct { EsKey *ServerHost_Secrets_Ssl_EsKey `json:"es_key,omitempty"` @@ -2726,7 +3249,7 @@ type ServerHost struct { EsCertificateAuthorities *[]string `json:"es_certificate_authorities,omitempty"` EsKey *string `json:"es_key,omitempty"` Key *string `json:"key,omitempty"` - } `json:"ssl"` + } `json:"ssl,omitempty"` } // ServerHostSecretsSslEsKey0 defines model for . @@ -2758,65 +3281,329 @@ type ServerHost_Secrets_Ssl_Key struct { // ServerHostSslClientAuth defines model for ServerHost.Ssl.ClientAuth. type ServerHostSslClientAuth string -// UpdateOutputElasticsearch defines model for update_output_elasticsearch. -type UpdateOutputElasticsearch struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml"` - Hosts *[]string `json:"hosts,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Name *string `json:"name,omitempty"` - Preset *UpdateOutputElasticsearchPreset `json:"preset,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *struct { - Ssl *struct { - Key *UpdateOutputElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - Shipper *UpdateOutputShipper `json:"shipper,omitempty"` - Ssl *UpdateOutputSsl `json:"ssl,omitempty"` - Type *UpdateOutputElasticsearchType `json:"type,omitempty"` +// ServicenowConfig Defines properties for connectors when type is `.servicenow`. +type ServicenowConfig struct { + // ApiUrl The ServiceNow instance URL. + ApiUrl string `json:"apiUrl"` + + // ClientId The client ID assigned to your OAuth application. This property is required when `isOAuth` is `true`. + ClientId *string `json:"clientId,omitempty"` + + // IsOAuth The type of authentication to use. The default value is false, which means basic authentication is used instead of open authorization (OAuth). + IsOAuth *bool `json:"isOAuth,omitempty"` + + // JwtKeyId The key identifier assigned to the JWT verifier map of your OAuth application. This property is required when `isOAuth` is `true`. + JwtKeyId *string `json:"jwtKeyId,omitempty"` + + // UserIdentifierValue The identifier to use for OAuth authentication. This identifier should be the user field you selected when you created an OAuth JWT API endpoint for external clients in your ServiceNow instance. For example, if the selected user field is `Email`, the user identifier should be the user's email address. This property is required when `isOAuth` is `true`. + UserIdentifierValue *string `json:"userIdentifierValue,omitempty"` + + // UsesTableApi Determines whether the connector uses the Table API or the Import Set API. This property is supported only for ServiceNow ITSM and ServiceNow SecOps connectors. NOTE: If this property is set to `false`, the Elastic application should be installed in ServiceNow. 
+ UsesTableApi *bool `json:"usesTableApi,omitempty"` } -// UpdateOutputElasticsearchPreset defines model for UpdateOutputElasticsearch.Preset. -type UpdateOutputElasticsearchPreset string +// ServicenowItomConfig Defines properties for connectors when type is `.servicenow-itom`. +type ServicenowItomConfig struct { + // ApiUrl The ServiceNow instance URL. + ApiUrl string `json:"apiUrl"` -// UpdateOutputElasticsearchSecretsSslKey0 defines model for . -type UpdateOutputElasticsearchSecretsSslKey0 struct { - Id string `json:"id"` + // ClientId The client ID assigned to your OAuth application. This property is required when `isOAuth` is `true`. + ClientId *string `json:"clientId,omitempty"` + + // IsOAuth The type of authentication to use. The default value is false, which means basic authentication is used instead of open authorization (OAuth). + IsOAuth *bool `json:"isOAuth,omitempty"` + + // JwtKeyId The key identifier assigned to the JWT verifier map of your OAuth application. This property is required when `isOAuth` is `true`. + JwtKeyId *string `json:"jwtKeyId,omitempty"` + + // UserIdentifierValue The identifier to use for OAuth authentication. This identifier should be the user field you selected when you created an OAuth JWT API endpoint for external clients in your ServiceNow instance. For example, if the selected user field is `Email`, the user identifier should be the user's email address. This property is required when `isOAuth` is `true`. + UserIdentifierValue *string `json:"userIdentifierValue,omitempty"` } -// UpdateOutputElasticsearchSecretsSslKey1 defines model for . -type UpdateOutputElasticsearchSecretsSslKey1 = string +// ServicenowSecrets Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. +type ServicenowSecrets struct { + // ClientSecret The client secret assigned to your OAuth application. This property is required when `isOAuth` is `true`. + ClientSecret *string `json:"clientSecret,omitempty"` -// UpdateOutputElasticsearch_Secrets_Ssl_Key defines model for UpdateOutputElasticsearch.Secrets.Ssl.Key. -type UpdateOutputElasticsearch_Secrets_Ssl_Key struct { - union json.RawMessage + // Password The password for HTTP basic authentication. This property is required when `isOAuth` is `false`. + Password *string `json:"password,omitempty"` + + // PrivateKey The RSA private key that you created for use in ServiceNow. This property is required when `isOAuth` is `true`. + PrivateKey *string `json:"privateKey,omitempty"` + + // PrivateKeyPassword The password for the RSA private key. This property is required when `isOAuth` is `true` and you set a password on your private key. + PrivateKeyPassword *string `json:"privateKeyPassword,omitempty"` + + // Username The username for HTTP basic authentication. This property is required when `isOAuth` is `false`. + Username *string `json:"username,omitempty"` } -// UpdateOutputElasticsearchType defines model for UpdateOutputElasticsearch.Type. -type UpdateOutputElasticsearchType string +// SlackApiConfig Defines properties for connectors when type is `.slack_api`. +type SlackApiConfig struct { + // AllowedChannels A list of valid Slack channels. + AllowedChannels *[]struct { + // Id The Slack channel ID. + Id string `json:"id"` -// UpdateOutputKafka defines model for update_output_kafka. 
-type UpdateOutputKafka struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - AuthType *UpdateOutputKafkaAuthType `json:"auth_type,omitempty"` - BrokerTimeout *float32 `json:"broker_timeout,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ClientId *string `json:"client_id,omitempty"` - Compression *UpdateOutputKafkaCompression `json:"compression,omitempty"` - CompressionLevel interface{} `json:"compression_level"` - ConfigYaml *string `json:"config_yaml"` - ConnectionType interface{} `json:"connection_type"` - Hash *struct { - Hash *string `json:"hash,omitempty"` - Random *bool `json:"random,omitempty"` - } `json:"hash,omitempty"` + // Name The Slack channel name. + Name string `json:"name"` + } `json:"allowedChannels,omitempty"` +} + +// SlackApiSecrets Defines secrets for connectors when type is `.slack`. +type SlackApiSecrets struct { + // Token Slack bot user OAuth token. + Token string `json:"token"` +} + +// SwimlaneConfig Defines properties for connectors when type is `.swimlane`. +type SwimlaneConfig struct { + // ApiUrl The Swimlane instance URL. + ApiUrl string `json:"apiUrl"` + + // AppId The Swimlane application ID. + AppId string `json:"appId"` + + // ConnectorType The type of connector. Valid values are `all`, `alerts`, and `cases`. + ConnectorType SwimlaneConfigConnectorType `json:"connectorType"` + + // Mappings The field mapping. + Mappings *struct { + // AlertIdConfig Mapping for the alert ID. + AlertIdConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"alertIdConfig,omitempty"` + + // CaseIdConfig Mapping for the case ID. + CaseIdConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"caseIdConfig,omitempty"` + + // CaseNameConfig Mapping for the case name. + CaseNameConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"caseNameConfig,omitempty"` + + // CommentsConfig Mapping for the case comments. + CommentsConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"commentsConfig,omitempty"` + + // DescriptionConfig Mapping for the case description. + DescriptionConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. 
+ Name string `json:"name"` + } `json:"descriptionConfig,omitempty"` + + // RuleNameConfig Mapping for the name of the alert's rule. + RuleNameConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"ruleNameConfig,omitempty"` + + // SeverityConfig Mapping for the severity. + SeverityConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"severityConfig,omitempty"` + } `json:"mappings,omitempty"` +} + +// SwimlaneConfigConnectorType The type of connector. Valid values are `all`, `alerts`, and `cases`. +type SwimlaneConfigConnectorType string + +// SwimlaneSecrets Defines secrets for connectors when type is `.swimlane`. +type SwimlaneSecrets struct { + // ApiToken Swimlane API authentication token. + ApiToken *string `json:"apiToken,omitempty"` +} + +// TeamsSecrets Defines secrets for connectors when type is `.teams`. +type TeamsSecrets struct { + // WebhookUrl The URL of the incoming webhook. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + WebhookUrl string `json:"webhookUrl"` +} + +// ThehiveConfig Defines configuration properties for connectors when type is `.thehive`. +type ThehiveConfig struct { + // Organisation The organisation in TheHive that will contain the alerts or cases. By default, the connector uses the default organisation of the user account that created the API key. + Organisation *string `json:"organisation,omitempty"` + + // Url The instance URL in TheHive. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + Url string `json:"url"` +} + +// ThehiveSecrets Defines secrets for connectors when type is `.thehive`. +type ThehiveSecrets struct { + // ApiKey The API key for authentication in TheHive. + ApiKey string `json:"apiKey"` +} + +// TinesConfig Defines properties for connectors when type is `.tines`. +type TinesConfig struct { + // Url The Tines tenant URL. If you are using the `xpack.actions.allowedHosts` setting, make sure this hostname is added to the allowed hosts. + Url string `json:"url"` +} + +// TinesSecrets Defines secrets for connectors when type is `.tines`. +type TinesSecrets struct { + // Email The email used to sign in to Tines. + Email string `json:"email"` + + // Token The Tines API token. + Token string `json:"token"` +} + +// TorqConfig Defines properties for connectors when type is `.torq`. +type TorqConfig struct { + // WebhookIntegrationUrl The endpoint URL of the Elastic Security integration in Torq. + WebhookIntegrationUrl string `json:"webhookIntegrationUrl"` +} + +// TorqSecrets Defines secrets for connectors when type is `.torq`. +type TorqSecrets struct { + // Token The secret of the webhook authentication header. + Token string `json:"token"` +} + +// UpdateConnectorConfig The connector configuration details. 
+type UpdateConnectorConfig struct { + AdditionalProperties map[string]interface{} `json:"-"` + union json.RawMessage +} + +// UpdateConnectorSecrets defines model for update_connector_secrets. +type UpdateConnectorSecrets struct { + AdditionalProperties map[string]interface{} `json:"-"` + union json.RawMessage +} + +// UpdateOutputElasticsearch defines model for update_output_elasticsearch. +type UpdateOutputElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` + Hosts *[]string `json:"hosts,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name *string `json:"name,omitempty"` + Preset *UpdateOutputElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + Key *UpdateOutputElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + Type *UpdateOutputElasticsearchType `json:"type,omitempty"` +} + +// UpdateOutputElasticsearchPreset defines model for UpdateOutputElasticsearch.Preset. +type UpdateOutputElasticsearchPreset string + +// UpdateOutputElasticsearchSecretsSslKey0 defines model for . +type UpdateOutputElasticsearchSecretsSslKey0 struct { + Id string `json:"id"` +} + +// UpdateOutputElasticsearchSecretsSslKey1 defines model for . +type UpdateOutputElasticsearchSecretsSslKey1 = string + +// UpdateOutputElasticsearch_Secrets_Ssl_Key defines model for UpdateOutputElasticsearch.Secrets.Ssl.Key. +type UpdateOutputElasticsearch_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// UpdateOutputElasticsearchType defines model for UpdateOutputElasticsearch.Type. +type UpdateOutputElasticsearchType string + +// UpdateOutputKafka defines model for update_output_kafka. 
+type UpdateOutputKafka struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + AuthType *UpdateOutputKafkaAuthType `json:"auth_type,omitempty"` + BrokerTimeout *float32 `json:"broker_timeout,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ClientId *string `json:"client_id,omitempty"` + Compression *UpdateOutputKafkaCompression `json:"compression,omitempty"` + CompressionLevel interface{} `json:"compression_level"` + ConfigYaml *string `json:"config_yaml,omitempty"` + ConnectionType interface{} `json:"connection_type"` + Hash *struct { + Hash *string `json:"hash,omitempty"` + Random *bool `json:"random,omitempty"` + } `json:"hash,omitempty"` Headers *[]struct { Key string `json:"key"` Value string `json:"value"` @@ -2840,7 +3627,7 @@ type UpdateOutputKafka struct { } `json:"round_robin,omitempty"` Sasl *struct { Mechanism *UpdateOutputKafkaSaslMechanism `json:"mechanism,omitempty"` - } `json:"sasl"` + } `json:"sasl,omitempty"` Secrets *struct { Password *UpdateOutputKafka_Secrets_Password `json:"password,omitempty"` Ssl *struct { @@ -2905,7 +3692,7 @@ type UpdateOutputLogstash struct { AllowEdit *[]string `json:"allow_edit,omitempty"` CaSha256 *string `json:"ca_sha256,omitempty"` CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml"` + ConfigYaml *string `json:"config_yaml,omitempty"` Hosts *[]string `json:"hosts,omitempty"` IsDefault *bool `json:"is_default,omitempty"` IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` @@ -2944,14 +3731,14 @@ type UpdateOutputRemoteElasticsearch struct { AllowEdit *[]string `json:"allow_edit,omitempty"` CaSha256 *string `json:"ca_sha256,omitempty"` CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml"` + ConfigYaml *string `json:"config_yaml,omitempty"` Hosts *[]string `json:"hosts,omitempty"` IsDefault *bool `json:"is_default,omitempty"` IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` IsInternal *bool `json:"is_internal,omitempty"` IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - KibanaApiKey *string `json:"kibana_api_key"` - KibanaUrl *string `json:"kibana_url"` + KibanaApiKey *string `json:"kibana_api_key,omitempty"` + KibanaUrl *string `json:"kibana_url,omitempty"` Name *string `json:"name,omitempty"` Preset *UpdateOutputRemoteElasticsearchPreset `json:"preset,omitempty"` ProxyId *string `json:"proxy_id,omitempty"` @@ -2961,7 +3748,7 @@ type UpdateOutputRemoteElasticsearch struct { Key *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` } `json:"ssl,omitempty"` } `json:"secrets,omitempty"` - ServiceToken *string `json:"service_token"` + ServiceToken *string `json:"service_token,omitempty"` Shipper *UpdateOutputShipper `json:"shipper,omitempty"` Ssl *UpdateOutputSsl `json:"ssl,omitempty"` SyncIntegrations *bool `json:"sync_integrations,omitempty"` @@ -3003,16 +3790,16 @@ type UpdateOutputRemoteElasticsearchType string // UpdateOutputShipper defines model for update_output_shipper. 
type UpdateOutputShipper struct { - CompressionLevel *float32 `json:"compression_level"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled"` - DiskQueueEnabled *bool `json:"disk_queue_enabled"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size"` - DiskQueuePath *string `json:"disk_queue_path"` - Loadbalance *bool `json:"loadbalance"` - MaxBatchBytes *float32 `json:"max_batch_bytes"` - MemQueueEvents *float32 `json:"mem_queue_events"` - QueueFlushTimeout *float32 `json:"queue_flush_timeout"` + CompressionLevel *float32 `json:"compression_level,omitempty"` + DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` + DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` + DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` + DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` + DiskQueuePath *string `json:"disk_queue_path,omitempty"` + Loadbalance *bool `json:"loadbalance,omitempty"` + MaxBatchBytes *float32 `json:"max_batch_bytes,omitempty"` + MemQueueEvents *float32 `json:"mem_queue_events,omitempty"` + QueueFlushTimeout *float32 `json:"queue_flush_timeout,omitempty"` } // UpdateOutputSsl defines model for update_output_ssl. @@ -3031,6 +3818,78 @@ type UpdateOutputUnion struct { union json.RawMessage } +// VerificationMode Controls the verification of certificates. Use `full` to validate that the certificate has an issue date within the `not_before` and `not_after` dates, chains to a trusted certificate authority (CA), and has a hostname or IP address that matches the names within the certificate. Use `certificate` to validate the certificate and verify that it is signed by a trusted authority; this option does not check the certificate hostname. Use `none` to skip certificate validation. +type VerificationMode string + +// WebhookConfig Defines properties for connectors when type is `.webhook`. +type WebhookConfig struct { + // AuthType The type of authentication to use: basic, SSL, or none. + AuthType *AuthType `json:"authType,omitempty"` + + // Ca A base64 encoded version of the certificate authority file that the connector can trust to sign and validate certificates. This option is available for all authentication types. + Ca *Ca `json:"ca,omitempty"` + + // CertType If the `authType` is `webhook-authentication-ssl`, specifies whether the certificate authentication data is in a CRT and key file format or a PFX file format. + CertType *CertType `json:"certType,omitempty"` + + // HasAuth If true, a username and password for login type authentication must be provided. + HasAuth *HasAuth `json:"hasAuth,omitempty"` + + // Headers A set of key-value pairs sent as headers with the request. + Headers *map[string]interface{} `json:"headers,omitempty"` + + // Method The HTTP request method, either `post` or `put`. + Method *WebhookConfigMethod `json:"method,omitempty"` + + // Url The request URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + Url *string `json:"url,omitempty"` + + // VerificationMode Controls the verification of certificates. Use `full` to validate that the certificate has an issue date within the `not_before` and `not_after` dates, chains to a trusted certificate authority (CA), and has a hostname or IP address that matches the names within the certificate. 
Use `certificate` to validate the certificate and verify that it is signed by a trusted authority; this option does not check the certificate hostname. Use `none` to skip certificate validation. + VerificationMode *VerificationMode `json:"verificationMode,omitempty"` +} + +// WebhookConfigMethod The HTTP request method, either `post` or `put`. +type WebhookConfigMethod string + +// WebhookSecrets Defines secrets for connectors when type is `.webhook`. +type WebhookSecrets struct { + // Crt If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the CRT or CERT file. + Crt *Crt `json:"crt,omitempty"` + + // Key If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the KEY file. + Key *Key `json:"key,omitempty"` + + // Password The password for HTTP basic authentication or the passphrase for the SSL certificate files. If `hasAuth` is set to `true` and `authType` is `webhook-authentication-basic`, this property is required. + Password *string `json:"password,omitempty"` + + // Pfx If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-pfx`, it is a base64 encoded version of the PFX or P12 file. + Pfx *Pfx `json:"pfx,omitempty"` + + // User The username for HTTP basic authentication. If `hasAuth` is set to `true` and `authType` is `webhook-authentication-basic`, this property is required. + User *string `json:"user,omitempty"` +} + +// XmattersConfig Defines properties for connectors when type is `.xmatters`. +type XmattersConfig struct { + // ConfigUrl The request URL for the Elastic Alerts trigger in xMatters. It is applicable only when `usesBasic` is `true`. + ConfigUrl *string `json:"configUrl,omitempty"` + + // UsesBasic Specifies whether the connector uses HTTP basic authentication (`true`) or URL authentication (`false`). + UsesBasic *bool `json:"usesBasic,omitempty"` +} + +// XmattersSecrets Defines secrets for connectors when type is `.xmatters`. +type XmattersSecrets struct { + // Password A user name for HTTP basic authentication. It is applicable only when `usesBasic` is `true`. + Password *string `json:"password,omitempty"` + + // SecretsUrl The request URL for the Elastic Alerts trigger in xMatters with the API key included in the URL. It is applicable only when `usesBasic` is `false`. + SecretsUrl *string `json:"secretsUrl,omitempty"` + + // User A password for HTTP basic authentication. It is applicable only when `usesBasic` is `true`. + User *string `json:"user,omitempty"` +} + // APMUIElasticApiVersion defines model for APM_UI_elastic_api_version. type APMUIElasticApiVersion string @@ -3099,15 +3958,15 @@ type GetFleetAgentPoliciesParamsFormat string // PostFleetAgentPoliciesJSONBody defines parameters for PostFleetAgentPolicies. 
type PostFleetAgentPoliciesJSONBody struct { AdvancedSettings *struct { - AgentDownloadTargetDirectory *interface{} `json:"agent_download_target_directory"` - AgentDownloadTimeout *interface{} `json:"agent_download_timeout"` - AgentLimitsGoMaxProcs *interface{} `json:"agent_limits_go_max_procs"` - AgentLoggingFilesInterval *interface{} `json:"agent_logging_files_interval"` - AgentLoggingFilesKeepfiles *interface{} `json:"agent_logging_files_keepfiles"` - AgentLoggingFilesRotateeverybytes *interface{} `json:"agent_logging_files_rotateeverybytes"` - AgentLoggingLevel *interface{} `json:"agent_logging_level"` - AgentLoggingMetricsPeriod *interface{} `json:"agent_logging_metrics_period"` - AgentLoggingToFiles *interface{} `json:"agent_logging_to_files"` + AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory,omitempty"` + AgentDownloadTimeout interface{} `json:"agent_download_timeout,omitempty"` + AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs,omitempty"` + AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval,omitempty"` + AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles,omitempty"` + AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes,omitempty"` + AgentLoggingLevel interface{} `json:"agent_logging_level,omitempty"` + AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period,omitempty"` + AgentLoggingToFiles interface{} `json:"agent_logging_to_files,omitempty"` } `json:"advanced_settings,omitempty"` AgentFeatures *[]struct { Enabled bool `json:"enabled"` @@ -3125,10 +3984,10 @@ type PostFleetAgentPoliciesJSONBody struct { } `json:"requests,omitempty"` } `json:"resources,omitempty"` } `json:"agentless,omitempty"` - DataOutputId *string `json:"data_output_id"` + DataOutputId *string `json:"data_output_id,omitempty"` Description *string `json:"description,omitempty"` - DownloadSourceId *string `json:"download_source_id"` - FleetServerHostId *string `json:"fleet_server_host_id"` + DownloadSourceId *string `json:"download_source_id,omitempty"` + FleetServerHostId *string `json:"fleet_server_host_id,omitempty"` Force *bool `json:"force,omitempty"` // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. @@ -3163,7 +4022,7 @@ type PostFleetAgentPoliciesJSONBody struct { Host *string `json:"host,omitempty"` Port *float32 `json:"port,omitempty"` } `json:"monitoring_http,omitempty"` - MonitoringOutputId *string `json:"monitoring_output_id"` + MonitoringOutputId *string `json:"monitoring_output_id,omitempty"` MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` Name string `json:"name"` Namespace string `json:"namespace"` @@ -3211,15 +4070,15 @@ type GetFleetAgentPoliciesAgentpolicyidParamsFormat string // PutFleetAgentPoliciesAgentpolicyidJSONBody defines parameters for PutFleetAgentPoliciesAgentpolicyid. 
type PutFleetAgentPoliciesAgentpolicyidJSONBody struct { AdvancedSettings *struct { - AgentDownloadTargetDirectory *interface{} `json:"agent_download_target_directory"` - AgentDownloadTimeout *interface{} `json:"agent_download_timeout"` - AgentLimitsGoMaxProcs *interface{} `json:"agent_limits_go_max_procs"` - AgentLoggingFilesInterval *interface{} `json:"agent_logging_files_interval"` - AgentLoggingFilesKeepfiles *interface{} `json:"agent_logging_files_keepfiles"` - AgentLoggingFilesRotateeverybytes *interface{} `json:"agent_logging_files_rotateeverybytes"` - AgentLoggingLevel *interface{} `json:"agent_logging_level"` - AgentLoggingMetricsPeriod *interface{} `json:"agent_logging_metrics_period"` - AgentLoggingToFiles *interface{} `json:"agent_logging_to_files"` + AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory,omitempty"` + AgentDownloadTimeout interface{} `json:"agent_download_timeout,omitempty"` + AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs,omitempty"` + AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval,omitempty"` + AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles,omitempty"` + AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes,omitempty"` + AgentLoggingLevel interface{} `json:"agent_logging_level,omitempty"` + AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period,omitempty"` + AgentLoggingToFiles interface{} `json:"agent_logging_to_files,omitempty"` } `json:"advanced_settings,omitempty"` AgentFeatures *[]struct { Enabled bool `json:"enabled"` @@ -3238,10 +4097,10 @@ type PutFleetAgentPoliciesAgentpolicyidJSONBody struct { } `json:"resources,omitempty"` } `json:"agentless,omitempty"` BumpRevision *bool `json:"bumpRevision,omitempty"` - DataOutputId *string `json:"data_output_id"` + DataOutputId *string `json:"data_output_id,omitempty"` Description *string `json:"description,omitempty"` - DownloadSourceId *string `json:"download_source_id"` - FleetServerHostId *string `json:"fleet_server_host_id"` + DownloadSourceId *string `json:"download_source_id,omitempty"` + FleetServerHostId *string `json:"fleet_server_host_id,omitempty"` Force *bool `json:"force,omitempty"` // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. @@ -3276,7 +4135,7 @@ type PutFleetAgentPoliciesAgentpolicyidJSONBody struct { Host *string `json:"host,omitempty"` Port *float32 `json:"port,omitempty"` } `json:"monitoring_http,omitempty"` - MonitoringOutputId *string `json:"monitoring_output_id"` + MonitoringOutputId *string `json:"monitoring_output_id,omitempty"` MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` Name string `json:"name"` Namespace string `json:"namespace"` @@ -3533,6 +4392,29 @@ type PutParameterJSONBody struct { Value *string `json:"value,omitempty"` } +// PostActionsConnectorIdJSONBody defines parameters for PostActionsConnectorId. +type PostActionsConnectorIdJSONBody struct { + // Config The connector configuration details. + Config *CreateConnectorConfig `json:"config,omitempty"` + + // ConnectorTypeId The type of connector. + ConnectorTypeId string `json:"connector_type_id"` + + // Name The display name for the connector. + Name string `json:"name"` + Secrets *CreateConnectorSecrets `json:"secrets,omitempty"` +} + +// PutActionsConnectorIdJSONBody defines parameters for PutActionsConnectorId. 
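// Illustrative note, not part of the generated file: the connector request
// bodies added here wrap their configuration in union types, so callers pick
// a variant through the generated From* helpers (defined further down in this
// patch) instead of filling the union struct directly. A minimal sketch using
// only names that appear in this diff; the URL and org ID are made-up example
// values, and secrets are omitted for brevity:
//
//	var cfg CreateConnectorConfig
//	_ = cfg.FromResilientConfig(ResilientConfig{
//		ApiUrl: "https://resilient.example.com",
//		OrgId:  "201",
//	})
//
//	body := PostActionsConnectorIdJSONBody{
//		ConnectorTypeId: ".resilient",
//		Name:            "my-resilient-connector",
//		Config:          &cfg,
//	}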
+type PutActionsConnectorIdJSONBody struct { + // Config The connector configuration details. + Config *UpdateConnectorConfig `json:"config,omitempty"` + + // Name The display name for the connector. + Name string `json:"name"` + Secrets *UpdateConnectorSecrets `json:"secrets,omitempty"` +} + // DeleteAgentConfigurationJSONRequestBody defines body for DeleteAgentConfiguration for application/json ContentType. type DeleteAgentConfigurationJSONRequestBody = APMUIDeleteServiceObject @@ -3575,6 +4457,12 @@ type PostParametersJSONRequestBody PostParametersJSONBody // PutParameterJSONRequestBody defines body for PutParameter for application/json ContentType. type PutParameterJSONRequestBody PutParameterJSONBody +// PostActionsConnectorIdJSONRequestBody defines body for PostActionsConnectorId for application/json ContentType. +type PostActionsConnectorIdJSONRequestBody PostActionsConnectorIdJSONBody + +// PutActionsConnectorIdJSONRequestBody defines body for PutActionsConnectorId for application/json ContentType. +type PutActionsConnectorIdJSONRequestBody PutActionsConnectorIdJSONBody + // CreateDataViewDefaultwJSONRequestBody defines body for CreateDataViewDefaultw for application/json ContentType. type CreateDataViewDefaultwJSONRequestBody = DataViewsCreateDataViewRequestObject @@ -3717,6 +4605,40 @@ func (a AgentPolicy_PackagePolicies_1_Elasticsearch) MarshalJSON() ([]byte, erro return json.Marshal(object) } +// Getter for additional properties for CreateConnectorConfig. Returns the specified +// element and whether it was found +func (a CreateConnectorConfig) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for CreateConnectorConfig +func (a *CreateConnectorConfig) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Getter for additional properties for CreateConnectorSecrets. Returns the specified +// element and whether it was found +func (a CreateConnectorSecrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for CreateConnectorSecrets +func (a *CreateConnectorSecrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + // Getter for additional properties for OutputElasticsearch. Returns the specified // element and whether it was found func (a OutputElasticsearch) Get(fieldName string) (value interface{}, found bool) { @@ -11464,6 +12386,40 @@ func (a PackagePolicy_Elasticsearch) MarshalJSON() ([]byte, error) { return json.Marshal(object) } +// Getter for additional properties for UpdateConnectorConfig. 
Returns the specified +// element and whether it was found +func (a UpdateConnectorConfig) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for UpdateConnectorConfig +func (a *UpdateConnectorConfig) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Getter for additional properties for UpdateConnectorSecrets. Returns the specified +// element and whether it was found +func (a UpdateConnectorSecrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for UpdateConnectorSecrets +func (a *UpdateConnectorSecrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + // AsAgentPolicyPackagePolicies1Inputs1StreamsVars0 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars0 func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars0() (AgentPolicyPackagePolicies1Inputs1StreamsVars0, error) { var body AgentPolicyPackagePolicies1Inputs1StreamsVars0 @@ -12210,22 +13166,22 @@ func (t *AgentPolicyGlobalDataTagsItem_Value) UnmarshalJSON(b []byte) error { return err } -// AsCreateParamResponse0 returns the union data inside the CreateParamResponse as a CreateParamResponse0 -func (t CreateParamResponse) AsCreateParamResponse0() (CreateParamResponse0, error) { - var body CreateParamResponse0 +// AsBedrockConfig returns the union data inside the CreateConnectorConfig as a BedrockConfig +func (t CreateConnectorConfig) AsBedrockConfig() (BedrockConfig, error) { + var body BedrockConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromCreateParamResponse0 overwrites any union data inside the CreateParamResponse as the provided CreateParamResponse0 -func (t *CreateParamResponse) FromCreateParamResponse0(v CreateParamResponse0) error { +// FromBedrockConfig overwrites any union data inside the CreateConnectorConfig as the provided BedrockConfig +func (t *CreateConnectorConfig) FromBedrockConfig(v BedrockConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeCreateParamResponse0 performs a merge with any union data inside the CreateParamResponse, using the provided CreateParamResponse0 -func (t *CreateParamResponse) MergeCreateParamResponse0(v CreateParamResponse0) error { +// MergeBedrockConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided BedrockConfig +func (t *CreateConnectorConfig) MergeBedrockConfig(v BedrockConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12236,22 +13192,22 @@ func (t *CreateParamResponse) MergeCreateParamResponse0(v CreateParamResponse0) return err } -// AsSyntheticsPostParameterResponse returns the union data inside the CreateParamResponse as a SyntheticsPostParameterResponse -func (t CreateParamResponse) AsSyntheticsPostParameterResponse() (SyntheticsPostParameterResponse, error) { - var body SyntheticsPostParameterResponse +// AsCrowdstrikeConfig 
returns the union data inside the CreateConnectorConfig as a CrowdstrikeConfig +func (t CreateConnectorConfig) AsCrowdstrikeConfig() (CrowdstrikeConfig, error) { + var body CrowdstrikeConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromSyntheticsPostParameterResponse overwrites any union data inside the CreateParamResponse as the provided SyntheticsPostParameterResponse -func (t *CreateParamResponse) FromSyntheticsPostParameterResponse(v SyntheticsPostParameterResponse) error { +// FromCrowdstrikeConfig overwrites any union data inside the CreateConnectorConfig as the provided CrowdstrikeConfig +func (t *CreateConnectorConfig) FromCrowdstrikeConfig(v CrowdstrikeConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeSyntheticsPostParameterResponse performs a merge with any union data inside the CreateParamResponse, using the provided SyntheticsPostParameterResponse -func (t *CreateParamResponse) MergeSyntheticsPostParameterResponse(v SyntheticsPostParameterResponse) error { +// MergeCrowdstrikeConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided CrowdstrikeConfig +func (t *CreateConnectorConfig) MergeCrowdstrikeConfig(v CrowdstrikeConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12262,32 +13218,22 @@ func (t *CreateParamResponse) MergeSyntheticsPostParameterResponse(v SyntheticsP return err } -func (t CreateParamResponse) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *CreateParamResponse) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsNewOutputElasticsearchSecretsSslKey0 returns the union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as a NewOutputElasticsearchSecretsSslKey0 -func (t NewOutputElasticsearch_Secrets_Ssl_Key) AsNewOutputElasticsearchSecretsSslKey0() (NewOutputElasticsearchSecretsSslKey0, error) { - var body NewOutputElasticsearchSecretsSslKey0 +// AsD3securityConfig returns the union data inside the CreateConnectorConfig as a D3securityConfig +func (t CreateConnectorConfig) AsD3securityConfig() (D3securityConfig, error) { + var body D3securityConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputElasticsearchSecretsSslKey0 overwrites any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as the provided NewOutputElasticsearchSecretsSslKey0 -func (t *NewOutputElasticsearch_Secrets_Ssl_Key) FromNewOutputElasticsearchSecretsSslKey0(v NewOutputElasticsearchSecretsSslKey0) error { +// FromD3securityConfig overwrites any union data inside the CreateConnectorConfig as the provided D3securityConfig +func (t *CreateConnectorConfig) FromD3securityConfig(v D3securityConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputElasticsearchSecretsSslKey0 performs a merge with any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key, using the provided NewOutputElasticsearchSecretsSslKey0 -func (t *NewOutputElasticsearch_Secrets_Ssl_Key) MergeNewOutputElasticsearchSecretsSslKey0(v NewOutputElasticsearchSecretsSslKey0) error { +// MergeD3securityConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided D3securityConfig +func (t *CreateConnectorConfig) MergeD3securityConfig(v D3securityConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12298,22 +13244,22 @@ func (t *NewOutputElasticsearch_Secrets_Ssl_Key) MergeNewOutputElasticsearchSecr return err } -// 
AsNewOutputElasticsearchSecretsSslKey1 returns the union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as a NewOutputElasticsearchSecretsSslKey1 -func (t NewOutputElasticsearch_Secrets_Ssl_Key) AsNewOutputElasticsearchSecretsSslKey1() (NewOutputElasticsearchSecretsSslKey1, error) { - var body NewOutputElasticsearchSecretsSslKey1 +// AsEmailConfig returns the union data inside the CreateConnectorConfig as a EmailConfig +func (t CreateConnectorConfig) AsEmailConfig() (EmailConfig, error) { + var body EmailConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputElasticsearchSecretsSslKey1 overwrites any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as the provided NewOutputElasticsearchSecretsSslKey1 -func (t *NewOutputElasticsearch_Secrets_Ssl_Key) FromNewOutputElasticsearchSecretsSslKey1(v NewOutputElasticsearchSecretsSslKey1) error { +// FromEmailConfig overwrites any union data inside the CreateConnectorConfig as the provided EmailConfig +func (t *CreateConnectorConfig) FromEmailConfig(v EmailConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputElasticsearchSecretsSslKey1 performs a merge with any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key, using the provided NewOutputElasticsearchSecretsSslKey1 -func (t *NewOutputElasticsearch_Secrets_Ssl_Key) MergeNewOutputElasticsearchSecretsSslKey1(v NewOutputElasticsearchSecretsSslKey1) error { +// MergeEmailConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided EmailConfig +func (t *CreateConnectorConfig) MergeEmailConfig(v EmailConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12324,32 +13270,22 @@ func (t *NewOutputElasticsearch_Secrets_Ssl_Key) MergeNewOutputElasticsearchSecr return err } -func (t NewOutputElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *NewOutputElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsNewOutputKafkaSecretsPassword0 returns the union data inside the NewOutputKafka_Secrets_Password as a NewOutputKafkaSecretsPassword0 -func (t NewOutputKafka_Secrets_Password) AsNewOutputKafkaSecretsPassword0() (NewOutputKafkaSecretsPassword0, error) { - var body NewOutputKafkaSecretsPassword0 +// AsGeminiConfig returns the union data inside the CreateConnectorConfig as a GeminiConfig +func (t CreateConnectorConfig) AsGeminiConfig() (GeminiConfig, error) { + var body GeminiConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputKafkaSecretsPassword0 overwrites any union data inside the NewOutputKafka_Secrets_Password as the provided NewOutputKafkaSecretsPassword0 -func (t *NewOutputKafka_Secrets_Password) FromNewOutputKafkaSecretsPassword0(v NewOutputKafkaSecretsPassword0) error { +// FromGeminiConfig overwrites any union data inside the CreateConnectorConfig as the provided GeminiConfig +func (t *CreateConnectorConfig) FromGeminiConfig(v GeminiConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputKafkaSecretsPassword0 performs a merge with any union data inside the NewOutputKafka_Secrets_Password, using the provided NewOutputKafkaSecretsPassword0 -func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword0(v NewOutputKafkaSecretsPassword0) error { +// MergeGeminiConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided 
GeminiConfig +func (t *CreateConnectorConfig) MergeGeminiConfig(v GeminiConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12360,22 +13296,22 @@ func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword0(v return err } -// AsNewOutputKafkaSecretsPassword1 returns the union data inside the NewOutputKafka_Secrets_Password as a NewOutputKafkaSecretsPassword1 -func (t NewOutputKafka_Secrets_Password) AsNewOutputKafkaSecretsPassword1() (NewOutputKafkaSecretsPassword1, error) { - var body NewOutputKafkaSecretsPassword1 +// AsResilientConfig returns the union data inside the CreateConnectorConfig as a ResilientConfig +func (t CreateConnectorConfig) AsResilientConfig() (ResilientConfig, error) { + var body ResilientConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputKafkaSecretsPassword1 overwrites any union data inside the NewOutputKafka_Secrets_Password as the provided NewOutputKafkaSecretsPassword1 -func (t *NewOutputKafka_Secrets_Password) FromNewOutputKafkaSecretsPassword1(v NewOutputKafkaSecretsPassword1) error { +// FromResilientConfig overwrites any union data inside the CreateConnectorConfig as the provided ResilientConfig +func (t *CreateConnectorConfig) FromResilientConfig(v ResilientConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputKafkaSecretsPassword1 performs a merge with any union data inside the NewOutputKafka_Secrets_Password, using the provided NewOutputKafkaSecretsPassword1 -func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword1(v NewOutputKafkaSecretsPassword1) error { +// MergeResilientConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ResilientConfig +func (t *CreateConnectorConfig) MergeResilientConfig(v ResilientConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12386,32 +13322,22 @@ func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword1(v return err } -func (t NewOutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *NewOutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsNewOutputKafkaSecretsSslKey0 returns the union data inside the NewOutputKafka_Secrets_Ssl_Key as a NewOutputKafkaSecretsSslKey0 -func (t NewOutputKafka_Secrets_Ssl_Key) AsNewOutputKafkaSecretsSslKey0() (NewOutputKafkaSecretsSslKey0, error) { - var body NewOutputKafkaSecretsSslKey0 +// AsIndexConfig returns the union data inside the CreateConnectorConfig as a IndexConfig +func (t CreateConnectorConfig) AsIndexConfig() (IndexConfig, error) { + var body IndexConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputKafkaSecretsSslKey0 overwrites any union data inside the NewOutputKafka_Secrets_Ssl_Key as the provided NewOutputKafkaSecretsSslKey0 -func (t *NewOutputKafka_Secrets_Ssl_Key) FromNewOutputKafkaSecretsSslKey0(v NewOutputKafkaSecretsSslKey0) error { +// FromIndexConfig overwrites any union data inside the CreateConnectorConfig as the provided IndexConfig +func (t *CreateConnectorConfig) FromIndexConfig(v IndexConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputKafkaSecretsSslKey0 performs a merge with any union data inside the NewOutputKafka_Secrets_Ssl_Key, using the provided NewOutputKafkaSecretsSslKey0 -func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey0(v 
NewOutputKafkaSecretsSslKey0) error { +// MergeIndexConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided IndexConfig +func (t *CreateConnectorConfig) MergeIndexConfig(v IndexConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12422,22 +13348,22 @@ func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey0(v New return err } -// AsNewOutputKafkaSecretsSslKey1 returns the union data inside the NewOutputKafka_Secrets_Ssl_Key as a NewOutputKafkaSecretsSslKey1 -func (t NewOutputKafka_Secrets_Ssl_Key) AsNewOutputKafkaSecretsSslKey1() (NewOutputKafkaSecretsSslKey1, error) { - var body NewOutputKafkaSecretsSslKey1 +// AsJiraConfig returns the union data inside the CreateConnectorConfig as a JiraConfig +func (t CreateConnectorConfig) AsJiraConfig() (JiraConfig, error) { + var body JiraConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputKafkaSecretsSslKey1 overwrites any union data inside the NewOutputKafka_Secrets_Ssl_Key as the provided NewOutputKafkaSecretsSslKey1 -func (t *NewOutputKafka_Secrets_Ssl_Key) FromNewOutputKafkaSecretsSslKey1(v NewOutputKafkaSecretsSslKey1) error { +// FromJiraConfig overwrites any union data inside the CreateConnectorConfig as the provided JiraConfig +func (t *CreateConnectorConfig) FromJiraConfig(v JiraConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputKafkaSecretsSslKey1 performs a merge with any union data inside the NewOutputKafka_Secrets_Ssl_Key, using the provided NewOutputKafkaSecretsSslKey1 -func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey1(v NewOutputKafkaSecretsSslKey1) error { +// MergeJiraConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided JiraConfig +func (t *CreateConnectorConfig) MergeJiraConfig(v JiraConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12448,32 +13374,22 @@ func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey1(v New return err } -func (t NewOutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *NewOutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsNewOutputLogstashSecretsSslKey0 returns the union data inside the NewOutputLogstash_Secrets_Ssl_Key as a NewOutputLogstashSecretsSslKey0 -func (t NewOutputLogstash_Secrets_Ssl_Key) AsNewOutputLogstashSecretsSslKey0() (NewOutputLogstashSecretsSslKey0, error) { - var body NewOutputLogstashSecretsSslKey0 +// AsGenaiAzureConfig returns the union data inside the CreateConnectorConfig as a GenaiAzureConfig +func (t CreateConnectorConfig) AsGenaiAzureConfig() (GenaiAzureConfig, error) { + var body GenaiAzureConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputLogstashSecretsSslKey0 overwrites any union data inside the NewOutputLogstash_Secrets_Ssl_Key as the provided NewOutputLogstashSecretsSslKey0 -func (t *NewOutputLogstash_Secrets_Ssl_Key) FromNewOutputLogstashSecretsSslKey0(v NewOutputLogstashSecretsSslKey0) error { +// FromGenaiAzureConfig overwrites any union data inside the CreateConnectorConfig as the provided GenaiAzureConfig +func (t *CreateConnectorConfig) FromGenaiAzureConfig(v GenaiAzureConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputLogstashSecretsSslKey0 performs a merge with any union data inside the 
NewOutputLogstash_Secrets_Ssl_Key, using the provided NewOutputLogstashSecretsSslKey0 -func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey0(v NewOutputLogstashSecretsSslKey0) error { +// MergeGenaiAzureConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided GenaiAzureConfig +func (t *CreateConnectorConfig) MergeGenaiAzureConfig(v GenaiAzureConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12484,22 +13400,22 @@ func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey0 return err } -// AsNewOutputLogstashSecretsSslKey1 returns the union data inside the NewOutputLogstash_Secrets_Ssl_Key as a NewOutputLogstashSecretsSslKey1 -func (t NewOutputLogstash_Secrets_Ssl_Key) AsNewOutputLogstashSecretsSslKey1() (NewOutputLogstashSecretsSslKey1, error) { - var body NewOutputLogstashSecretsSslKey1 +// AsGenaiOpenaiConfig returns the union data inside the CreateConnectorConfig as a GenaiOpenaiConfig +func (t CreateConnectorConfig) AsGenaiOpenaiConfig() (GenaiOpenaiConfig, error) { + var body GenaiOpenaiConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputLogstashSecretsSslKey1 overwrites any union data inside the NewOutputLogstash_Secrets_Ssl_Key as the provided NewOutputLogstashSecretsSslKey1 -func (t *NewOutputLogstash_Secrets_Ssl_Key) FromNewOutputLogstashSecretsSslKey1(v NewOutputLogstashSecretsSslKey1) error { +// FromGenaiOpenaiConfig overwrites any union data inside the CreateConnectorConfig as the provided GenaiOpenaiConfig +func (t *CreateConnectorConfig) FromGenaiOpenaiConfig(v GenaiOpenaiConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputLogstashSecretsSslKey1 performs a merge with any union data inside the NewOutputLogstash_Secrets_Ssl_Key, using the provided NewOutputLogstashSecretsSslKey1 -func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey1(v NewOutputLogstashSecretsSslKey1) error { +// MergeGenaiOpenaiConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided GenaiOpenaiConfig +func (t *CreateConnectorConfig) MergeGenaiOpenaiConfig(v GenaiOpenaiConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12510,32 +13426,22 @@ func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey1 return err } -func (t NewOutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *NewOutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsNewOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as a NewOutputRemoteElasticsearchSecretsServiceToken0 -func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) AsNewOutputRemoteElasticsearchSecretsServiceToken0() (NewOutputRemoteElasticsearchSecretsServiceToken0, error) { - var body NewOutputRemoteElasticsearchSecretsServiceToken0 +// AsGenaiOpenaiOtherConfig returns the union data inside the CreateConnectorConfig as a GenaiOpenaiOtherConfig +func (t CreateConnectorConfig) AsGenaiOpenaiOtherConfig() (GenaiOpenaiOtherConfig, error) { + var body GenaiOpenaiOtherConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as the provided 
NewOutputRemoteElasticsearchSecretsServiceToken0 -func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) FromNewOutputRemoteElasticsearchSecretsServiceToken0(v NewOutputRemoteElasticsearchSecretsServiceToken0) error { +// FromGenaiOpenaiOtherConfig overwrites any union data inside the CreateConnectorConfig as the provided GenaiOpenaiOtherConfig +func (t *CreateConnectorConfig) FromGenaiOpenaiOtherConfig(v GenaiOpenaiOtherConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided NewOutputRemoteElasticsearchSecretsServiceToken0 -func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemoteElasticsearchSecretsServiceToken0(v NewOutputRemoteElasticsearchSecretsServiceToken0) error { +// MergeGenaiOpenaiOtherConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided GenaiOpenaiOtherConfig +func (t *CreateConnectorConfig) MergeGenaiOpenaiOtherConfig(v GenaiOpenaiOtherConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12546,22 +13452,22 @@ func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemote return err } -// AsNewOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as a NewOutputRemoteElasticsearchSecretsServiceToken1 -func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) AsNewOutputRemoteElasticsearchSecretsServiceToken1() (NewOutputRemoteElasticsearchSecretsServiceToken1, error) { - var body NewOutputRemoteElasticsearchSecretsServiceToken1 +// AsOpsgenieConfig returns the union data inside the CreateConnectorConfig as a OpsgenieConfig +func (t CreateConnectorConfig) AsOpsgenieConfig() (OpsgenieConfig, error) { + var body OpsgenieConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as the provided NewOutputRemoteElasticsearchSecretsServiceToken1 -func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) FromNewOutputRemoteElasticsearchSecretsServiceToken1(v NewOutputRemoteElasticsearchSecretsServiceToken1) error { +// FromOpsgenieConfig overwrites any union data inside the CreateConnectorConfig as the provided OpsgenieConfig +func (t *CreateConnectorConfig) FromOpsgenieConfig(v OpsgenieConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided NewOutputRemoteElasticsearchSecretsServiceToken1 -func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemoteElasticsearchSecretsServiceToken1(v NewOutputRemoteElasticsearchSecretsServiceToken1) error { +// MergeOpsgenieConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided OpsgenieConfig +func (t *CreateConnectorConfig) MergeOpsgenieConfig(v OpsgenieConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12572,32 +13478,22 @@ func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemote return err } -func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t 
*NewOutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsNewOutputRemoteElasticsearchSecretsSslKey0 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as a NewOutputRemoteElasticsearchSecretsSslKey0 -func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) AsNewOutputRemoteElasticsearchSecretsSslKey0() (NewOutputRemoteElasticsearchSecretsSslKey0, error) { - var body NewOutputRemoteElasticsearchSecretsSslKey0 +// AsPagerdutyConfig returns the union data inside the CreateConnectorConfig as a PagerdutyConfig +func (t CreateConnectorConfig) AsPagerdutyConfig() (PagerdutyConfig, error) { + var body PagerdutyConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputRemoteElasticsearchSecretsSslKey0 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided NewOutputRemoteElasticsearchSecretsSslKey0 -func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) FromNewOutputRemoteElasticsearchSecretsSslKey0(v NewOutputRemoteElasticsearchSecretsSslKey0) error { +// FromPagerdutyConfig overwrites any union data inside the CreateConnectorConfig as the provided PagerdutyConfig +func (t *CreateConnectorConfig) FromPagerdutyConfig(v PagerdutyConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputRemoteElasticsearchSecretsSslKey0 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided NewOutputRemoteElasticsearchSecretsSslKey0 -func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeNewOutputRemoteElasticsearchSecretsSslKey0(v NewOutputRemoteElasticsearchSecretsSslKey0) error { +// MergePagerdutyConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided PagerdutyConfig +func (t *CreateConnectorConfig) MergePagerdutyConfig(v PagerdutyConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12608,22 +13504,22 @@ func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeNewOutputRemoteElast return err } -// AsNewOutputRemoteElasticsearchSecretsSslKey1 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as a NewOutputRemoteElasticsearchSecretsSslKey1 -func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) AsNewOutputRemoteElasticsearchSecretsSslKey1() (NewOutputRemoteElasticsearchSecretsSslKey1, error) { - var body NewOutputRemoteElasticsearchSecretsSslKey1 +// AsSentineloneConfig returns the union data inside the CreateConnectorConfig as a SentineloneConfig +func (t CreateConnectorConfig) AsSentineloneConfig() (SentineloneConfig, error) { + var body SentineloneConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputRemoteElasticsearchSecretsSslKey1 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided NewOutputRemoteElasticsearchSecretsSslKey1 -func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) FromNewOutputRemoteElasticsearchSecretsSslKey1(v NewOutputRemoteElasticsearchSecretsSslKey1) error { +// FromSentineloneConfig overwrites any union data inside the CreateConnectorConfig as the provided SentineloneConfig +func (t *CreateConnectorConfig) FromSentineloneConfig(v SentineloneConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputRemoteElasticsearchSecretsSslKey1 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key, using the 
provided NewOutputRemoteElasticsearchSecretsSslKey1 -func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeNewOutputRemoteElasticsearchSecretsSslKey1(v NewOutputRemoteElasticsearchSecretsSslKey1) error { +// MergeSentineloneConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided SentineloneConfig +func (t *CreateConnectorConfig) MergeSentineloneConfig(v SentineloneConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12634,32 +13530,22 @@ func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeNewOutputRemoteElast return err } -func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsNewOutputElasticsearch returns the union data inside the NewOutputUnion as a NewOutputElasticsearch -func (t NewOutputUnion) AsNewOutputElasticsearch() (NewOutputElasticsearch, error) { - var body NewOutputElasticsearch +// AsServicenowConfig returns the union data inside the CreateConnectorConfig as a ServicenowConfig +func (t CreateConnectorConfig) AsServicenowConfig() (ServicenowConfig, error) { + var body ServicenowConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputElasticsearch -func (t *NewOutputUnion) FromNewOutputElasticsearch(v NewOutputElasticsearch) error { +// FromServicenowConfig overwrites any union data inside the CreateConnectorConfig as the provided ServicenowConfig +func (t *CreateConnectorConfig) FromServicenowConfig(v ServicenowConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputElasticsearch -func (t *NewOutputUnion) MergeNewOutputElasticsearch(v NewOutputElasticsearch) error { +// MergeServicenowConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ServicenowConfig +func (t *CreateConnectorConfig) MergeServicenowConfig(v ServicenowConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12670,22 +13556,22 @@ func (t *NewOutputUnion) MergeNewOutputElasticsearch(v NewOutputElasticsearch) e return err } -// AsNewOutputRemoteElasticsearch returns the union data inside the NewOutputUnion as a NewOutputRemoteElasticsearch -func (t NewOutputUnion) AsNewOutputRemoteElasticsearch() (NewOutputRemoteElasticsearch, error) { - var body NewOutputRemoteElasticsearch +// AsServicenowItomConfig returns the union data inside the CreateConnectorConfig as a ServicenowItomConfig +func (t CreateConnectorConfig) AsServicenowItomConfig() (ServicenowItomConfig, error) { + var body ServicenowItomConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputRemoteElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputRemoteElasticsearch -func (t *NewOutputUnion) FromNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { +// FromServicenowItomConfig overwrites any union data inside the CreateConnectorConfig as the provided ServicenowItomConfig +func (t *CreateConnectorConfig) FromServicenowItomConfig(v ServicenowItomConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputRemoteElasticsearch performs a merge with 
any union data inside the NewOutputUnion, using the provided NewOutputRemoteElasticsearch -func (t *NewOutputUnion) MergeNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { +// MergeServicenowItomConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ServicenowItomConfig +func (t *CreateConnectorConfig) MergeServicenowItomConfig(v ServicenowItomConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12696,22 +13582,22 @@ func (t *NewOutputUnion) MergeNewOutputRemoteElasticsearch(v NewOutputRemoteElas return err } -// AsNewOutputLogstash returns the union data inside the NewOutputUnion as a NewOutputLogstash -func (t NewOutputUnion) AsNewOutputLogstash() (NewOutputLogstash, error) { - var body NewOutputLogstash +// AsSlackApiConfig returns the union data inside the CreateConnectorConfig as a SlackApiConfig +func (t CreateConnectorConfig) AsSlackApiConfig() (SlackApiConfig, error) { + var body SlackApiConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputLogstash overwrites any union data inside the NewOutputUnion as the provided NewOutputLogstash -func (t *NewOutputUnion) FromNewOutputLogstash(v NewOutputLogstash) error { +// FromSlackApiConfig overwrites any union data inside the CreateConnectorConfig as the provided SlackApiConfig +func (t *CreateConnectorConfig) FromSlackApiConfig(v SlackApiConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputLogstash performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputLogstash -func (t *NewOutputUnion) MergeNewOutputLogstash(v NewOutputLogstash) error { +// MergeSlackApiConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided SlackApiConfig +func (t *CreateConnectorConfig) MergeSlackApiConfig(v SlackApiConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12722,22 +13608,22 @@ func (t *NewOutputUnion) MergeNewOutputLogstash(v NewOutputLogstash) error { return err } -// AsNewOutputKafka returns the union data inside the NewOutputUnion as a NewOutputKafka -func (t NewOutputUnion) AsNewOutputKafka() (NewOutputKafka, error) { - var body NewOutputKafka +// AsSwimlaneConfig returns the union data inside the CreateConnectorConfig as a SwimlaneConfig +func (t CreateConnectorConfig) AsSwimlaneConfig() (SwimlaneConfig, error) { + var body SwimlaneConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromNewOutputKafka overwrites any union data inside the NewOutputUnion as the provided NewOutputKafka -func (t *NewOutputUnion) FromNewOutputKafka(v NewOutputKafka) error { +// FromSwimlaneConfig overwrites any union data inside the CreateConnectorConfig as the provided SwimlaneConfig +func (t *CreateConnectorConfig) FromSwimlaneConfig(v SwimlaneConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeNewOutputKafka performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputKafka -func (t *NewOutputUnion) MergeNewOutputKafka(v NewOutputKafka) error { +// MergeSwimlaneConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided SwimlaneConfig +func (t *CreateConnectorConfig) MergeSwimlaneConfig(v SwimlaneConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12748,32 +13634,22 @@ func (t *NewOutputUnion) MergeNewOutputKafka(v NewOutputKafka) error { return err } -func (t NewOutputUnion) MarshalJSON() ([]byte, error) { - 
b, err := t.union.MarshalJSON() - return b, err -} - -func (t *NewOutputUnion) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsOutputElasticsearchSecretsSslKey0 returns the union data inside the OutputElasticsearch_Secrets_Ssl_Key as a OutputElasticsearchSecretsSslKey0 -func (t OutputElasticsearch_Secrets_Ssl_Key) AsOutputElasticsearchSecretsSslKey0() (OutputElasticsearchSecretsSslKey0, error) { - var body OutputElasticsearchSecretsSslKey0 +// AsThehiveConfig returns the union data inside the CreateConnectorConfig as a ThehiveConfig +func (t CreateConnectorConfig) AsThehiveConfig() (ThehiveConfig, error) { + var body ThehiveConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputElasticsearchSecretsSslKey0 overwrites any union data inside the OutputElasticsearch_Secrets_Ssl_Key as the provided OutputElasticsearchSecretsSslKey0 -func (t *OutputElasticsearch_Secrets_Ssl_Key) FromOutputElasticsearchSecretsSslKey0(v OutputElasticsearchSecretsSslKey0) error { +// FromThehiveConfig overwrites any union data inside the CreateConnectorConfig as the provided ThehiveConfig +func (t *CreateConnectorConfig) FromThehiveConfig(v ThehiveConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputElasticsearchSecretsSslKey0 performs a merge with any union data inside the OutputElasticsearch_Secrets_Ssl_Key, using the provided OutputElasticsearchSecretsSslKey0 -func (t *OutputElasticsearch_Secrets_Ssl_Key) MergeOutputElasticsearchSecretsSslKey0(v OutputElasticsearchSecretsSslKey0) error { +// MergeThehiveConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ThehiveConfig +func (t *CreateConnectorConfig) MergeThehiveConfig(v ThehiveConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12784,22 +13660,22 @@ func (t *OutputElasticsearch_Secrets_Ssl_Key) MergeOutputElasticsearchSecretsSsl return err } -// AsOutputElasticsearchSecretsSslKey1 returns the union data inside the OutputElasticsearch_Secrets_Ssl_Key as a OutputElasticsearchSecretsSslKey1 -func (t OutputElasticsearch_Secrets_Ssl_Key) AsOutputElasticsearchSecretsSslKey1() (OutputElasticsearchSecretsSslKey1, error) { - var body OutputElasticsearchSecretsSslKey1 +// AsTinesConfig returns the union data inside the CreateConnectorConfig as a TinesConfig +func (t CreateConnectorConfig) AsTinesConfig() (TinesConfig, error) { + var body TinesConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputElasticsearchSecretsSslKey1 overwrites any union data inside the OutputElasticsearch_Secrets_Ssl_Key as the provided OutputElasticsearchSecretsSslKey1 -func (t *OutputElasticsearch_Secrets_Ssl_Key) FromOutputElasticsearchSecretsSslKey1(v OutputElasticsearchSecretsSslKey1) error { +// FromTinesConfig overwrites any union data inside the CreateConnectorConfig as the provided TinesConfig +func (t *CreateConnectorConfig) FromTinesConfig(v TinesConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputElasticsearchSecretsSslKey1 performs a merge with any union data inside the OutputElasticsearch_Secrets_Ssl_Key, using the provided OutputElasticsearchSecretsSslKey1 -func (t *OutputElasticsearch_Secrets_Ssl_Key) MergeOutputElasticsearchSecretsSslKey1(v OutputElasticsearchSecretsSslKey1) error { +// MergeTinesConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided TinesConfig +func (t *CreateConnectorConfig) MergeTinesConfig(v TinesConfig) 
error { b, err := json.Marshal(v) if err != nil { return err @@ -12810,32 +13686,22 @@ func (t *OutputElasticsearch_Secrets_Ssl_Key) MergeOutputElasticsearchSecretsSsl return err } -func (t OutputElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *OutputElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsOutputKafkaSecretsPassword0 returns the union data inside the OutputKafka_Secrets_Password as a OutputKafkaSecretsPassword0 -func (t OutputKafka_Secrets_Password) AsOutputKafkaSecretsPassword0() (OutputKafkaSecretsPassword0, error) { - var body OutputKafkaSecretsPassword0 +// AsTorqConfig returns the union data inside the CreateConnectorConfig as a TorqConfig +func (t CreateConnectorConfig) AsTorqConfig() (TorqConfig, error) { + var body TorqConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputKafkaSecretsPassword0 overwrites any union data inside the OutputKafka_Secrets_Password as the provided OutputKafkaSecretsPassword0 -func (t *OutputKafka_Secrets_Password) FromOutputKafkaSecretsPassword0(v OutputKafkaSecretsPassword0) error { +// FromTorqConfig overwrites any union data inside the CreateConnectorConfig as the provided TorqConfig +func (t *CreateConnectorConfig) FromTorqConfig(v TorqConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputKafkaSecretsPassword0 performs a merge with any union data inside the OutputKafka_Secrets_Password, using the provided OutputKafkaSecretsPassword0 -func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword0(v OutputKafkaSecretsPassword0) error { +// MergeTorqConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided TorqConfig +func (t *CreateConnectorConfig) MergeTorqConfig(v TorqConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12846,22 +13712,22 @@ func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword0(v Output return err } -// AsOutputKafkaSecretsPassword1 returns the union data inside the OutputKafka_Secrets_Password as a OutputKafkaSecretsPassword1 -func (t OutputKafka_Secrets_Password) AsOutputKafkaSecretsPassword1() (OutputKafkaSecretsPassword1, error) { - var body OutputKafkaSecretsPassword1 +// AsWebhookConfig returns the union data inside the CreateConnectorConfig as a WebhookConfig +func (t CreateConnectorConfig) AsWebhookConfig() (WebhookConfig, error) { + var body WebhookConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputKafkaSecretsPassword1 overwrites any union data inside the OutputKafka_Secrets_Password as the provided OutputKafkaSecretsPassword1 -func (t *OutputKafka_Secrets_Password) FromOutputKafkaSecretsPassword1(v OutputKafkaSecretsPassword1) error { +// FromWebhookConfig overwrites any union data inside the CreateConnectorConfig as the provided WebhookConfig +func (t *CreateConnectorConfig) FromWebhookConfig(v WebhookConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputKafkaSecretsPassword1 performs a merge with any union data inside the OutputKafka_Secrets_Password, using the provided OutputKafkaSecretsPassword1 -func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword1(v OutputKafkaSecretsPassword1) error { +// MergeWebhookConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided WebhookConfig +func (t *CreateConnectorConfig) 
MergeWebhookConfig(v WebhookConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12872,32 +13738,22 @@ func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword1(v Output return err } -func (t OutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *OutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsOutputKafkaSecretsSslKey0 returns the union data inside the OutputKafka_Secrets_Ssl_Key as a OutputKafkaSecretsSslKey0 -func (t OutputKafka_Secrets_Ssl_Key) AsOutputKafkaSecretsSslKey0() (OutputKafkaSecretsSslKey0, error) { - var body OutputKafkaSecretsSslKey0 +// AsCasesWebhookConfig returns the union data inside the CreateConnectorConfig as a CasesWebhookConfig +func (t CreateConnectorConfig) AsCasesWebhookConfig() (CasesWebhookConfig, error) { + var body CasesWebhookConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputKafkaSecretsSslKey0 overwrites any union data inside the OutputKafka_Secrets_Ssl_Key as the provided OutputKafkaSecretsSslKey0 -func (t *OutputKafka_Secrets_Ssl_Key) FromOutputKafkaSecretsSslKey0(v OutputKafkaSecretsSslKey0) error { +// FromCasesWebhookConfig overwrites any union data inside the CreateConnectorConfig as the provided CasesWebhookConfig +func (t *CreateConnectorConfig) FromCasesWebhookConfig(v CasesWebhookConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputKafkaSecretsSslKey0 performs a merge with any union data inside the OutputKafka_Secrets_Ssl_Key, using the provided OutputKafkaSecretsSslKey0 -func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey0(v OutputKafkaSecretsSslKey0) error { +// MergeCasesWebhookConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided CasesWebhookConfig +func (t *CreateConnectorConfig) MergeCasesWebhookConfig(v CasesWebhookConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12908,22 +13764,22 @@ func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey0(v OutputKaf return err } -// AsOutputKafkaSecretsSslKey1 returns the union data inside the OutputKafka_Secrets_Ssl_Key as a OutputKafkaSecretsSslKey1 -func (t OutputKafka_Secrets_Ssl_Key) AsOutputKafkaSecretsSslKey1() (OutputKafkaSecretsSslKey1, error) { - var body OutputKafkaSecretsSslKey1 +// AsXmattersConfig returns the union data inside the CreateConnectorConfig as a XmattersConfig +func (t CreateConnectorConfig) AsXmattersConfig() (XmattersConfig, error) { + var body XmattersConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputKafkaSecretsSslKey1 overwrites any union data inside the OutputKafka_Secrets_Ssl_Key as the provided OutputKafkaSecretsSslKey1 -func (t *OutputKafka_Secrets_Ssl_Key) FromOutputKafkaSecretsSslKey1(v OutputKafkaSecretsSslKey1) error { +// FromXmattersConfig overwrites any union data inside the CreateConnectorConfig as the provided XmattersConfig +func (t *CreateConnectorConfig) FromXmattersConfig(v XmattersConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputKafkaSecretsSslKey1 performs a merge with any union data inside the OutputKafka_Secrets_Ssl_Key, using the provided OutputKafkaSecretsSslKey1 -func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey1(v OutputKafkaSecretsSslKey1) error { +// MergeXmattersConfig performs a merge with any union data inside the CreateConnectorConfig, using 
the provided XmattersConfig +func (t *CreateConnectorConfig) MergeXmattersConfig(v XmattersConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -12934,32 +13790,22 @@ func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey1(v OutputKaf return err } -func (t OutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *OutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsOutputLogstashSecretsSslKey0 returns the union data inside the OutputLogstash_Secrets_Ssl_Key as a OutputLogstashSecretsSslKey0 -func (t OutputLogstash_Secrets_Ssl_Key) AsOutputLogstashSecretsSslKey0() (OutputLogstashSecretsSslKey0, error) { - var body OutputLogstashSecretsSslKey0 +// AsBedrockSecrets returns the union data inside the CreateConnectorSecrets as a BedrockSecrets +func (t CreateConnectorSecrets) AsBedrockSecrets() (BedrockSecrets, error) { + var body BedrockSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputLogstashSecretsSslKey0 overwrites any union data inside the OutputLogstash_Secrets_Ssl_Key as the provided OutputLogstashSecretsSslKey0 -func (t *OutputLogstash_Secrets_Ssl_Key) FromOutputLogstashSecretsSslKey0(v OutputLogstashSecretsSslKey0) error { +// FromBedrockSecrets overwrites any union data inside the CreateConnectorSecrets as the provided BedrockSecrets +func (t *CreateConnectorSecrets) FromBedrockSecrets(v BedrockSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputLogstashSecretsSslKey0 performs a merge with any union data inside the OutputLogstash_Secrets_Ssl_Key, using the provided OutputLogstashSecretsSslKey0 -func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey0(v OutputLogstashSecretsSslKey0) error { +// MergeBedrockSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided BedrockSecrets +func (t *CreateConnectorSecrets) MergeBedrockSecrets(v BedrockSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -12970,22 +13816,22 @@ func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey0(v Out return err } -// AsOutputLogstashSecretsSslKey1 returns the union data inside the OutputLogstash_Secrets_Ssl_Key as a OutputLogstashSecretsSslKey1 -func (t OutputLogstash_Secrets_Ssl_Key) AsOutputLogstashSecretsSslKey1() (OutputLogstashSecretsSslKey1, error) { - var body OutputLogstashSecretsSslKey1 +// AsCrowdstrikeSecrets returns the union data inside the CreateConnectorSecrets as a CrowdstrikeSecrets +func (t CreateConnectorSecrets) AsCrowdstrikeSecrets() (CrowdstrikeSecrets, error) { + var body CrowdstrikeSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputLogstashSecretsSslKey1 overwrites any union data inside the OutputLogstash_Secrets_Ssl_Key as the provided OutputLogstashSecretsSslKey1 -func (t *OutputLogstash_Secrets_Ssl_Key) FromOutputLogstashSecretsSslKey1(v OutputLogstashSecretsSslKey1) error { +// FromCrowdstrikeSecrets overwrites any union data inside the CreateConnectorSecrets as the provided CrowdstrikeSecrets +func (t *CreateConnectorSecrets) FromCrowdstrikeSecrets(v CrowdstrikeSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputLogstashSecretsSslKey1 performs a merge with any union data inside the OutputLogstash_Secrets_Ssl_Key, using the provided OutputLogstashSecretsSslKey1 -func (t *OutputLogstash_Secrets_Ssl_Key) 
MergeOutputLogstashSecretsSslKey1(v OutputLogstashSecretsSslKey1) error { +// MergeCrowdstrikeSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided CrowdstrikeSecrets +func (t *CreateConnectorSecrets) MergeCrowdstrikeSecrets(v CrowdstrikeSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -12996,32 +13842,2601 @@ func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey1(v Out return err } -func (t OutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() +// AsD3securitySecrets returns the union data inside the CreateConnectorSecrets as a D3securitySecrets +func (t CreateConnectorSecrets) AsD3securitySecrets() (D3securitySecrets, error) { + var body D3securitySecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromD3securitySecrets overwrites any union data inside the CreateConnectorSecrets as the provided D3securitySecrets +func (t *CreateConnectorSecrets) FromD3securitySecrets(v D3securitySecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeD3securitySecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided D3securitySecrets +func (t *CreateConnectorSecrets) MergeD3securitySecrets(v D3securitySecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsEmailSecrets returns the union data inside the CreateConnectorSecrets as a EmailSecrets +func (t CreateConnectorSecrets) AsEmailSecrets() (EmailSecrets, error) { + var body EmailSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromEmailSecrets overwrites any union data inside the CreateConnectorSecrets as the provided EmailSecrets +func (t *CreateConnectorSecrets) FromEmailSecrets(v EmailSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeEmailSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided EmailSecrets +func (t *CreateConnectorSecrets) MergeEmailSecrets(v EmailSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGeminiSecrets returns the union data inside the CreateConnectorSecrets as a GeminiSecrets +func (t CreateConnectorSecrets) AsGeminiSecrets() (GeminiSecrets, error) { + var body GeminiSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGeminiSecrets overwrites any union data inside the CreateConnectorSecrets as the provided GeminiSecrets +func (t *CreateConnectorSecrets) FromGeminiSecrets(v GeminiSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGeminiSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided GeminiSecrets +func (t *CreateConnectorSecrets) MergeGeminiSecrets(v GeminiSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsResilientSecrets returns the union data inside the CreateConnectorSecrets as a ResilientSecrets +func (t CreateConnectorSecrets) AsResilientSecrets() (ResilientSecrets, error) { + var body ResilientSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromResilientSecrets overwrites any union data inside the 
CreateConnectorSecrets as the provided ResilientSecrets +func (t *CreateConnectorSecrets) FromResilientSecrets(v ResilientSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeResilientSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided ResilientSecrets +func (t *CreateConnectorSecrets) MergeResilientSecrets(v ResilientSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsJiraSecrets returns the union data inside the CreateConnectorSecrets as a JiraSecrets +func (t CreateConnectorSecrets) AsJiraSecrets() (JiraSecrets, error) { + var body JiraSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromJiraSecrets overwrites any union data inside the CreateConnectorSecrets as the provided JiraSecrets +func (t *CreateConnectorSecrets) FromJiraSecrets(v JiraSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeJiraSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided JiraSecrets +func (t *CreateConnectorSecrets) MergeJiraSecrets(v JiraSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsDefenderSecrets returns the union data inside the CreateConnectorSecrets as a DefenderSecrets +func (t CreateConnectorSecrets) AsDefenderSecrets() (DefenderSecrets, error) { + var body DefenderSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromDefenderSecrets overwrites any union data inside the CreateConnectorSecrets as the provided DefenderSecrets +func (t *CreateConnectorSecrets) FromDefenderSecrets(v DefenderSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeDefenderSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided DefenderSecrets +func (t *CreateConnectorSecrets) MergeDefenderSecrets(v DefenderSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTeamsSecrets returns the union data inside the CreateConnectorSecrets as a TeamsSecrets +func (t CreateConnectorSecrets) AsTeamsSecrets() (TeamsSecrets, error) { + var body TeamsSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTeamsSecrets overwrites any union data inside the CreateConnectorSecrets as the provided TeamsSecrets +func (t *CreateConnectorSecrets) FromTeamsSecrets(v TeamsSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTeamsSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided TeamsSecrets +func (t *CreateConnectorSecrets) MergeTeamsSecrets(v TeamsSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGenaiSecrets returns the union data inside the CreateConnectorSecrets as a GenaiSecrets +func (t CreateConnectorSecrets) AsGenaiSecrets() (GenaiSecrets, error) { + var body GenaiSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGenaiSecrets overwrites any union data inside the CreateConnectorSecrets as the provided GenaiSecrets +func (t *CreateConnectorSecrets) 
FromGenaiSecrets(v GenaiSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGenaiSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided GenaiSecrets +func (t *CreateConnectorSecrets) MergeGenaiSecrets(v GenaiSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOpsgenieSecrets returns the union data inside the CreateConnectorSecrets as a OpsgenieSecrets +func (t CreateConnectorSecrets) AsOpsgenieSecrets() (OpsgenieSecrets, error) { + var body OpsgenieSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOpsgenieSecrets overwrites any union data inside the CreateConnectorSecrets as the provided OpsgenieSecrets +func (t *CreateConnectorSecrets) FromOpsgenieSecrets(v OpsgenieSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOpsgenieSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided OpsgenieSecrets +func (t *CreateConnectorSecrets) MergeOpsgenieSecrets(v OpsgenieSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPagerdutySecrets returns the union data inside the CreateConnectorSecrets as a PagerdutySecrets +func (t CreateConnectorSecrets) AsPagerdutySecrets() (PagerdutySecrets, error) { + var body PagerdutySecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPagerdutySecrets overwrites any union data inside the CreateConnectorSecrets as the provided PagerdutySecrets +func (t *CreateConnectorSecrets) FromPagerdutySecrets(v PagerdutySecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePagerdutySecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided PagerdutySecrets +func (t *CreateConnectorSecrets) MergePagerdutySecrets(v PagerdutySecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSentineloneSecrets returns the union data inside the CreateConnectorSecrets as a SentineloneSecrets +func (t CreateConnectorSecrets) AsSentineloneSecrets() (SentineloneSecrets, error) { + var body SentineloneSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSentineloneSecrets overwrites any union data inside the CreateConnectorSecrets as the provided SentineloneSecrets +func (t *CreateConnectorSecrets) FromSentineloneSecrets(v SentineloneSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSentineloneSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided SentineloneSecrets +func (t *CreateConnectorSecrets) MergeSentineloneSecrets(v SentineloneSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServicenowSecrets returns the union data inside the CreateConnectorSecrets as a ServicenowSecrets +func (t CreateConnectorSecrets) AsServicenowSecrets() (ServicenowSecrets, error) { + var body ServicenowSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServicenowSecrets overwrites any union data inside the CreateConnectorSecrets as the 
provided ServicenowSecrets +func (t *CreateConnectorSecrets) FromServicenowSecrets(v ServicenowSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServicenowSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided ServicenowSecrets +func (t *CreateConnectorSecrets) MergeServicenowSecrets(v ServicenowSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSlackApiSecrets returns the union data inside the CreateConnectorSecrets as a SlackApiSecrets +func (t CreateConnectorSecrets) AsSlackApiSecrets() (SlackApiSecrets, error) { + var body SlackApiSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSlackApiSecrets overwrites any union data inside the CreateConnectorSecrets as the provided SlackApiSecrets +func (t *CreateConnectorSecrets) FromSlackApiSecrets(v SlackApiSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSlackApiSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided SlackApiSecrets +func (t *CreateConnectorSecrets) MergeSlackApiSecrets(v SlackApiSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSwimlaneSecrets returns the union data inside the CreateConnectorSecrets as a SwimlaneSecrets +func (t CreateConnectorSecrets) AsSwimlaneSecrets() (SwimlaneSecrets, error) { + var body SwimlaneSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSwimlaneSecrets overwrites any union data inside the CreateConnectorSecrets as the provided SwimlaneSecrets +func (t *CreateConnectorSecrets) FromSwimlaneSecrets(v SwimlaneSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSwimlaneSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided SwimlaneSecrets +func (t *CreateConnectorSecrets) MergeSwimlaneSecrets(v SwimlaneSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsThehiveSecrets returns the union data inside the CreateConnectorSecrets as a ThehiveSecrets +func (t CreateConnectorSecrets) AsThehiveSecrets() (ThehiveSecrets, error) { + var body ThehiveSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromThehiveSecrets overwrites any union data inside the CreateConnectorSecrets as the provided ThehiveSecrets +func (t *CreateConnectorSecrets) FromThehiveSecrets(v ThehiveSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeThehiveSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided ThehiveSecrets +func (t *CreateConnectorSecrets) MergeThehiveSecrets(v ThehiveSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTinesSecrets returns the union data inside the CreateConnectorSecrets as a TinesSecrets +func (t CreateConnectorSecrets) AsTinesSecrets() (TinesSecrets, error) { + var body TinesSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTinesSecrets overwrites any union data inside the CreateConnectorSecrets as the 
provided TinesSecrets +func (t *CreateConnectorSecrets) FromTinesSecrets(v TinesSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTinesSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided TinesSecrets +func (t *CreateConnectorSecrets) MergeTinesSecrets(v TinesSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTorqSecrets returns the union data inside the CreateConnectorSecrets as a TorqSecrets +func (t CreateConnectorSecrets) AsTorqSecrets() (TorqSecrets, error) { + var body TorqSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTorqSecrets overwrites any union data inside the CreateConnectorSecrets as the provided TorqSecrets +func (t *CreateConnectorSecrets) FromTorqSecrets(v TorqSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTorqSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided TorqSecrets +func (t *CreateConnectorSecrets) MergeTorqSecrets(v TorqSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsWebhookSecrets returns the union data inside the CreateConnectorSecrets as a WebhookSecrets +func (t CreateConnectorSecrets) AsWebhookSecrets() (WebhookSecrets, error) { + var body WebhookSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromWebhookSecrets overwrites any union data inside the CreateConnectorSecrets as the provided WebhookSecrets +func (t *CreateConnectorSecrets) FromWebhookSecrets(v WebhookSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeWebhookSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided WebhookSecrets +func (t *CreateConnectorSecrets) MergeWebhookSecrets(v WebhookSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesWebhookSecrets returns the union data inside the CreateConnectorSecrets as a CasesWebhookSecrets +func (t CreateConnectorSecrets) AsCasesWebhookSecrets() (CasesWebhookSecrets, error) { + var body CasesWebhookSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesWebhookSecrets overwrites any union data inside the CreateConnectorSecrets as the provided CasesWebhookSecrets +func (t *CreateConnectorSecrets) FromCasesWebhookSecrets(v CasesWebhookSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesWebhookSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided CasesWebhookSecrets +func (t *CreateConnectorSecrets) MergeCasesWebhookSecrets(v CasesWebhookSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsXmattersSecrets returns the union data inside the CreateConnectorSecrets as a XmattersSecrets +func (t CreateConnectorSecrets) AsXmattersSecrets() (XmattersSecrets, error) { + var body XmattersSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromXmattersSecrets overwrites any union data inside the CreateConnectorSecrets as the provided XmattersSecrets 
+func (t *CreateConnectorSecrets) FromXmattersSecrets(v XmattersSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeXmattersSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided XmattersSecrets +func (t *CreateConnectorSecrets) MergeXmattersSecrets(v XmattersSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCreateParamResponse0 returns the union data inside the CreateParamResponse as a CreateParamResponse0 +func (t CreateParamResponse) AsCreateParamResponse0() (CreateParamResponse0, error) { + var body CreateParamResponse0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCreateParamResponse0 overwrites any union data inside the CreateParamResponse as the provided CreateParamResponse0 +func (t *CreateParamResponse) FromCreateParamResponse0(v CreateParamResponse0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCreateParamResponse0 performs a merge with any union data inside the CreateParamResponse, using the provided CreateParamResponse0 +func (t *CreateParamResponse) MergeCreateParamResponse0(v CreateParamResponse0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSyntheticsPostParameterResponse returns the union data inside the CreateParamResponse as a SyntheticsPostParameterResponse +func (t CreateParamResponse) AsSyntheticsPostParameterResponse() (SyntheticsPostParameterResponse, error) { + var body SyntheticsPostParameterResponse + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSyntheticsPostParameterResponse overwrites any union data inside the CreateParamResponse as the provided SyntheticsPostParameterResponse +func (t *CreateParamResponse) FromSyntheticsPostParameterResponse(v SyntheticsPostParameterResponse) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSyntheticsPostParameterResponse performs a merge with any union data inside the CreateParamResponse, using the provided SyntheticsPostParameterResponse +func (t *CreateParamResponse) MergeSyntheticsPostParameterResponse(v SyntheticsPostParameterResponse) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CreateParamResponse) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CreateParamResponse) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputElasticsearchSecretsSslKey0 returns the union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as a NewOutputElasticsearchSecretsSslKey0 +func (t NewOutputElasticsearch_Secrets_Ssl_Key) AsNewOutputElasticsearchSecretsSslKey0() (NewOutputElasticsearchSecretsSslKey0, error) { + var body NewOutputElasticsearchSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputElasticsearchSecretsSslKey0 overwrites any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as the provided NewOutputElasticsearchSecretsSslKey0 +func (t *NewOutputElasticsearch_Secrets_Ssl_Key) FromNewOutputElasticsearchSecretsSslKey0(v NewOutputElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// 
MergeNewOutputElasticsearchSecretsSslKey0 performs a merge with any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key, using the provided NewOutputElasticsearchSecretsSslKey0 +func (t *NewOutputElasticsearch_Secrets_Ssl_Key) MergeNewOutputElasticsearchSecretsSslKey0(v NewOutputElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputElasticsearchSecretsSslKey1 returns the union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as a NewOutputElasticsearchSecretsSslKey1 +func (t NewOutputElasticsearch_Secrets_Ssl_Key) AsNewOutputElasticsearchSecretsSslKey1() (NewOutputElasticsearchSecretsSslKey1, error) { + var body NewOutputElasticsearchSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputElasticsearchSecretsSslKey1 overwrites any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as the provided NewOutputElasticsearchSecretsSslKey1 +func (t *NewOutputElasticsearch_Secrets_Ssl_Key) FromNewOutputElasticsearchSecretsSslKey1(v NewOutputElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputElasticsearchSecretsSslKey1 performs a merge with any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key, using the provided NewOutputElasticsearchSecretsSslKey1 +func (t *NewOutputElasticsearch_Secrets_Ssl_Key) MergeNewOutputElasticsearchSecretsSslKey1(v NewOutputElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputKafkaSecretsPassword0 returns the union data inside the NewOutputKafka_Secrets_Password as a NewOutputKafkaSecretsPassword0 +func (t NewOutputKafka_Secrets_Password) AsNewOutputKafkaSecretsPassword0() (NewOutputKafkaSecretsPassword0, error) { + var body NewOutputKafkaSecretsPassword0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsPassword0 overwrites any union data inside the NewOutputKafka_Secrets_Password as the provided NewOutputKafkaSecretsPassword0 +func (t *NewOutputKafka_Secrets_Password) FromNewOutputKafkaSecretsPassword0(v NewOutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsPassword0 performs a merge with any union data inside the NewOutputKafka_Secrets_Password, using the provided NewOutputKafkaSecretsPassword0 +func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword0(v NewOutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputKafkaSecretsPassword1 returns the union data inside the NewOutputKafka_Secrets_Password as a NewOutputKafkaSecretsPassword1 +func (t NewOutputKafka_Secrets_Password) AsNewOutputKafkaSecretsPassword1() (NewOutputKafkaSecretsPassword1, error) { + var body NewOutputKafkaSecretsPassword1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsPassword1 
overwrites any union data inside the NewOutputKafka_Secrets_Password as the provided NewOutputKafkaSecretsPassword1 +func (t *NewOutputKafka_Secrets_Password) FromNewOutputKafkaSecretsPassword1(v NewOutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsPassword1 performs a merge with any union data inside the NewOutputKafka_Secrets_Password, using the provided NewOutputKafkaSecretsPassword1 +func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword1(v NewOutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputKafkaSecretsSslKey0 returns the union data inside the NewOutputKafka_Secrets_Ssl_Key as a NewOutputKafkaSecretsSslKey0 +func (t NewOutputKafka_Secrets_Ssl_Key) AsNewOutputKafkaSecretsSslKey0() (NewOutputKafkaSecretsSslKey0, error) { + var body NewOutputKafkaSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsSslKey0 overwrites any union data inside the NewOutputKafka_Secrets_Ssl_Key as the provided NewOutputKafkaSecretsSslKey0 +func (t *NewOutputKafka_Secrets_Ssl_Key) FromNewOutputKafkaSecretsSslKey0(v NewOutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsSslKey0 performs a merge with any union data inside the NewOutputKafka_Secrets_Ssl_Key, using the provided NewOutputKafkaSecretsSslKey0 +func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey0(v NewOutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputKafkaSecretsSslKey1 returns the union data inside the NewOutputKafka_Secrets_Ssl_Key as a NewOutputKafkaSecretsSslKey1 +func (t NewOutputKafka_Secrets_Ssl_Key) AsNewOutputKafkaSecretsSslKey1() (NewOutputKafkaSecretsSslKey1, error) { + var body NewOutputKafkaSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsSslKey1 overwrites any union data inside the NewOutputKafka_Secrets_Ssl_Key as the provided NewOutputKafkaSecretsSslKey1 +func (t *NewOutputKafka_Secrets_Ssl_Key) FromNewOutputKafkaSecretsSslKey1(v NewOutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsSslKey1 performs a merge with any union data inside the NewOutputKafka_Secrets_Ssl_Key, using the provided NewOutputKafkaSecretsSslKey1 +func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey1(v NewOutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputLogstashSecretsSslKey0 returns the union data inside the 
NewOutputLogstash_Secrets_Ssl_Key as a NewOutputLogstashSecretsSslKey0 +func (t NewOutputLogstash_Secrets_Ssl_Key) AsNewOutputLogstashSecretsSslKey0() (NewOutputLogstashSecretsSslKey0, error) { + var body NewOutputLogstashSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputLogstashSecretsSslKey0 overwrites any union data inside the NewOutputLogstash_Secrets_Ssl_Key as the provided NewOutputLogstashSecretsSslKey0 +func (t *NewOutputLogstash_Secrets_Ssl_Key) FromNewOutputLogstashSecretsSslKey0(v NewOutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputLogstashSecretsSslKey0 performs a merge with any union data inside the NewOutputLogstash_Secrets_Ssl_Key, using the provided NewOutputLogstashSecretsSslKey0 +func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey0(v NewOutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputLogstashSecretsSslKey1 returns the union data inside the NewOutputLogstash_Secrets_Ssl_Key as a NewOutputLogstashSecretsSslKey1 +func (t NewOutputLogstash_Secrets_Ssl_Key) AsNewOutputLogstashSecretsSslKey1() (NewOutputLogstashSecretsSslKey1, error) { + var body NewOutputLogstashSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputLogstashSecretsSslKey1 overwrites any union data inside the NewOutputLogstash_Secrets_Ssl_Key as the provided NewOutputLogstashSecretsSslKey1 +func (t *NewOutputLogstash_Secrets_Ssl_Key) FromNewOutputLogstashSecretsSslKey1(v NewOutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputLogstashSecretsSslKey1 performs a merge with any union data inside the NewOutputLogstash_Secrets_Ssl_Key, using the provided NewOutputLogstashSecretsSslKey1 +func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey1(v NewOutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as a NewOutputRemoteElasticsearchSecretsServiceToken0 +func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) AsNewOutputRemoteElasticsearchSecretsServiceToken0() (NewOutputRemoteElasticsearchSecretsServiceToken0, error) { + var body NewOutputRemoteElasticsearchSecretsServiceToken0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as the provided NewOutputRemoteElasticsearchSecretsServiceToken0 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) FromNewOutputRemoteElasticsearchSecretsServiceToken0(v NewOutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside 
the NewOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided NewOutputRemoteElasticsearchSecretsServiceToken0 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemoteElasticsearchSecretsServiceToken0(v NewOutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as a NewOutputRemoteElasticsearchSecretsServiceToken1 +func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) AsNewOutputRemoteElasticsearchSecretsServiceToken1() (NewOutputRemoteElasticsearchSecretsServiceToken1, error) { + var body NewOutputRemoteElasticsearchSecretsServiceToken1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as the provided NewOutputRemoteElasticsearchSecretsServiceToken1 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) FromNewOutputRemoteElasticsearchSecretsServiceToken1(v NewOutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided NewOutputRemoteElasticsearchSecretsServiceToken1 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemoteElasticsearchSecretsServiceToken1(v NewOutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputRemoteElasticsearchSecretsSslKey0 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as a NewOutputRemoteElasticsearchSecretsSslKey0 +func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) AsNewOutputRemoteElasticsearchSecretsSslKey0() (NewOutputRemoteElasticsearchSecretsSslKey0, error) { + var body NewOutputRemoteElasticsearchSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearchSecretsSslKey0 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided NewOutputRemoteElasticsearchSecretsSslKey0 +func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) FromNewOutputRemoteElasticsearchSecretsSslKey0(v NewOutputRemoteElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearchSecretsSslKey0 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided NewOutputRemoteElasticsearchSecretsSslKey0 +func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeNewOutputRemoteElasticsearchSecretsSslKey0(v NewOutputRemoteElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, 
b) + t.union = merged + return err +} + +// AsNewOutputRemoteElasticsearchSecretsSslKey1 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as a NewOutputRemoteElasticsearchSecretsSslKey1 +func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) AsNewOutputRemoteElasticsearchSecretsSslKey1() (NewOutputRemoteElasticsearchSecretsSslKey1, error) { + var body NewOutputRemoteElasticsearchSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearchSecretsSslKey1 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided NewOutputRemoteElasticsearchSecretsSslKey1 +func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) FromNewOutputRemoteElasticsearchSecretsSslKey1(v NewOutputRemoteElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearchSecretsSslKey1 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided NewOutputRemoteElasticsearchSecretsSslKey1 +func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeNewOutputRemoteElasticsearchSecretsSslKey1(v NewOutputRemoteElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputElasticsearch returns the union data inside the NewOutputUnion as a NewOutputElasticsearch +func (t NewOutputUnion) AsNewOutputElasticsearch() (NewOutputElasticsearch, error) { + var body NewOutputElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputElasticsearch +func (t *NewOutputUnion) FromNewOutputElasticsearch(v NewOutputElasticsearch) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputElasticsearch +func (t *NewOutputUnion) MergeNewOutputElasticsearch(v NewOutputElasticsearch) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputRemoteElasticsearch returns the union data inside the NewOutputUnion as a NewOutputRemoteElasticsearch +func (t NewOutputUnion) AsNewOutputRemoteElasticsearch() (NewOutputRemoteElasticsearch, error) { + var body NewOutputRemoteElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputRemoteElasticsearch +func (t *NewOutputUnion) FromNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputRemoteElasticsearch +func (t *NewOutputUnion) MergeNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { + b, err := json.Marshal(v) + if err != nil { + return err + 
} + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputLogstash returns the union data inside the NewOutputUnion as a NewOutputLogstash +func (t NewOutputUnion) AsNewOutputLogstash() (NewOutputLogstash, error) { + var body NewOutputLogstash + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputLogstash overwrites any union data inside the NewOutputUnion as the provided NewOutputLogstash +func (t *NewOutputUnion) FromNewOutputLogstash(v NewOutputLogstash) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputLogstash performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputLogstash +func (t *NewOutputUnion) MergeNewOutputLogstash(v NewOutputLogstash) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputKafka returns the union data inside the NewOutputUnion as a NewOutputKafka +func (t NewOutputUnion) AsNewOutputKafka() (NewOutputKafka, error) { + var body NewOutputKafka + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafka overwrites any union data inside the NewOutputUnion as the provided NewOutputKafka +func (t *NewOutputUnion) FromNewOutputKafka(v NewOutputKafka) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafka performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputKafka +func (t *NewOutputUnion) MergeNewOutputKafka(v NewOutputKafka) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputUnion) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputUnion) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputElasticsearchSecretsSslKey0 returns the union data inside the OutputElasticsearch_Secrets_Ssl_Key as a OutputElasticsearchSecretsSslKey0 +func (t OutputElasticsearch_Secrets_Ssl_Key) AsOutputElasticsearchSecretsSslKey0() (OutputElasticsearchSecretsSslKey0, error) { + var body OutputElasticsearchSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputElasticsearchSecretsSslKey0 overwrites any union data inside the OutputElasticsearch_Secrets_Ssl_Key as the provided OutputElasticsearchSecretsSslKey0 +func (t *OutputElasticsearch_Secrets_Ssl_Key) FromOutputElasticsearchSecretsSslKey0(v OutputElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputElasticsearchSecretsSslKey0 performs a merge with any union data inside the OutputElasticsearch_Secrets_Ssl_Key, using the provided OutputElasticsearchSecretsSslKey0 +func (t *OutputElasticsearch_Secrets_Ssl_Key) MergeOutputElasticsearchSecretsSslKey0(v OutputElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputElasticsearchSecretsSslKey1 returns the union data inside the OutputElasticsearch_Secrets_Ssl_Key as a OutputElasticsearchSecretsSslKey1 +func (t OutputElasticsearch_Secrets_Ssl_Key) AsOutputElasticsearchSecretsSslKey1() (OutputElasticsearchSecretsSslKey1, error) { + var body OutputElasticsearchSecretsSslKey1 + err := 
json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputElasticsearchSecretsSslKey1 overwrites any union data inside the OutputElasticsearch_Secrets_Ssl_Key as the provided OutputElasticsearchSecretsSslKey1 +func (t *OutputElasticsearch_Secrets_Ssl_Key) FromOutputElasticsearchSecretsSslKey1(v OutputElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputElasticsearchSecretsSslKey1 performs a merge with any union data inside the OutputElasticsearch_Secrets_Ssl_Key, using the provided OutputElasticsearchSecretsSslKey1 +func (t *OutputElasticsearch_Secrets_Ssl_Key) MergeOutputElasticsearchSecretsSslKey1(v OutputElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputKafkaSecretsPassword0 returns the union data inside the OutputKafka_Secrets_Password as a OutputKafkaSecretsPassword0 +func (t OutputKafka_Secrets_Password) AsOutputKafkaSecretsPassword0() (OutputKafkaSecretsPassword0, error) { + var body OutputKafkaSecretsPassword0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsPassword0 overwrites any union data inside the OutputKafka_Secrets_Password as the provided OutputKafkaSecretsPassword0 +func (t *OutputKafka_Secrets_Password) FromOutputKafkaSecretsPassword0(v OutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsPassword0 performs a merge with any union data inside the OutputKafka_Secrets_Password, using the provided OutputKafkaSecretsPassword0 +func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword0(v OutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputKafkaSecretsPassword1 returns the union data inside the OutputKafka_Secrets_Password as a OutputKafkaSecretsPassword1 +func (t OutputKafka_Secrets_Password) AsOutputKafkaSecretsPassword1() (OutputKafkaSecretsPassword1, error) { + var body OutputKafkaSecretsPassword1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsPassword1 overwrites any union data inside the OutputKafka_Secrets_Password as the provided OutputKafkaSecretsPassword1 +func (t *OutputKafka_Secrets_Password) FromOutputKafkaSecretsPassword1(v OutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsPassword1 performs a merge with any union data inside the OutputKafka_Secrets_Password, using the provided OutputKafkaSecretsPassword1 +func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword1(v OutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// 
AsOutputKafkaSecretsSslKey0 returns the union data inside the OutputKafka_Secrets_Ssl_Key as a OutputKafkaSecretsSslKey0 +func (t OutputKafka_Secrets_Ssl_Key) AsOutputKafkaSecretsSslKey0() (OutputKafkaSecretsSslKey0, error) { + var body OutputKafkaSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsSslKey0 overwrites any union data inside the OutputKafka_Secrets_Ssl_Key as the provided OutputKafkaSecretsSslKey0 +func (t *OutputKafka_Secrets_Ssl_Key) FromOutputKafkaSecretsSslKey0(v OutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsSslKey0 performs a merge with any union data inside the OutputKafka_Secrets_Ssl_Key, using the provided OutputKafkaSecretsSslKey0 +func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey0(v OutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputKafkaSecretsSslKey1 returns the union data inside the OutputKafka_Secrets_Ssl_Key as a OutputKafkaSecretsSslKey1 +func (t OutputKafka_Secrets_Ssl_Key) AsOutputKafkaSecretsSslKey1() (OutputKafkaSecretsSslKey1, error) { + var body OutputKafkaSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsSslKey1 overwrites any union data inside the OutputKafka_Secrets_Ssl_Key as the provided OutputKafkaSecretsSslKey1 +func (t *OutputKafka_Secrets_Ssl_Key) FromOutputKafkaSecretsSslKey1(v OutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsSslKey1 performs a merge with any union data inside the OutputKafka_Secrets_Ssl_Key, using the provided OutputKafkaSecretsSslKey1 +func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey1(v OutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputLogstashSecretsSslKey0 returns the union data inside the OutputLogstash_Secrets_Ssl_Key as a OutputLogstashSecretsSslKey0 +func (t OutputLogstash_Secrets_Ssl_Key) AsOutputLogstashSecretsSslKey0() (OutputLogstashSecretsSslKey0, error) { + var body OutputLogstashSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputLogstashSecretsSslKey0 overwrites any union data inside the OutputLogstash_Secrets_Ssl_Key as the provided OutputLogstashSecretsSslKey0 +func (t *OutputLogstash_Secrets_Ssl_Key) FromOutputLogstashSecretsSslKey0(v OutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputLogstashSecretsSslKey0 performs a merge with any union data inside the OutputLogstash_Secrets_Ssl_Key, using the provided OutputLogstashSecretsSslKey0 +func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey0(v OutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputLogstashSecretsSslKey1 returns the union data inside the OutputLogstash_Secrets_Ssl_Key 
as a OutputLogstashSecretsSslKey1 +func (t OutputLogstash_Secrets_Ssl_Key) AsOutputLogstashSecretsSslKey1() (OutputLogstashSecretsSslKey1, error) { + var body OutputLogstashSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputLogstashSecretsSslKey1 overwrites any union data inside the OutputLogstash_Secrets_Ssl_Key as the provided OutputLogstashSecretsSslKey1 +func (t *OutputLogstash_Secrets_Ssl_Key) FromOutputLogstashSecretsSslKey1(v OutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputLogstashSecretsSslKey1 performs a merge with any union data inside the OutputLogstash_Secrets_Ssl_Key, using the provided OutputLogstashSecretsSslKey1 +func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey1(v OutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as a OutputRemoteElasticsearchSecretsServiceToken0 +func (t OutputRemoteElasticsearch_Secrets_ServiceToken) AsOutputRemoteElasticsearchSecretsServiceToken0() (OutputRemoteElasticsearchSecretsServiceToken0, error) { + var body OutputRemoteElasticsearchSecretsServiceToken0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as the provided OutputRemoteElasticsearchSecretsServiceToken0 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) FromOutputRemoteElasticsearchSecretsServiceToken0(v OutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken, using the provided OutputRemoteElasticsearchSecretsServiceToken0 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasticsearchSecretsServiceToken0(v OutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as a OutputRemoteElasticsearchSecretsServiceToken1 +func (t OutputRemoteElasticsearch_Secrets_ServiceToken) AsOutputRemoteElasticsearchSecretsServiceToken1() (OutputRemoteElasticsearchSecretsServiceToken1, error) { + var body OutputRemoteElasticsearchSecretsServiceToken1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as the provided OutputRemoteElasticsearchSecretsServiceToken1 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) FromOutputRemoteElasticsearchSecretsServiceToken1(v OutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + 
t.union = b + return err +} + +// MergeOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken, using the provided OutputRemoteElasticsearchSecretsServiceToken1 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasticsearchSecretsServiceToken1(v OutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputRemoteElasticsearchSecretsSslKey0 returns the union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as a OutputRemoteElasticsearchSecretsSslKey0 +func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) AsOutputRemoteElasticsearchSecretsSslKey0() (OutputRemoteElasticsearchSecretsSslKey0, error) { + var body OutputRemoteElasticsearchSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearchSecretsSslKey0 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as the provided OutputRemoteElasticsearchSecretsSslKey0 +func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) FromOutputRemoteElasticsearchSecretsSslKey0(v OutputRemoteElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearchSecretsSslKey0 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided OutputRemoteElasticsearchSecretsSslKey0 +func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) MergeOutputRemoteElasticsearchSecretsSslKey0(v OutputRemoteElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputRemoteElasticsearchSecretsSslKey1 returns the union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as a OutputRemoteElasticsearchSecretsSslKey1 +func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) AsOutputRemoteElasticsearchSecretsSslKey1() (OutputRemoteElasticsearchSecretsSslKey1, error) { + var body OutputRemoteElasticsearchSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearchSecretsSslKey1 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as the provided OutputRemoteElasticsearchSecretsSslKey1 +func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) FromOutputRemoteElasticsearchSecretsSslKey1(v OutputRemoteElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearchSecretsSslKey1 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided OutputRemoteElasticsearchSecretsSslKey1 +func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) MergeOutputRemoteElasticsearchSecretsSslKey1(v OutputRemoteElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) MarshalJSON() 
([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputElasticsearch returns the union data inside the OutputUnion as a OutputElasticsearch +func (t OutputUnion) AsOutputElasticsearch() (OutputElasticsearch, error) { + var body OutputElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputElasticsearch overwrites any union data inside the OutputUnion as the provided OutputElasticsearch +func (t *OutputUnion) FromOutputElasticsearch(v OutputElasticsearch) error { + v.Type = "elasticsearch" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputElasticsearch performs a merge with any union data inside the OutputUnion, using the provided OutputElasticsearch +func (t *OutputUnion) MergeOutputElasticsearch(v OutputElasticsearch) error { + v.Type = "elasticsearch" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputRemoteElasticsearch returns the union data inside the OutputUnion as a OutputRemoteElasticsearch +func (t OutputUnion) AsOutputRemoteElasticsearch() (OutputRemoteElasticsearch, error) { + var body OutputRemoteElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearch overwrites any union data inside the OutputUnion as the provided OutputRemoteElasticsearch +func (t *OutputUnion) FromOutputRemoteElasticsearch(v OutputRemoteElasticsearch) error { + v.Type = "remote_elasticsearch" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearch performs a merge with any union data inside the OutputUnion, using the provided OutputRemoteElasticsearch +func (t *OutputUnion) MergeOutputRemoteElasticsearch(v OutputRemoteElasticsearch) error { + v.Type = "remote_elasticsearch" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputLogstash returns the union data inside the OutputUnion as a OutputLogstash +func (t OutputUnion) AsOutputLogstash() (OutputLogstash, error) { + var body OutputLogstash + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputLogstash overwrites any union data inside the OutputUnion as the provided OutputLogstash +func (t *OutputUnion) FromOutputLogstash(v OutputLogstash) error { + v.Type = "logstash" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputLogstash performs a merge with any union data inside the OutputUnion, using the provided OutputLogstash +func (t *OutputUnion) MergeOutputLogstash(v OutputLogstash) error { + v.Type = "logstash" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputKafka returns the union data inside the OutputUnion as a OutputKafka +func (t OutputUnion) AsOutputKafka() (OutputKafka, error) { + var body OutputKafka + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafka overwrites any union data inside the OutputUnion as the provided OutputKafka +func (t *OutputUnion) FromOutputKafka(v OutputKafka) error { + v.Type = "kafka" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafka performs a merge with any union data inside 
the OutputUnion, using the provided OutputKafka +func (t *OutputUnion) MergeOutputKafka(v OutputKafka) error { + v.Type = "kafka" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputUnion) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"type"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err +} + +func (t OutputUnion) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch discriminator { + case "elasticsearch": + return t.AsOutputElasticsearch() + case "kafka": + return t.AsOutputKafka() + case "logstash": + return t.AsOutputLogstash() + case "remote_elasticsearch": + return t.AsOutputRemoteElasticsearch() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } +} + +func (t OutputUnion) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputUnion) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 returns the union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0() (PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0, error) { + var body PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 overwrites any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 performs a merge with any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 returns the union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1() (PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1, error) { + var body PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 + err := json.Unmarshal(t.union, &body) 
+ return body, err +} + +// FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 overwrites any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 performs a merge with any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageInfoInstallationInfoInstalledKibanaType0 returns the union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as a PackageInfoInstallationInfoInstalledKibanaType0 +func (t PackageInfo_InstallationInfo_InstalledKibana_Type) AsPackageInfoInstallationInfoInstalledKibanaType0() (PackageInfoInstallationInfoInstalledKibanaType0, error) { + var body PackageInfoInstallationInfoInstalledKibanaType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoInstallationInfoInstalledKibanaType0 overwrites any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as the provided PackageInfoInstallationInfoInstalledKibanaType0 +func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) FromPackageInfoInstallationInfoInstalledKibanaType0(v PackageInfoInstallationInfoInstalledKibanaType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoInstallationInfoInstalledKibanaType0 performs a merge with any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type, using the provided PackageInfoInstallationInfoInstalledKibanaType0 +func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) MergePackageInfoInstallationInfoInstalledKibanaType0(v PackageInfoInstallationInfoInstalledKibanaType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageInfoInstallationInfoInstalledKibanaType1 returns the union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as a PackageInfoInstallationInfoInstalledKibanaType1 +func (t PackageInfo_InstallationInfo_InstalledKibana_Type) AsPackageInfoInstallationInfoInstalledKibanaType1() (PackageInfoInstallationInfoInstalledKibanaType1, error) { + var body PackageInfoInstallationInfoInstalledKibanaType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoInstallationInfoInstalledKibanaType1 overwrites any union 
data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as the provided PackageInfoInstallationInfoInstalledKibanaType1 +func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) FromPackageInfoInstallationInfoInstalledKibanaType1(v PackageInfoInstallationInfoInstalledKibanaType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoInstallationInfoInstalledKibanaType1 performs a merge with any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type, using the provided PackageInfoInstallationInfoInstalledKibanaType1 +func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) MergePackageInfoInstallationInfoInstalledKibanaType1(v PackageInfoInstallationInfoInstalledKibanaType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageInfo_InstallationInfo_InstalledKibana_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageInfoType0 returns the union data inside the PackageInfo_Type as a PackageInfoType0 +func (t PackageInfo_Type) AsPackageInfoType0() (PackageInfoType0, error) { + var body PackageInfoType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoType0 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType0 +func (t *PackageInfo_Type) FromPackageInfoType0(v PackageInfoType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoType0 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType0 +func (t *PackageInfo_Type) MergePackageInfoType0(v PackageInfoType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageInfoType1 returns the union data inside the PackageInfo_Type as a PackageInfoType1 +func (t PackageInfo_Type) AsPackageInfoType1() (PackageInfoType1, error) { + var body PackageInfoType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoType1 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType1 +func (t *PackageInfo_Type) FromPackageInfoType1(v PackageInfoType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoType1 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType1 +func (t *PackageInfo_Type) MergePackageInfoType1(v PackageInfoType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageInfoType2 returns the union data inside the PackageInfo_Type as a PackageInfoType2 +func (t PackageInfo_Type) AsPackageInfoType2() (PackageInfoType2, error) { + var body PackageInfoType2 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoType2 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType2 +func (t *PackageInfo_Type) FromPackageInfoType2(v PackageInfoType2) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoType2 performs a merge with any union data inside the PackageInfo_Type, using the 
provided PackageInfoType2 +func (t *PackageInfo_Type) MergePackageInfoType2(v PackageInfoType2) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageInfoType3 returns the union data inside the PackageInfo_Type as a PackageInfoType3 +func (t PackageInfo_Type) AsPackageInfoType3() (PackageInfoType3, error) { + var body PackageInfoType3 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoType3 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType3 +func (t *PackageInfo_Type) FromPackageInfoType3(v PackageInfoType3) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoType3 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType3 +func (t *PackageInfo_Type) MergePackageInfoType3(v PackageInfoType3) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageInfo_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageInfo_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 returns the union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0() (PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0, error) { + var body PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 overwrites any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 performs a merge with any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 returns the union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) 
AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1() (PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1, error) { + var body PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 overwrites any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 performs a merge with any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageListItemInstallationInfoInstalledKibanaType0 returns the union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as a PackageListItemInstallationInfoInstalledKibanaType0 +func (t PackageListItem_InstallationInfo_InstalledKibana_Type) AsPackageListItemInstallationInfoInstalledKibanaType0() (PackageListItemInstallationInfoInstalledKibanaType0, error) { + var body PackageListItemInstallationInfoInstalledKibanaType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemInstallationInfoInstalledKibanaType0 overwrites any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as the provided PackageListItemInstallationInfoInstalledKibanaType0 +func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) FromPackageListItemInstallationInfoInstalledKibanaType0(v PackageListItemInstallationInfoInstalledKibanaType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemInstallationInfoInstalledKibanaType0 performs a merge with any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type, using the provided PackageListItemInstallationInfoInstalledKibanaType0 +func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) MergePackageListItemInstallationInfoInstalledKibanaType0(v PackageListItemInstallationInfoInstalledKibanaType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageListItemInstallationInfoInstalledKibanaType1 returns the union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as a 
PackageListItemInstallationInfoInstalledKibanaType1 +func (t PackageListItem_InstallationInfo_InstalledKibana_Type) AsPackageListItemInstallationInfoInstalledKibanaType1() (PackageListItemInstallationInfoInstalledKibanaType1, error) { + var body PackageListItemInstallationInfoInstalledKibanaType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemInstallationInfoInstalledKibanaType1 overwrites any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as the provided PackageListItemInstallationInfoInstalledKibanaType1 +func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) FromPackageListItemInstallationInfoInstalledKibanaType1(v PackageListItemInstallationInfoInstalledKibanaType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemInstallationInfoInstalledKibanaType1 performs a merge with any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type, using the provided PackageListItemInstallationInfoInstalledKibanaType1 +func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) MergePackageListItemInstallationInfoInstalledKibanaType1(v PackageListItemInstallationInfoInstalledKibanaType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageListItem_InstallationInfo_InstalledKibana_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageListItemType0 returns the union data inside the PackageListItem_Type as a PackageListItemType0 +func (t PackageListItem_Type) AsPackageListItemType0() (PackageListItemType0, error) { + var body PackageListItemType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemType0 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType0 +func (t *PackageListItem_Type) FromPackageListItemType0(v PackageListItemType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemType0 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType0 +func (t *PackageListItem_Type) MergePackageListItemType0(v PackageListItemType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageListItemType1 returns the union data inside the PackageListItem_Type as a PackageListItemType1 +func (t PackageListItem_Type) AsPackageListItemType1() (PackageListItemType1, error) { + var body PackageListItemType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemType1 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType1 +func (t *PackageListItem_Type) FromPackageListItemType1(v PackageListItemType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemType1 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType1 +func (t *PackageListItem_Type) MergePackageListItemType1(v PackageListItemType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union 
= merged + return err +} + +// AsPackageListItemType2 returns the union data inside the PackageListItem_Type as a PackageListItemType2 +func (t PackageListItem_Type) AsPackageListItemType2() (PackageListItemType2, error) { + var body PackageListItemType2 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemType2 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType2 +func (t *PackageListItem_Type) FromPackageListItemType2(v PackageListItemType2) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemType2 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType2 +func (t *PackageListItem_Type) MergePackageListItemType2(v PackageListItemType2) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageListItemType3 returns the union data inside the PackageListItem_Type as a PackageListItemType3 +func (t PackageListItem_Type) AsPackageListItemType3() (PackageListItemType3, error) { + var body PackageListItemType3 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemType3 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType3 +func (t *PackageListItem_Type) FromPackageListItemType3(v PackageListItemType3) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemType3 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType3 +func (t *PackageListItem_Type) MergePackageListItemType3(v PackageListItemType3) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageListItem_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageListItem_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsServerHostSecretsSslEsKey0 returns the union data inside the ServerHost_Secrets_Ssl_EsKey as a ServerHostSecretsSslEsKey0 +func (t ServerHost_Secrets_Ssl_EsKey) AsServerHostSecretsSslEsKey0() (ServerHostSecretsSslEsKey0, error) { + var body ServerHostSecretsSslEsKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServerHostSecretsSslEsKey0 overwrites any union data inside the ServerHost_Secrets_Ssl_EsKey as the provided ServerHostSecretsSslEsKey0 +func (t *ServerHost_Secrets_Ssl_EsKey) FromServerHostSecretsSslEsKey0(v ServerHostSecretsSslEsKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServerHostSecretsSslEsKey0 performs a merge with any union data inside the ServerHost_Secrets_Ssl_EsKey, using the provided ServerHostSecretsSslEsKey0 +func (t *ServerHost_Secrets_Ssl_EsKey) MergeServerHostSecretsSslEsKey0(v ServerHostSecretsSslEsKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServerHostSecretsSslEsKey1 returns the union data inside the ServerHost_Secrets_Ssl_EsKey as a ServerHostSecretsSslEsKey1 +func (t ServerHost_Secrets_Ssl_EsKey) AsServerHostSecretsSslEsKey1() (ServerHostSecretsSslEsKey1, error) { + var body ServerHostSecretsSslEsKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} 
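
For readers skimming the generated union wrappers above, the following stand-alone sketch mirrors the As*/From*/Merge* pattern the code generator emits for each union type; it is illustrative only, and the `exampleUnion`, `variantA`, and `jsonMerge` names are hypothetical stand-ins rather than part of the generated client (the real code delegates merging to `runtime.JSONMerge`).

// Minimal, self-contained sketch of the generated union pattern.
// exampleUnion, variantA and jsonMerge are hypothetical names; the generated
// types use runtime.JSONMerge and per-variant As*/From*/Merge* methods.
package main

import (
	"encoding/json"
	"fmt"
)

// exampleUnion holds the raw JSON of whichever variant was last set,
// analogous to the generated union types above.
type exampleUnion struct {
	union json.RawMessage
}

// variantA is a hypothetical concrete variant of the union.
type variantA struct {
	ID   string `json:"id"`
	Name string `json:"name,omitempty"`
}

// FromVariantA overwrites any union data with v (the From* pattern).
func (t *exampleUnion) FromVariantA(v variantA) error {
	b, err := json.Marshal(v)
	t.union = b
	return err
}

// AsVariantA returns the union data as a variantA (the As* pattern).
func (t exampleUnion) AsVariantA() (variantA, error) {
	var body variantA
	err := json.Unmarshal(t.union, &body)
	return body, err
}

// MergeVariantA merges v into the existing union data (the Merge* pattern);
// jsonMerge below is a simplified stand-in for runtime.JSONMerge.
func (t *exampleUnion) MergeVariantA(v variantA) error {
	b, err := json.Marshal(v)
	if err != nil {
		return err
	}
	merged, err := jsonMerge(t.union, b)
	t.union = merged
	return err
}

// MarshalJSON/UnmarshalJSON simply pass the raw union bytes through.
func (t exampleUnion) MarshalJSON() ([]byte, error)  { return t.union, nil }
func (t *exampleUnion) UnmarshalJSON(b []byte) error { t.union = append(json.RawMessage(nil), b...); return nil }

// jsonMerge overlays the keys of b on top of a (shallow merge).
func jsonMerge(a, b json.RawMessage) (json.RawMessage, error) {
	merged := map[string]interface{}{}
	if len(a) > 0 {
		if err := json.Unmarshal(a, &merged); err != nil {
			return nil, err
		}
	}
	overlay := map[string]interface{}{}
	if err := json.Unmarshal(b, &overlay); err != nil {
		return nil, err
	}
	for k, v := range overlay {
		merged[k] = v
	}
	return json.Marshal(merged)
}

func main() {
	var u exampleUnion
	_ = u.FromVariantA(variantA{ID: "abc"})
	_ = u.MergeVariantA(variantA{ID: "abc", Name: "example"})

	v, _ := u.AsVariantA()
	fmt.Println(v.ID, v.Name) // abc example

	out, _ := json.Marshal(u)
	fmt.Println(string(out)) // {"id":"abc","name":"example"}
}
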
+ +// FromServerHostSecretsSslEsKey1 overwrites any union data inside the ServerHost_Secrets_Ssl_EsKey as the provided ServerHostSecretsSslEsKey1 +func (t *ServerHost_Secrets_Ssl_EsKey) FromServerHostSecretsSslEsKey1(v ServerHostSecretsSslEsKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServerHostSecretsSslEsKey1 performs a merge with any union data inside the ServerHost_Secrets_Ssl_EsKey, using the provided ServerHostSecretsSslEsKey1 +func (t *ServerHost_Secrets_Ssl_EsKey) MergeServerHostSecretsSslEsKey1(v ServerHostSecretsSslEsKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ServerHost_Secrets_Ssl_EsKey) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ServerHost_Secrets_Ssl_EsKey) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsServerHostSecretsSslKey0 returns the union data inside the ServerHost_Secrets_Ssl_Key as a ServerHostSecretsSslKey0 +func (t ServerHost_Secrets_Ssl_Key) AsServerHostSecretsSslKey0() (ServerHostSecretsSslKey0, error) { + var body ServerHostSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServerHostSecretsSslKey0 overwrites any union data inside the ServerHost_Secrets_Ssl_Key as the provided ServerHostSecretsSslKey0 +func (t *ServerHost_Secrets_Ssl_Key) FromServerHostSecretsSslKey0(v ServerHostSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServerHostSecretsSslKey0 performs a merge with any union data inside the ServerHost_Secrets_Ssl_Key, using the provided ServerHostSecretsSslKey0 +func (t *ServerHost_Secrets_Ssl_Key) MergeServerHostSecretsSslKey0(v ServerHostSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServerHostSecretsSslKey1 returns the union data inside the ServerHost_Secrets_Ssl_Key as a ServerHostSecretsSslKey1 +func (t ServerHost_Secrets_Ssl_Key) AsServerHostSecretsSslKey1() (ServerHostSecretsSslKey1, error) { + var body ServerHostSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServerHostSecretsSslKey1 overwrites any union data inside the ServerHost_Secrets_Ssl_Key as the provided ServerHostSecretsSslKey1 +func (t *ServerHost_Secrets_Ssl_Key) FromServerHostSecretsSslKey1(v ServerHostSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServerHostSecretsSslKey1 performs a merge with any union data inside the ServerHost_Secrets_Ssl_Key, using the provided ServerHostSecretsSslKey1 +func (t *ServerHost_Secrets_Ssl_Key) MergeServerHostSecretsSslKey1(v ServerHostSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ServerHost_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() return b, err } -func (t *OutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) +func (t *ServerHost_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsBedrockConfig returns the union data inside the UpdateConnectorConfig as a BedrockConfig +func (t UpdateConnectorConfig) AsBedrockConfig() 
(BedrockConfig, error) { + var body BedrockConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromBedrockConfig overwrites any union data inside the UpdateConnectorConfig as the provided BedrockConfig +func (t *UpdateConnectorConfig) FromBedrockConfig(v BedrockConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeBedrockConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided BedrockConfig +func (t *UpdateConnectorConfig) MergeBedrockConfig(v BedrockConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCrowdstrikeConfig returns the union data inside the UpdateConnectorConfig as a CrowdstrikeConfig +func (t UpdateConnectorConfig) AsCrowdstrikeConfig() (CrowdstrikeConfig, error) { + var body CrowdstrikeConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCrowdstrikeConfig overwrites any union data inside the UpdateConnectorConfig as the provided CrowdstrikeConfig +func (t *UpdateConnectorConfig) FromCrowdstrikeConfig(v CrowdstrikeConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCrowdstrikeConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided CrowdstrikeConfig +func (t *UpdateConnectorConfig) MergeCrowdstrikeConfig(v CrowdstrikeConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsD3securityConfig returns the union data inside the UpdateConnectorConfig as a D3securityConfig +func (t UpdateConnectorConfig) AsD3securityConfig() (D3securityConfig, error) { + var body D3securityConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromD3securityConfig overwrites any union data inside the UpdateConnectorConfig as the provided D3securityConfig +func (t *UpdateConnectorConfig) FromD3securityConfig(v D3securityConfig) error { + b, err := json.Marshal(v) + t.union = b return err } -// AsOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as a OutputRemoteElasticsearchSecretsServiceToken0 -func (t OutputRemoteElasticsearch_Secrets_ServiceToken) AsOutputRemoteElasticsearchSecretsServiceToken0() (OutputRemoteElasticsearchSecretsServiceToken0, error) { - var body OutputRemoteElasticsearchSecretsServiceToken0 +// MergeD3securityConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided D3securityConfig +func (t *UpdateConnectorConfig) MergeD3securityConfig(v D3securityConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsEmailConfig returns the union data inside the UpdateConnectorConfig as a EmailConfig +func (t UpdateConnectorConfig) AsEmailConfig() (EmailConfig, error) { + var body EmailConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromEmailConfig overwrites any union data inside the UpdateConnectorConfig as the provided EmailConfig +func (t *UpdateConnectorConfig) FromEmailConfig(v EmailConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeEmailConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided EmailConfig +func (t 
*UpdateConnectorConfig) MergeEmailConfig(v EmailConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGeminiConfig returns the union data inside the UpdateConnectorConfig as a GeminiConfig +func (t UpdateConnectorConfig) AsGeminiConfig() (GeminiConfig, error) { + var body GeminiConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGeminiConfig overwrites any union data inside the UpdateConnectorConfig as the provided GeminiConfig +func (t *UpdateConnectorConfig) FromGeminiConfig(v GeminiConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGeminiConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided GeminiConfig +func (t *UpdateConnectorConfig) MergeGeminiConfig(v GeminiConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsResilientConfig returns the union data inside the UpdateConnectorConfig as a ResilientConfig +func (t UpdateConnectorConfig) AsResilientConfig() (ResilientConfig, error) { + var body ResilientConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromResilientConfig overwrites any union data inside the UpdateConnectorConfig as the provided ResilientConfig +func (t *UpdateConnectorConfig) FromResilientConfig(v ResilientConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeResilientConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ResilientConfig +func (t *UpdateConnectorConfig) MergeResilientConfig(v ResilientConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsIndexConfig returns the union data inside the UpdateConnectorConfig as a IndexConfig +func (t UpdateConnectorConfig) AsIndexConfig() (IndexConfig, error) { + var body IndexConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromIndexConfig overwrites any union data inside the UpdateConnectorConfig as the provided IndexConfig +func (t *UpdateConnectorConfig) FromIndexConfig(v IndexConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeIndexConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided IndexConfig +func (t *UpdateConnectorConfig) MergeIndexConfig(v IndexConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsJiraConfig returns the union data inside the UpdateConnectorConfig as a JiraConfig +func (t UpdateConnectorConfig) AsJiraConfig() (JiraConfig, error) { + var body JiraConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromJiraConfig overwrites any union data inside the UpdateConnectorConfig as the provided JiraConfig +func (t *UpdateConnectorConfig) FromJiraConfig(v JiraConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeJiraConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided JiraConfig +func (t *UpdateConnectorConfig) MergeJiraConfig(v JiraConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := 
runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsDefenderConfig returns the union data inside the UpdateConnectorConfig as a DefenderConfig +func (t UpdateConnectorConfig) AsDefenderConfig() (DefenderConfig, error) { + var body DefenderConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromDefenderConfig overwrites any union data inside the UpdateConnectorConfig as the provided DefenderConfig +func (t *UpdateConnectorConfig) FromDefenderConfig(v DefenderConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeDefenderConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided DefenderConfig +func (t *UpdateConnectorConfig) MergeDefenderConfig(v DefenderConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGenaiAzureConfig returns the union data inside the UpdateConnectorConfig as a GenaiAzureConfig +func (t UpdateConnectorConfig) AsGenaiAzureConfig() (GenaiAzureConfig, error) { + var body GenaiAzureConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGenaiAzureConfig overwrites any union data inside the UpdateConnectorConfig as the provided GenaiAzureConfig +func (t *UpdateConnectorConfig) FromGenaiAzureConfig(v GenaiAzureConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGenaiAzureConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided GenaiAzureConfig +func (t *UpdateConnectorConfig) MergeGenaiAzureConfig(v GenaiAzureConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGenaiOpenaiConfig returns the union data inside the UpdateConnectorConfig as a GenaiOpenaiConfig +func (t UpdateConnectorConfig) AsGenaiOpenaiConfig() (GenaiOpenaiConfig, error) { + var body GenaiOpenaiConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGenaiOpenaiConfig overwrites any union data inside the UpdateConnectorConfig as the provided GenaiOpenaiConfig +func (t *UpdateConnectorConfig) FromGenaiOpenaiConfig(v GenaiOpenaiConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGenaiOpenaiConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided GenaiOpenaiConfig +func (t *UpdateConnectorConfig) MergeGenaiOpenaiConfig(v GenaiOpenaiConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOpsgenieConfig returns the union data inside the UpdateConnectorConfig as a OpsgenieConfig +func (t UpdateConnectorConfig) AsOpsgenieConfig() (OpsgenieConfig, error) { + var body OpsgenieConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOpsgenieConfig overwrites any union data inside the UpdateConnectorConfig as the provided OpsgenieConfig +func (t *UpdateConnectorConfig) FromOpsgenieConfig(v OpsgenieConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOpsgenieConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided OpsgenieConfig +func (t *UpdateConnectorConfig) MergeOpsgenieConfig(v OpsgenieConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := 
runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPagerdutyConfig returns the union data inside the UpdateConnectorConfig as a PagerdutyConfig +func (t UpdateConnectorConfig) AsPagerdutyConfig() (PagerdutyConfig, error) { + var body PagerdutyConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPagerdutyConfig overwrites any union data inside the UpdateConnectorConfig as the provided PagerdutyConfig +func (t *UpdateConnectorConfig) FromPagerdutyConfig(v PagerdutyConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePagerdutyConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided PagerdutyConfig +func (t *UpdateConnectorConfig) MergePagerdutyConfig(v PagerdutyConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSentineloneConfig returns the union data inside the UpdateConnectorConfig as a SentineloneConfig +func (t UpdateConnectorConfig) AsSentineloneConfig() (SentineloneConfig, error) { + var body SentineloneConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSentineloneConfig overwrites any union data inside the UpdateConnectorConfig as the provided SentineloneConfig +func (t *UpdateConnectorConfig) FromSentineloneConfig(v SentineloneConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSentineloneConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided SentineloneConfig +func (t *UpdateConnectorConfig) MergeSentineloneConfig(v SentineloneConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServicenowConfig returns the union data inside the UpdateConnectorConfig as a ServicenowConfig +func (t UpdateConnectorConfig) AsServicenowConfig() (ServicenowConfig, error) { + var body ServicenowConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServicenowConfig overwrites any union data inside the UpdateConnectorConfig as the provided ServicenowConfig +func (t *UpdateConnectorConfig) FromServicenowConfig(v ServicenowConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServicenowConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ServicenowConfig +func (t *UpdateConnectorConfig) MergeServicenowConfig(v ServicenowConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServicenowItomConfig returns the union data inside the UpdateConnectorConfig as a ServicenowItomConfig +func (t UpdateConnectorConfig) AsServicenowItomConfig() (ServicenowItomConfig, error) { + var body ServicenowItomConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as the provided OutputRemoteElasticsearchSecretsServiceToken0 -func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) FromOutputRemoteElasticsearchSecretsServiceToken0(v OutputRemoteElasticsearchSecretsServiceToken0) error { +// FromServicenowItomConfig overwrites any union data inside the UpdateConnectorConfig as the provided ServicenowItomConfig +func (t 
*UpdateConnectorConfig) FromServicenowItomConfig(v ServicenowItomConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken, using the provided OutputRemoteElasticsearchSecretsServiceToken0 -func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasticsearchSecretsServiceToken0(v OutputRemoteElasticsearchSecretsServiceToken0) error { +// MergeServicenowItomConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ServicenowItomConfig +func (t *UpdateConnectorConfig) MergeServicenowItomConfig(v ServicenowItomConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -13032,22 +16447,22 @@ func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasti return err } -// AsOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as a OutputRemoteElasticsearchSecretsServiceToken1 -func (t OutputRemoteElasticsearch_Secrets_ServiceToken) AsOutputRemoteElasticsearchSecretsServiceToken1() (OutputRemoteElasticsearchSecretsServiceToken1, error) { - var body OutputRemoteElasticsearchSecretsServiceToken1 +// AsSlackApiConfig returns the union data inside the UpdateConnectorConfig as a SlackApiConfig +func (t UpdateConnectorConfig) AsSlackApiConfig() (SlackApiConfig, error) { + var body SlackApiConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as the provided OutputRemoteElasticsearchSecretsServiceToken1 -func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) FromOutputRemoteElasticsearchSecretsServiceToken1(v OutputRemoteElasticsearchSecretsServiceToken1) error { +// FromSlackApiConfig overwrites any union data inside the UpdateConnectorConfig as the provided SlackApiConfig +func (t *UpdateConnectorConfig) FromSlackApiConfig(v SlackApiConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken, using the provided OutputRemoteElasticsearchSecretsServiceToken1 -func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasticsearchSecretsServiceToken1(v OutputRemoteElasticsearchSecretsServiceToken1) error { +// MergeSlackApiConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided SlackApiConfig +func (t *UpdateConnectorConfig) MergeSlackApiConfig(v SlackApiConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -13058,32 +16473,22 @@ func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasti return err } -func (t OutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsOutputRemoteElasticsearchSecretsSslKey0 returns the union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as a OutputRemoteElasticsearchSecretsSslKey0 -func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) AsOutputRemoteElasticsearchSecretsSslKey0() (OutputRemoteElasticsearchSecretsSslKey0, error) { - 
var body OutputRemoteElasticsearchSecretsSslKey0 +// AsSwimlaneConfig returns the union data inside the UpdateConnectorConfig as a SwimlaneConfig +func (t UpdateConnectorConfig) AsSwimlaneConfig() (SwimlaneConfig, error) { + var body SwimlaneConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputRemoteElasticsearchSecretsSslKey0 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as the provided OutputRemoteElasticsearchSecretsSslKey0 -func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) FromOutputRemoteElasticsearchSecretsSslKey0(v OutputRemoteElasticsearchSecretsSslKey0) error { +// FromSwimlaneConfig overwrites any union data inside the UpdateConnectorConfig as the provided SwimlaneConfig +func (t *UpdateConnectorConfig) FromSwimlaneConfig(v SwimlaneConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputRemoteElasticsearchSecretsSslKey0 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided OutputRemoteElasticsearchSecretsSslKey0 -func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) MergeOutputRemoteElasticsearchSecretsSslKey0(v OutputRemoteElasticsearchSecretsSslKey0) error { +// MergeSwimlaneConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided SwimlaneConfig +func (t *UpdateConnectorConfig) MergeSwimlaneConfig(v SwimlaneConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -13094,22 +16499,22 @@ func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) MergeOutputRemoteElasticsear return err } -// AsOutputRemoteElasticsearchSecretsSslKey1 returns the union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as a OutputRemoteElasticsearchSecretsSslKey1 -func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) AsOutputRemoteElasticsearchSecretsSslKey1() (OutputRemoteElasticsearchSecretsSslKey1, error) { - var body OutputRemoteElasticsearchSecretsSslKey1 +// AsThehiveConfig returns the union data inside the UpdateConnectorConfig as a ThehiveConfig +func (t UpdateConnectorConfig) AsThehiveConfig() (ThehiveConfig, error) { + var body ThehiveConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputRemoteElasticsearchSecretsSslKey1 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as the provided OutputRemoteElasticsearchSecretsSslKey1 -func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) FromOutputRemoteElasticsearchSecretsSslKey1(v OutputRemoteElasticsearchSecretsSslKey1) error { +// FromThehiveConfig overwrites any union data inside the UpdateConnectorConfig as the provided ThehiveConfig +func (t *UpdateConnectorConfig) FromThehiveConfig(v ThehiveConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputRemoteElasticsearchSecretsSslKey1 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided OutputRemoteElasticsearchSecretsSslKey1 -func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) MergeOutputRemoteElasticsearchSecretsSslKey1(v OutputRemoteElasticsearchSecretsSslKey1) error { +// MergeThehiveConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ThehiveConfig +func (t *UpdateConnectorConfig) MergeThehiveConfig(v ThehiveConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -13120,34 +16525,22 @@ func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) MergeOutputRemoteElasticsear return err } -func (t 
OutputRemoteElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsOutputElasticsearch returns the union data inside the OutputUnion as a OutputElasticsearch -func (t OutputUnion) AsOutputElasticsearch() (OutputElasticsearch, error) { - var body OutputElasticsearch +// AsTinesConfig returns the union data inside the UpdateConnectorConfig as a TinesConfig +func (t UpdateConnectorConfig) AsTinesConfig() (TinesConfig, error) { + var body TinesConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputElasticsearch overwrites any union data inside the OutputUnion as the provided OutputElasticsearch -func (t *OutputUnion) FromOutputElasticsearch(v OutputElasticsearch) error { - v.Type = "elasticsearch" +// FromTinesConfig overwrites any union data inside the UpdateConnectorConfig as the provided TinesConfig +func (t *UpdateConnectorConfig) FromTinesConfig(v TinesConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputElasticsearch performs a merge with any union data inside the OutputUnion, using the provided OutputElasticsearch -func (t *OutputUnion) MergeOutputElasticsearch(v OutputElasticsearch) error { - v.Type = "elasticsearch" +// MergeTinesConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided TinesConfig +func (t *UpdateConnectorConfig) MergeTinesConfig(v TinesConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -13158,24 +16551,22 @@ func (t *OutputUnion) MergeOutputElasticsearch(v OutputElasticsearch) error { return err } -// AsOutputRemoteElasticsearch returns the union data inside the OutputUnion as a OutputRemoteElasticsearch -func (t OutputUnion) AsOutputRemoteElasticsearch() (OutputRemoteElasticsearch, error) { - var body OutputRemoteElasticsearch +// AsTorqConfig returns the union data inside the UpdateConnectorConfig as a TorqConfig +func (t UpdateConnectorConfig) AsTorqConfig() (TorqConfig, error) { + var body TorqConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputRemoteElasticsearch overwrites any union data inside the OutputUnion as the provided OutputRemoteElasticsearch -func (t *OutputUnion) FromOutputRemoteElasticsearch(v OutputRemoteElasticsearch) error { - v.Type = "remote_elasticsearch" +// FromTorqConfig overwrites any union data inside the UpdateConnectorConfig as the provided TorqConfig +func (t *UpdateConnectorConfig) FromTorqConfig(v TorqConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputRemoteElasticsearch performs a merge with any union data inside the OutputUnion, using the provided OutputRemoteElasticsearch -func (t *OutputUnion) MergeOutputRemoteElasticsearch(v OutputRemoteElasticsearch) error { - v.Type = "remote_elasticsearch" +// MergeTorqConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided TorqConfig +func (t *UpdateConnectorConfig) MergeTorqConfig(v TorqConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -13186,24 +16577,22 @@ func (t *OutputUnion) MergeOutputRemoteElasticsearch(v OutputRemoteElasticsearch return err } -// AsOutputLogstash returns the union data inside the OutputUnion as a OutputLogstash -func (t OutputUnion) AsOutputLogstash() (OutputLogstash, error) { - var body OutputLogstash +// AsWebhookConfig returns the 
union data inside the UpdateConnectorConfig as a WebhookConfig +func (t UpdateConnectorConfig) AsWebhookConfig() (WebhookConfig, error) { + var body WebhookConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputLogstash overwrites any union data inside the OutputUnion as the provided OutputLogstash -func (t *OutputUnion) FromOutputLogstash(v OutputLogstash) error { - v.Type = "logstash" +// FromWebhookConfig overwrites any union data inside the UpdateConnectorConfig as the provided WebhookConfig +func (t *UpdateConnectorConfig) FromWebhookConfig(v WebhookConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputLogstash performs a merge with any union data inside the OutputUnion, using the provided OutputLogstash -func (t *OutputUnion) MergeOutputLogstash(v OutputLogstash) error { - v.Type = "logstash" +// MergeWebhookConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided WebhookConfig +func (t *UpdateConnectorConfig) MergeWebhookConfig(v WebhookConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -13214,24 +16603,22 @@ func (t *OutputUnion) MergeOutputLogstash(v OutputLogstash) error { return err } -// AsOutputKafka returns the union data inside the OutputUnion as a OutputKafka -func (t OutputUnion) AsOutputKafka() (OutputKafka, error) { - var body OutputKafka +// AsCasesWebhookConfig returns the union data inside the UpdateConnectorConfig as a CasesWebhookConfig +func (t UpdateConnectorConfig) AsCasesWebhookConfig() (CasesWebhookConfig, error) { + var body CasesWebhookConfig err := json.Unmarshal(t.union, &body) return body, err } -// FromOutputKafka overwrites any union data inside the OutputUnion as the provided OutputKafka -func (t *OutputUnion) FromOutputKafka(v OutputKafka) error { - v.Type = "kafka" +// FromCasesWebhookConfig overwrites any union data inside the UpdateConnectorConfig as the provided CasesWebhookConfig +func (t *UpdateConnectorConfig) FromCasesWebhookConfig(v CasesWebhookConfig) error { b, err := json.Marshal(v) t.union = b return err } -// MergeOutputKafka performs a merge with any union data inside the OutputUnion, using the provided OutputKafka -func (t *OutputUnion) MergeOutputKafka(v OutputKafka) error { - v.Type = "kafka" +// MergeCasesWebhookConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided CasesWebhookConfig +func (t *UpdateConnectorConfig) MergeCasesWebhookConfig(v CasesWebhookConfig) error { b, err := json.Marshal(v) if err != nil { return err @@ -13242,59 +16629,48 @@ func (t *OutputUnion) MergeOutputKafka(v OutputKafka) error { return err } -func (t OutputUnion) Discriminator() (string, error) { - var discriminator struct { - Discriminator string `json:"type"` - } - err := json.Unmarshal(t.union, &discriminator) - return discriminator.Discriminator, err +// AsXmattersConfig returns the union data inside the UpdateConnectorConfig as a XmattersConfig +func (t UpdateConnectorConfig) AsXmattersConfig() (XmattersConfig, error) { + var body XmattersConfig + err := json.Unmarshal(t.union, &body) + return body, err } -func (t OutputUnion) ValueByDiscriminator() (interface{}, error) { - discriminator, err := t.Discriminator() - if err != nil { - return nil, err - } - switch discriminator { - case "elasticsearch": - return t.AsOutputElasticsearch() - case "kafka": - return t.AsOutputKafka() - case "logstash": - return t.AsOutputLogstash() - case "remote_elasticsearch": - return t.AsOutputRemoteElasticsearch() - 
default: - return nil, errors.New("unknown discriminator value: " + discriminator) - } +// FromXmattersConfig overwrites any union data inside the UpdateConnectorConfig as the provided XmattersConfig +func (t *UpdateConnectorConfig) FromXmattersConfig(v XmattersConfig) error { + b, err := json.Marshal(v) + t.union = b + return err } -func (t OutputUnion) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} +// MergeXmattersConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided XmattersConfig +func (t *UpdateConnectorConfig) MergeXmattersConfig(v XmattersConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } -func (t *OutputUnion) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged return err } -// AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 returns the union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0() (PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0, error) { - var body PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 +// AsBedrockSecrets returns the union data inside the UpdateConnectorSecrets as a BedrockSecrets +func (t UpdateConnectorSecrets) AsBedrockSecrets() (BedrockSecrets, error) { + var body BedrockSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 overwrites any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0) error { +// FromBedrockSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided BedrockSecrets +func (t *UpdateConnectorSecrets) FromBedrockSecrets(v BedrockSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 performs a merge with any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0) error { +// MergeBedrockSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided BedrockSecrets +func (t *UpdateConnectorSecrets) MergeBedrockSecrets(v BedrockSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13305,22 +16681,22 @@ func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) Merg return err } -// AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 returns the union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t 
PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1() (PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1, error) { - var body PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 +// AsCrowdstrikeSecrets returns the union data inside the UpdateConnectorSecrets as a CrowdstrikeSecrets +func (t UpdateConnectorSecrets) AsCrowdstrikeSecrets() (CrowdstrikeSecrets, error) { + var body CrowdstrikeSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 overwrites any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1) error { +// FromCrowdstrikeSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided CrowdstrikeSecrets +func (t *UpdateConnectorSecrets) FromCrowdstrikeSecrets(v CrowdstrikeSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 performs a merge with any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1) error { +// MergeCrowdstrikeSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided CrowdstrikeSecrets +func (t *UpdateConnectorSecrets) MergeCrowdstrikeSecrets(v CrowdstrikeSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13331,32 +16707,22 @@ func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) Merg return err } -func (t PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsPackageInfoInstallationInfoInstalledKibanaType0 returns the union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as a PackageInfoInstallationInfoInstalledKibanaType0 -func (t PackageInfo_InstallationInfo_InstalledKibana_Type) AsPackageInfoInstallationInfoInstalledKibanaType0() (PackageInfoInstallationInfoInstalledKibanaType0, error) { - var body PackageInfoInstallationInfoInstalledKibanaType0 +// AsD3securitySecrets returns the union data inside the UpdateConnectorSecrets as a D3securitySecrets +func (t UpdateConnectorSecrets) AsD3securitySecrets() (D3securitySecrets, error) { + var body D3securitySecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageInfoInstallationInfoInstalledKibanaType0 overwrites any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as the provided PackageInfoInstallationInfoInstalledKibanaType0 -func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) FromPackageInfoInstallationInfoInstalledKibanaType0(v 
PackageInfoInstallationInfoInstalledKibanaType0) error { +// FromD3securitySecrets overwrites any union data inside the UpdateConnectorSecrets as the provided D3securitySecrets +func (t *UpdateConnectorSecrets) FromD3securitySecrets(v D3securitySecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageInfoInstallationInfoInstalledKibanaType0 performs a merge with any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type, using the provided PackageInfoInstallationInfoInstalledKibanaType0 -func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) MergePackageInfoInstallationInfoInstalledKibanaType0(v PackageInfoInstallationInfoInstalledKibanaType0) error { +// MergeD3securitySecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided D3securitySecrets +func (t *UpdateConnectorSecrets) MergeD3securitySecrets(v D3securitySecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13367,22 +16733,22 @@ func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) MergePackageInfoInst return err } -// AsPackageInfoInstallationInfoInstalledKibanaType1 returns the union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as a PackageInfoInstallationInfoInstalledKibanaType1 -func (t PackageInfo_InstallationInfo_InstalledKibana_Type) AsPackageInfoInstallationInfoInstalledKibanaType1() (PackageInfoInstallationInfoInstalledKibanaType1, error) { - var body PackageInfoInstallationInfoInstalledKibanaType1 +// AsEmailSecrets returns the union data inside the UpdateConnectorSecrets as a EmailSecrets +func (t UpdateConnectorSecrets) AsEmailSecrets() (EmailSecrets, error) { + var body EmailSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageInfoInstallationInfoInstalledKibanaType1 overwrites any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as the provided PackageInfoInstallationInfoInstalledKibanaType1 -func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) FromPackageInfoInstallationInfoInstalledKibanaType1(v PackageInfoInstallationInfoInstalledKibanaType1) error { +// FromEmailSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided EmailSecrets +func (t *UpdateConnectorSecrets) FromEmailSecrets(v EmailSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageInfoInstallationInfoInstalledKibanaType1 performs a merge with any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type, using the provided PackageInfoInstallationInfoInstalledKibanaType1 -func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) MergePackageInfoInstallationInfoInstalledKibanaType1(v PackageInfoInstallationInfoInstalledKibanaType1) error { +// MergeEmailSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided EmailSecrets +func (t *UpdateConnectorSecrets) MergeEmailSecrets(v EmailSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13393,32 +16759,22 @@ func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) MergePackageInfoInst return err } -func (t PackageInfo_InstallationInfo_InstalledKibana_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsPackageInfoType0 returns the union data inside the PackageInfo_Type as a PackageInfoType0 -func 
(t PackageInfo_Type) AsPackageInfoType0() (PackageInfoType0, error) { - var body PackageInfoType0 +// AsGeminiSecrets returns the union data inside the UpdateConnectorSecrets as a GeminiSecrets +func (t UpdateConnectorSecrets) AsGeminiSecrets() (GeminiSecrets, error) { + var body GeminiSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageInfoType0 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType0 -func (t *PackageInfo_Type) FromPackageInfoType0(v PackageInfoType0) error { +// FromGeminiSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided GeminiSecrets +func (t *UpdateConnectorSecrets) FromGeminiSecrets(v GeminiSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageInfoType0 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType0 -func (t *PackageInfo_Type) MergePackageInfoType0(v PackageInfoType0) error { +// MergeGeminiSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided GeminiSecrets +func (t *UpdateConnectorSecrets) MergeGeminiSecrets(v GeminiSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13429,22 +16785,22 @@ func (t *PackageInfo_Type) MergePackageInfoType0(v PackageInfoType0) error { return err } -// AsPackageInfoType1 returns the union data inside the PackageInfo_Type as a PackageInfoType1 -func (t PackageInfo_Type) AsPackageInfoType1() (PackageInfoType1, error) { - var body PackageInfoType1 +// AsResilientSecrets returns the union data inside the UpdateConnectorSecrets as a ResilientSecrets +func (t UpdateConnectorSecrets) AsResilientSecrets() (ResilientSecrets, error) { + var body ResilientSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageInfoType1 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType1 -func (t *PackageInfo_Type) FromPackageInfoType1(v PackageInfoType1) error { +// FromResilientSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided ResilientSecrets +func (t *UpdateConnectorSecrets) FromResilientSecrets(v ResilientSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageInfoType1 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType1 -func (t *PackageInfo_Type) MergePackageInfoType1(v PackageInfoType1) error { +// MergeResilientSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided ResilientSecrets +func (t *UpdateConnectorSecrets) MergeResilientSecrets(v ResilientSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13455,22 +16811,22 @@ func (t *PackageInfo_Type) MergePackageInfoType1(v PackageInfoType1) error { return err } -// AsPackageInfoType2 returns the union data inside the PackageInfo_Type as a PackageInfoType2 -func (t PackageInfo_Type) AsPackageInfoType2() (PackageInfoType2, error) { - var body PackageInfoType2 +// AsJiraSecrets returns the union data inside the UpdateConnectorSecrets as a JiraSecrets +func (t UpdateConnectorSecrets) AsJiraSecrets() (JiraSecrets, error) { + var body JiraSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageInfoType2 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType2 -func (t *PackageInfo_Type) FromPackageInfoType2(v PackageInfoType2) error { +// FromJiraSecrets overwrites any union data inside the 
UpdateConnectorSecrets as the provided JiraSecrets +func (t *UpdateConnectorSecrets) FromJiraSecrets(v JiraSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageInfoType2 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType2 -func (t *PackageInfo_Type) MergePackageInfoType2(v PackageInfoType2) error { +// MergeJiraSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided JiraSecrets +func (t *UpdateConnectorSecrets) MergeJiraSecrets(v JiraSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13481,22 +16837,22 @@ func (t *PackageInfo_Type) MergePackageInfoType2(v PackageInfoType2) error { return err } -// AsPackageInfoType3 returns the union data inside the PackageInfo_Type as a PackageInfoType3 -func (t PackageInfo_Type) AsPackageInfoType3() (PackageInfoType3, error) { - var body PackageInfoType3 +// AsTeamsSecrets returns the union data inside the UpdateConnectorSecrets as a TeamsSecrets +func (t UpdateConnectorSecrets) AsTeamsSecrets() (TeamsSecrets, error) { + var body TeamsSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageInfoType3 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType3 -func (t *PackageInfo_Type) FromPackageInfoType3(v PackageInfoType3) error { +// FromTeamsSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided TeamsSecrets +func (t *UpdateConnectorSecrets) FromTeamsSecrets(v TeamsSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageInfoType3 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType3 -func (t *PackageInfo_Type) MergePackageInfoType3(v PackageInfoType3) error { +// MergeTeamsSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided TeamsSecrets +func (t *UpdateConnectorSecrets) MergeTeamsSecrets(v TeamsSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13507,32 +16863,22 @@ func (t *PackageInfo_Type) MergePackageInfoType3(v PackageInfoType3) error { return err } -func (t PackageInfo_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *PackageInfo_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 returns the union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0() (PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0, error) { - var body PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 +// AsGenaiSecrets returns the union data inside the UpdateConnectorSecrets as a GenaiSecrets +func (t UpdateConnectorSecrets) AsGenaiSecrets() (GenaiSecrets, error) { + var body GenaiSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 overwrites any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t 
*PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0) error { +// FromGenaiSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided GenaiSecrets +func (t *UpdateConnectorSecrets) FromGenaiSecrets(v GenaiSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 performs a merge with any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0) error { +// MergeGenaiSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided GenaiSecrets +func (t *UpdateConnectorSecrets) MergeGenaiSecrets(v GenaiSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13543,22 +16889,22 @@ func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) return err } -// AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 returns the union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1() (PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1, error) { - var body PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 +// AsOpsgenieSecrets returns the union data inside the UpdateConnectorSecrets as a OpsgenieSecrets +func (t UpdateConnectorSecrets) AsOpsgenieSecrets() (OpsgenieSecrets, error) { + var body OpsgenieSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 overwrites any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1) error { +// FromOpsgenieSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided OpsgenieSecrets +func (t *UpdateConnectorSecrets) FromOpsgenieSecrets(v OpsgenieSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 performs a merge with any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1) error { +// MergeOpsgenieSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided 
OpsgenieSecrets +func (t *UpdateConnectorSecrets) MergeOpsgenieSecrets(v OpsgenieSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13569,32 +16915,48 @@ func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) return err } -func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err +// AsPagerdutySecrets returns the union data inside the UpdateConnectorSecrets as a PagerdutySecrets +func (t UpdateConnectorSecrets) AsPagerdutySecrets() (PagerdutySecrets, error) { + var body PagerdutySecrets + err := json.Unmarshal(t.union, &body) + return body, err } -func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) +// FromPagerdutySecrets overwrites any union data inside the UpdateConnectorSecrets as the provided PagerdutySecrets +func (t *UpdateConnectorSecrets) FromPagerdutySecrets(v PagerdutySecrets) error { + b, err := json.Marshal(v) + t.union = b return err } -// AsPackageListItemInstallationInfoInstalledKibanaType0 returns the union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as a PackageListItemInstallationInfoInstalledKibanaType0 -func (t PackageListItem_InstallationInfo_InstalledKibana_Type) AsPackageListItemInstallationInfoInstalledKibanaType0() (PackageListItemInstallationInfoInstalledKibanaType0, error) { - var body PackageListItemInstallationInfoInstalledKibanaType0 +// MergePagerdutySecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided PagerdutySecrets +func (t *UpdateConnectorSecrets) MergePagerdutySecrets(v PagerdutySecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSentineloneSecrets returns the union data inside the UpdateConnectorSecrets as a SentineloneSecrets +func (t UpdateConnectorSecrets) AsSentineloneSecrets() (SentineloneSecrets, error) { + var body SentineloneSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageListItemInstallationInfoInstalledKibanaType0 overwrites any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as the provided PackageListItemInstallationInfoInstalledKibanaType0 -func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) FromPackageListItemInstallationInfoInstalledKibanaType0(v PackageListItemInstallationInfoInstalledKibanaType0) error { +// FromSentineloneSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided SentineloneSecrets +func (t *UpdateConnectorSecrets) FromSentineloneSecrets(v SentineloneSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageListItemInstallationInfoInstalledKibanaType0 performs a merge with any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type, using the provided PackageListItemInstallationInfoInstalledKibanaType0 -func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) MergePackageListItemInstallationInfoInstalledKibanaType0(v PackageListItemInstallationInfoInstalledKibanaType0) error { +// MergeSentineloneSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided SentineloneSecrets +func (t *UpdateConnectorSecrets) MergeSentineloneSecrets(v SentineloneSecrets) error { b, err := json.Marshal(v) if err != nil { 
return err @@ -13605,22 +16967,22 @@ func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) MergePackageList return err } -// AsPackageListItemInstallationInfoInstalledKibanaType1 returns the union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as a PackageListItemInstallationInfoInstalledKibanaType1 -func (t PackageListItem_InstallationInfo_InstalledKibana_Type) AsPackageListItemInstallationInfoInstalledKibanaType1() (PackageListItemInstallationInfoInstalledKibanaType1, error) { - var body PackageListItemInstallationInfoInstalledKibanaType1 +// AsServicenowSecrets returns the union data inside the UpdateConnectorSecrets as a ServicenowSecrets +func (t UpdateConnectorSecrets) AsServicenowSecrets() (ServicenowSecrets, error) { + var body ServicenowSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageListItemInstallationInfoInstalledKibanaType1 overwrites any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as the provided PackageListItemInstallationInfoInstalledKibanaType1 -func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) FromPackageListItemInstallationInfoInstalledKibanaType1(v PackageListItemInstallationInfoInstalledKibanaType1) error { +// FromServicenowSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided ServicenowSecrets +func (t *UpdateConnectorSecrets) FromServicenowSecrets(v ServicenowSecrets) error { b, err := json.Marshal(v) t.union = b return err } - -// MergePackageListItemInstallationInfoInstalledKibanaType1 performs a merge with any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type, using the provided PackageListItemInstallationInfoInstalledKibanaType1 -func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) MergePackageListItemInstallationInfoInstalledKibanaType1(v PackageListItemInstallationInfoInstalledKibanaType1) error { + +// MergeServicenowSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided ServicenowSecrets +func (t *UpdateConnectorSecrets) MergeServicenowSecrets(v ServicenowSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13631,32 +16993,22 @@ func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) MergePackageList return err } -func (t PackageListItem_InstallationInfo_InstalledKibana_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsPackageListItemType0 returns the union data inside the PackageListItem_Type as a PackageListItemType0 -func (t PackageListItem_Type) AsPackageListItemType0() (PackageListItemType0, error) { - var body PackageListItemType0 +// AsSlackApiSecrets returns the union data inside the UpdateConnectorSecrets as a SlackApiSecrets +func (t UpdateConnectorSecrets) AsSlackApiSecrets() (SlackApiSecrets, error) { + var body SlackApiSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageListItemType0 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType0 -func (t *PackageListItem_Type) FromPackageListItemType0(v PackageListItemType0) error { +// FromSlackApiSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided SlackApiSecrets +func (t *UpdateConnectorSecrets) FromSlackApiSecrets(v SlackApiSecrets) error { b, err := 
json.Marshal(v) t.union = b return err } -// MergePackageListItemType0 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType0 -func (t *PackageListItem_Type) MergePackageListItemType0(v PackageListItemType0) error { +// MergeSlackApiSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided SlackApiSecrets +func (t *UpdateConnectorSecrets) MergeSlackApiSecrets(v SlackApiSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13667,22 +17019,22 @@ func (t *PackageListItem_Type) MergePackageListItemType0(v PackageListItemType0) return err } -// AsPackageListItemType1 returns the union data inside the PackageListItem_Type as a PackageListItemType1 -func (t PackageListItem_Type) AsPackageListItemType1() (PackageListItemType1, error) { - var body PackageListItemType1 +// AsSwimlaneSecrets returns the union data inside the UpdateConnectorSecrets as a SwimlaneSecrets +func (t UpdateConnectorSecrets) AsSwimlaneSecrets() (SwimlaneSecrets, error) { + var body SwimlaneSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageListItemType1 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType1 -func (t *PackageListItem_Type) FromPackageListItemType1(v PackageListItemType1) error { +// FromSwimlaneSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided SwimlaneSecrets +func (t *UpdateConnectorSecrets) FromSwimlaneSecrets(v SwimlaneSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageListItemType1 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType1 -func (t *PackageListItem_Type) MergePackageListItemType1(v PackageListItemType1) error { +// MergeSwimlaneSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided SwimlaneSecrets +func (t *UpdateConnectorSecrets) MergeSwimlaneSecrets(v SwimlaneSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13693,22 +17045,22 @@ func (t *PackageListItem_Type) MergePackageListItemType1(v PackageListItemType1) return err } -// AsPackageListItemType2 returns the union data inside the PackageListItem_Type as a PackageListItemType2 -func (t PackageListItem_Type) AsPackageListItemType2() (PackageListItemType2, error) { - var body PackageListItemType2 +// AsThehiveSecrets returns the union data inside the UpdateConnectorSecrets as a ThehiveSecrets +func (t UpdateConnectorSecrets) AsThehiveSecrets() (ThehiveSecrets, error) { + var body ThehiveSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageListItemType2 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType2 -func (t *PackageListItem_Type) FromPackageListItemType2(v PackageListItemType2) error { +// FromThehiveSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided ThehiveSecrets +func (t *UpdateConnectorSecrets) FromThehiveSecrets(v ThehiveSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageListItemType2 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType2 -func (t *PackageListItem_Type) MergePackageListItemType2(v PackageListItemType2) error { +// MergeThehiveSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided ThehiveSecrets +func (t 
*UpdateConnectorSecrets) MergeThehiveSecrets(v ThehiveSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13719,22 +17071,22 @@ func (t *PackageListItem_Type) MergePackageListItemType2(v PackageListItemType2) return err } -// AsPackageListItemType3 returns the union data inside the PackageListItem_Type as a PackageListItemType3 -func (t PackageListItem_Type) AsPackageListItemType3() (PackageListItemType3, error) { - var body PackageListItemType3 +// AsTinesSecrets returns the union data inside the UpdateConnectorSecrets as a TinesSecrets +func (t UpdateConnectorSecrets) AsTinesSecrets() (TinesSecrets, error) { + var body TinesSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromPackageListItemType3 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType3 -func (t *PackageListItem_Type) FromPackageListItemType3(v PackageListItemType3) error { +// FromTinesSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided TinesSecrets +func (t *UpdateConnectorSecrets) FromTinesSecrets(v TinesSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergePackageListItemType3 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType3 -func (t *PackageListItem_Type) MergePackageListItemType3(v PackageListItemType3) error { +// MergeTinesSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided TinesSecrets +func (t *UpdateConnectorSecrets) MergeTinesSecrets(v TinesSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13745,32 +17097,22 @@ func (t *PackageListItem_Type) MergePackageListItemType3(v PackageListItemType3) return err } -func (t PackageListItem_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *PackageListItem_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsServerHostSecretsSslEsKey0 returns the union data inside the ServerHost_Secrets_Ssl_EsKey as a ServerHostSecretsSslEsKey0 -func (t ServerHost_Secrets_Ssl_EsKey) AsServerHostSecretsSslEsKey0() (ServerHostSecretsSslEsKey0, error) { - var body ServerHostSecretsSslEsKey0 +// AsTorqSecrets returns the union data inside the UpdateConnectorSecrets as a TorqSecrets +func (t UpdateConnectorSecrets) AsTorqSecrets() (TorqSecrets, error) { + var body TorqSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromServerHostSecretsSslEsKey0 overwrites any union data inside the ServerHost_Secrets_Ssl_EsKey as the provided ServerHostSecretsSslEsKey0 -func (t *ServerHost_Secrets_Ssl_EsKey) FromServerHostSecretsSslEsKey0(v ServerHostSecretsSslEsKey0) error { +// FromTorqSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided TorqSecrets +func (t *UpdateConnectorSecrets) FromTorqSecrets(v TorqSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergeServerHostSecretsSslEsKey0 performs a merge with any union data inside the ServerHost_Secrets_Ssl_EsKey, using the provided ServerHostSecretsSslEsKey0 -func (t *ServerHost_Secrets_Ssl_EsKey) MergeServerHostSecretsSslEsKey0(v ServerHostSecretsSslEsKey0) error { +// MergeTorqSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided TorqSecrets +func (t *UpdateConnectorSecrets) MergeTorqSecrets(v TorqSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13781,22 
+17123,22 @@ func (t *ServerHost_Secrets_Ssl_EsKey) MergeServerHostSecretsSslEsKey0(v ServerH return err } -// AsServerHostSecretsSslEsKey1 returns the union data inside the ServerHost_Secrets_Ssl_EsKey as a ServerHostSecretsSslEsKey1 -func (t ServerHost_Secrets_Ssl_EsKey) AsServerHostSecretsSslEsKey1() (ServerHostSecretsSslEsKey1, error) { - var body ServerHostSecretsSslEsKey1 +// AsWebhookSecrets returns the union data inside the UpdateConnectorSecrets as a WebhookSecrets +func (t UpdateConnectorSecrets) AsWebhookSecrets() (WebhookSecrets, error) { + var body WebhookSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromServerHostSecretsSslEsKey1 overwrites any union data inside the ServerHost_Secrets_Ssl_EsKey as the provided ServerHostSecretsSslEsKey1 -func (t *ServerHost_Secrets_Ssl_EsKey) FromServerHostSecretsSslEsKey1(v ServerHostSecretsSslEsKey1) error { +// FromWebhookSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided WebhookSecrets +func (t *UpdateConnectorSecrets) FromWebhookSecrets(v WebhookSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergeServerHostSecretsSslEsKey1 performs a merge with any union data inside the ServerHost_Secrets_Ssl_EsKey, using the provided ServerHostSecretsSslEsKey1 -func (t *ServerHost_Secrets_Ssl_EsKey) MergeServerHostSecretsSslEsKey1(v ServerHostSecretsSslEsKey1) error { +// MergeWebhookSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided WebhookSecrets +func (t *UpdateConnectorSecrets) MergeWebhookSecrets(v WebhookSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13807,32 +17149,22 @@ func (t *ServerHost_Secrets_Ssl_EsKey) MergeServerHostSecretsSslEsKey1(v ServerH return err } -func (t ServerHost_Secrets_Ssl_EsKey) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *ServerHost_Secrets_Ssl_EsKey) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - -// AsServerHostSecretsSslKey0 returns the union data inside the ServerHost_Secrets_Ssl_Key as a ServerHostSecretsSslKey0 -func (t ServerHost_Secrets_Ssl_Key) AsServerHostSecretsSslKey0() (ServerHostSecretsSslKey0, error) { - var body ServerHostSecretsSslKey0 +// AsCasesWebhookSecrets returns the union data inside the UpdateConnectorSecrets as a CasesWebhookSecrets +func (t UpdateConnectorSecrets) AsCasesWebhookSecrets() (CasesWebhookSecrets, error) { + var body CasesWebhookSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromServerHostSecretsSslKey0 overwrites any union data inside the ServerHost_Secrets_Ssl_Key as the provided ServerHostSecretsSslKey0 -func (t *ServerHost_Secrets_Ssl_Key) FromServerHostSecretsSslKey0(v ServerHostSecretsSslKey0) error { +// FromCasesWebhookSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided CasesWebhookSecrets +func (t *UpdateConnectorSecrets) FromCasesWebhookSecrets(v CasesWebhookSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergeServerHostSecretsSslKey0 performs a merge with any union data inside the ServerHost_Secrets_Ssl_Key, using the provided ServerHostSecretsSslKey0 -func (t *ServerHost_Secrets_Ssl_Key) MergeServerHostSecretsSslKey0(v ServerHostSecretsSslKey0) error { +// MergeCasesWebhookSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided CasesWebhookSecrets +func (t *UpdateConnectorSecrets) MergeCasesWebhookSecrets(v 
CasesWebhookSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13843,22 +17175,22 @@ func (t *ServerHost_Secrets_Ssl_Key) MergeServerHostSecretsSslKey0(v ServerHostS return err } -// AsServerHostSecretsSslKey1 returns the union data inside the ServerHost_Secrets_Ssl_Key as a ServerHostSecretsSslKey1 -func (t ServerHost_Secrets_Ssl_Key) AsServerHostSecretsSslKey1() (ServerHostSecretsSslKey1, error) { - var body ServerHostSecretsSslKey1 +// AsXmattersSecrets returns the union data inside the UpdateConnectorSecrets as a XmattersSecrets +func (t UpdateConnectorSecrets) AsXmattersSecrets() (XmattersSecrets, error) { + var body XmattersSecrets err := json.Unmarshal(t.union, &body) return body, err } -// FromServerHostSecretsSslKey1 overwrites any union data inside the ServerHost_Secrets_Ssl_Key as the provided ServerHostSecretsSslKey1 -func (t *ServerHost_Secrets_Ssl_Key) FromServerHostSecretsSslKey1(v ServerHostSecretsSslKey1) error { +// FromXmattersSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided XmattersSecrets +func (t *UpdateConnectorSecrets) FromXmattersSecrets(v XmattersSecrets) error { b, err := json.Marshal(v) t.union = b return err } -// MergeServerHostSecretsSslKey1 performs a merge with any union data inside the ServerHost_Secrets_Ssl_Key, using the provided ServerHostSecretsSslKey1 -func (t *ServerHost_Secrets_Ssl_Key) MergeServerHostSecretsSslKey1(v ServerHostSecretsSslKey1) error { +// MergeXmattersSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided XmattersSecrets +func (t *UpdateConnectorSecrets) MergeXmattersSecrets(v XmattersSecrets) error { b, err := json.Marshal(v) if err != nil { return err @@ -13869,16 +17201,6 @@ func (t *ServerHost_Secrets_Ssl_Key) MergeServerHostSecretsSslKey1(v ServerHostS return err } -func (t ServerHost_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} - -func (t *ServerHost_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} - // AsUpdateOutputElasticsearchSecretsSslKey0 returns the union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key as a UpdateOutputElasticsearchSecretsSslKey0 func (t UpdateOutputElasticsearch_Secrets_Ssl_Key) AsUpdateOutputElasticsearchSecretsSslKey0() (UpdateOutputElasticsearchSecretsSslKey0, error) { var body UpdateOutputElasticsearchSecretsSslKey0 @@ -14360,9 +17682,209 @@ func (t UpdateOutputUnion) MarshalJSON() ([]byte, error) { return b, err } -func (t *UpdateOutputUnion) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err +func (t *UpdateOutputUnion) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// Override default JSON handling for CreateConnectorConfig to handle AdditionalProperties and union +func (a *CreateConnectorConfig) UnmarshalJSON(b []byte) error { + err := a.union.UnmarshalJSON(b) + if err != nil { + return err + } + object := make(map[string]json.RawMessage) + err = json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for 
CreateConnectorConfig to handle AdditionalProperties and union +func (a CreateConnectorConfig) MarshalJSON() ([]byte, error) { + var err error + b, err := a.union.MarshalJSON() + if err != nil { + return nil, err + } + object := make(map[string]json.RawMessage) + if a.union != nil { + err = json.Unmarshal(b, &object) + if err != nil { + return nil, err + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Override default JSON handling for CreateConnectorSecrets to handle AdditionalProperties and union +func (a *CreateConnectorSecrets) UnmarshalJSON(b []byte) error { + err := a.union.UnmarshalJSON(b) + if err != nil { + return err + } + object := make(map[string]json.RawMessage) + err = json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for CreateConnectorSecrets to handle AdditionalProperties and union +func (a CreateConnectorSecrets) MarshalJSON() ([]byte, error) { + var err error + b, err := a.union.MarshalJSON() + if err != nil { + return nil, err + } + object := make(map[string]json.RawMessage) + if a.union != nil { + err = json.Unmarshal(b, &object) + if err != nil { + return nil, err + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Override default JSON handling for UpdateConnectorConfig to handle AdditionalProperties and union +func (a *UpdateConnectorConfig) UnmarshalJSON(b []byte) error { + err := a.union.UnmarshalJSON(b) + if err != nil { + return err + } + object := make(map[string]json.RawMessage) + err = json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for UpdateConnectorConfig to handle AdditionalProperties and union +func (a UpdateConnectorConfig) MarshalJSON() ([]byte, error) { + var err error + b, err := a.union.MarshalJSON() + if err != nil { + return nil, err + } + object := make(map[string]json.RawMessage) + if a.union != nil { + err = json.Unmarshal(b, &object) + if err != nil { + return nil, err + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Override default JSON handling for UpdateConnectorSecrets to handle AdditionalProperties and union +func (a *UpdateConnectorSecrets) UnmarshalJSON(b []byte) error { + err := a.union.UnmarshalJSON(b) + if err != nil { + return err + } + 
object := make(map[string]json.RawMessage) + err = json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for UpdateConnectorSecrets to handle AdditionalProperties and union +func (a UpdateConnectorSecrets) MarshalJSON() ([]byte, error) { + var err error + b, err := a.union.MarshalJSON() + if err != nil { + return nil, err + } + object := make(map[string]json.RawMessage) + if a.union != nil { + err = json.Unmarshal(b, &object) + if err != nil { + return nil, err + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) } // RequestEditorFn is the function signature for the RequestEditor callback function @@ -14565,6 +18087,25 @@ type ClientInterface interface { PutParameter(ctx context.Context, id string, body PutParameterJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + // DeleteActionsConnectorId request + DeleteActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetActionsConnectorId request + GetActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostActionsConnectorIdWithBody request with any body + PostActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutActionsConnectorIdWithBody request with any body + PutActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetActionsConnectors request + GetActionsConnectors(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) + // GetAllDataViewsDefault request GetAllDataViewsDefault(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) @@ -15149,6 +18690,90 @@ func (c *Client) PutParameter(ctx context.Context, id string, body PutParameterJ return c.Client.Do(req) } +func (c *Client) DeleteActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteActionsConnectorIdRequest(c.Server, spaceId, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewGetActionsConnectorIdRequest(c.Server, spaceId, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostActionsConnectorIdRequestWithBody(c.Server, spaceId, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostActionsConnectorIdRequest(c.Server, spaceId, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutActionsConnectorIdRequestWithBody(c.Server, spaceId, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutActionsConnectorIdRequest(c.Server, spaceId, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetActionsConnectors(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetActionsConnectorsRequest(c.Server, spaceId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + func (c *Client) GetAllDataViewsDefault(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) { req, err := NewGetAllDataViewsDefaultRequest(c.Server, spaceId) if err != nil { @@ -17111,32 +20736,190 @@ func NewPutFleetPackagePoliciesPackagepolicyidRequestWithBody(server string, pac return nil, err } - req.Header.Add("Content-Type", contentType) - - return req, nil -} + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostParametersRequest calls the generic PostParameters builder with application/json body +func NewPostParametersRequest(server string, body PostParametersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostParametersRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostParametersRequestWithBody generates requests for PostParameters with any type of body +func NewPostParametersRequestWithBody(server string, contentType string, body 
io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/params") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteParameterRequest generates requests for DeleteParameter +func NewDeleteParameterRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetParameterRequest generates requests for GetParameter +func NewGetParameterRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutParameterRequest calls the generic PutParameter builder with application/json body +func NewPutParameterRequest(server string, id string, body PutParameterJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutParameterRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPutParameterRequestWithBody generates requests for PutParameter with any type of body +func NewPutParameterRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteActionsConnectorIdRequest generates requests for DeleteActionsConnectorId +func NewDeleteActionsConnectorIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string -// NewPostParametersRequest calls the generic PostParameters builder with application/json body -func NewPostParametersRequest(server string, body PostParametersJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) if err != nil { return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPostParametersRequestWithBody(server, "application/json", bodyReader) -} - -// NewPostParametersRequestWithBody generates requests for PostParameters with any type of body -func NewPostParametersRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { - var err error serverURL, err := url.Parse(server) if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/synthetics/params") + operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -17146,23 +20929,28 @@ func NewPostParametersRequestWithBody(server string, contentType string, body io return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), body) + req, err := http.NewRequest("DELETE", queryURL.String(), nil) if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) - return req, nil } -// NewDeleteParameterRequest generates requests for DeleteParameter -func NewDeleteParameterRequest(server string, id string) (*http.Request, error) { +// NewGetActionsConnectorIdRequest generates requests for GetActionsConnectorId +func NewGetActionsConnectorIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { var err error var pathParam0 string - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) if err != nil { return nil, err } @@ -17172,7 +20960,7 @@ func NewDeleteParameterRequest(server string, id string) (*http.Request, error) return nil, err } - operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) + operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -17182,7 +20970,7 @@ func NewDeleteParameterRequest(server string, id string) (*http.Request, error) return nil, err } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) + req, err := http.NewRequest("GET", queryURL.String(), nil) if err != nil { return nil, err } @@ -17190,13 +20978,31 @@ func NewDeleteParameterRequest(server string, id string) (*http.Request, error) return req, nil } -// NewGetParameterRequest generates requests for GetParameter -func NewGetParameterRequest(server string, id string) (*http.Request, error) { +// NewPostActionsConnectorIdRequest calls the generic PostActionsConnectorId builder with application/json body +func NewPostActionsConnectorIdRequest(server string, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostActionsConnectorIdRequestWithBody(server, spaceId, id, "application/json", bodyReader) +} + +// NewPostActionsConnectorIdRequestWithBody generates requests for PostActionsConnectorId with any type of body +func NewPostActionsConnectorIdRequestWithBody(server string, spaceId SpaceId, id string, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) if err != nil { return nil, err } @@ -17206,7 +21012,7 @@ func NewGetParameterRequest(server string, id string) (*http.Request, error) { return nil, err } - operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) + operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." 
+ operationPath } @@ -17216,32 +21022,41 @@ func NewGetParameterRequest(server string, id string) (*http.Request, error) { return nil, err } - req, err := http.NewRequest("GET", queryURL.String(), nil) + req, err := http.NewRequest("POST", queryURL.String(), body) if err != nil { return nil, err } + req.Header.Add("Content-Type", contentType) + return req, nil } -// NewPutParameterRequest calls the generic PutParameter builder with application/json body -func NewPutParameterRequest(server string, id string, body PutParameterJSONRequestBody) (*http.Request, error) { +// NewPutActionsConnectorIdRequest calls the generic PutActionsConnectorId builder with application/json body +func NewPutActionsConnectorIdRequest(server string, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody) (*http.Request, error) { var bodyReader io.Reader buf, err := json.Marshal(body) if err != nil { return nil, err } bodyReader = bytes.NewReader(buf) - return NewPutParameterRequestWithBody(server, id, "application/json", bodyReader) + return NewPutActionsConnectorIdRequestWithBody(server, spaceId, id, "application/json", bodyReader) } -// NewPutParameterRequestWithBody generates requests for PutParameter with any type of body -func NewPutParameterRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { +// NewPutActionsConnectorIdRequestWithBody generates requests for PutActionsConnectorId with any type of body +func NewPutActionsConnectorIdRequestWithBody(server string, spaceId SpaceId, id string, contentType string, body io.Reader) (*http.Request, error) { var err error var pathParam0 string - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) if err != nil { return nil, err } @@ -17251,7 +21066,7 @@ func NewPutParameterRequestWithBody(server string, id string, contentType string return nil, err } - operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) + operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) if operationPath[0] == '/' { operationPath = "." + operationPath } @@ -17271,6 +21086,40 @@ func NewPutParameterRequestWithBody(server string, id string, contentType string return req, nil } +// NewGetActionsConnectorsRequest generates requests for GetActionsConnectors +func NewGetActionsConnectorsRequest(server string, spaceId SpaceId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/actions/connectors", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + // NewGetAllDataViewsDefaultRequest generates requests for GetAllDataViewsDefault func NewGetAllDataViewsDefaultRequest(server string, spaceId SpaceId) (*http.Request, error) { var err error @@ -17658,6 +21507,25 @@ type ClientWithResponsesInterface interface { PutParameterWithResponse(ctx context.Context, id string, body PutParameterJSONRequestBody, reqEditors ...RequestEditorFn) (*PutParameterResponse, error) + // DeleteActionsConnectorIdWithResponse request + DeleteActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteActionsConnectorIdResponse, error) + + // GetActionsConnectorIdWithResponse request + GetActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetActionsConnectorIdResponse, error) + + // PostActionsConnectorIdWithBodyWithResponse request with any body + PostActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) + + PostActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) + + // PutActionsConnectorIdWithBodyWithResponse request with any body + PutActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) + + PutActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) + + // GetActionsConnectorsWithResponse request + GetActionsConnectorsWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetActionsConnectorsResponse, error) + // GetAllDataViewsDefaultWithResponse request GetAllDataViewsDefaultWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetAllDataViewsDefaultResponse, error) @@ -17919,7 +21787,7 @@ type GetFleetEnrollmentApiKeysResponse struct { HTTPResponse *http.Response JSON200 *struct { Items []EnrollmentApiKey `json:"items"` - // Deprecated: + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set List []struct { // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. 
Active bool `json:"active"` @@ -18109,7 +21977,7 @@ type PostFleetEpmPackagesPkgnamePkgversionResponse struct { Body []byte HTTPResponse *http.Response JSON200 *struct { - Meta struct { + UnderscoreMeta struct { InstallSource string `json:"install_source"` Name string `json:"name"` } `json:"_meta"` @@ -18540,7 +22408,153 @@ type PostFleetPackagePoliciesResponse struct { } // Status returns HTTPResponse.Status -func (r PostFleetPackagePoliciesResponse) Status() string { +func (r PostFleetPackagePoliciesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetPackagePoliciesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteFleetPackagePoliciesPackagepolicyidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Id string `json:"id"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteFleetPackagePoliciesPackagepolicyidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetPackagePoliciesPackagepolicyidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item PackagePolicy `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Message string `json:"message"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetPackagePoliciesPackagepolicyidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutFleetPackagePoliciesPackagepolicyidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item PackagePolicy `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON403 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PutFleetPackagePoliciesPackagepolicyidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { + if 
r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostParametersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CreateParamResponse +} + +// Status returns HTTPResponse.Status +func (r PostParametersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostParametersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteParameterResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r DeleteParameterResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -18548,30 +22562,21 @@ func (r PostFleetPackagePoliciesResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r PostFleetPackagePoliciesResponse) StatusCode() int { +func (r DeleteParameterResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type DeleteFleetPackagePoliciesPackagepolicyidResponse struct { +type GetParameterResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *struct { - Id string `json:"id"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } + JSON200 *SyntheticsGetParameterResponse } // Status returns HTTPResponse.Status -func (r DeleteFleetPackagePoliciesPackagepolicyidResponse) Status() string { +func (r GetParameterResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -18579,33 +22584,21 @@ func (r DeleteFleetPackagePoliciesPackagepolicyidResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r DeleteFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { +func (r GetParameterResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type GetFleetPackagePoliciesPackagepolicyidResponse struct { +type PutParameterResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *struct { - Item PackagePolicy `json:"item"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - JSON404 *struct { - Message string `json:"message"` - } + JSON200 *map[string]interface{} } // Status returns HTTPResponse.Status -func (r GetFleetPackagePoliciesPackagepolicyidResponse) Status() string { +func (r PutParameterResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -18613,37 +22606,20 @@ func (r GetFleetPackagePoliciesPackagepolicyidResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { +func (r PutParameterResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PutFleetPackagePoliciesPackagepolicyidResponse struct { +type DeleteActionsConnectorIdResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *struct { - Item PackagePolicy `json:"item"` - } - JSON400 *struct { - Attributes interface{} 
`json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - JSON403 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } } // Status returns HTTPResponse.Status -func (r PutFleetPackagePoliciesPackagepolicyidResponse) Status() string { +func (r DeleteActionsConnectorIdResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -18651,21 +22627,21 @@ func (r PutFleetPackagePoliciesPackagepolicyidResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r PutFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { +func (r DeleteActionsConnectorIdResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PostParametersResponse struct { +type GetActionsConnectorIdResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *CreateParamResponse + JSON200 *ConnectorResponse } // Status returns HTTPResponse.Status -func (r PostParametersResponse) Status() string { +func (r GetActionsConnectorIdResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -18673,20 +22649,44 @@ func (r PostParametersResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r PostParametersResponse) StatusCode() int { +func (r GetActionsConnectorIdResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type DeleteParameterResponse struct { +type PostActionsConnectorIdResponse struct { Body []byte HTTPResponse *http.Response + JSON200 *struct { + Config *map[string]interface{} `json:"config,omitempty"` + + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` + + // Id The identifier for the connector. + Id string `json:"id"` + + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` + + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. + IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. 
+ Name string `json:"name"` + } } // Status returns HTTPResponse.Status -func (r DeleteParameterResponse) Status() string { +func (r PostActionsConnectorIdResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -18694,21 +22694,44 @@ func (r DeleteParameterResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r DeleteParameterResponse) StatusCode() int { +func (r PostActionsConnectorIdResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type GetParameterResponse struct { +type PutActionsConnectorIdResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *SyntheticsGetParameterResponse + JSON200 *struct { + Config *map[string]interface{} `json:"config,omitempty"` + + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` + + // Id The identifier for the connector. + Id string `json:"id"` + + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` + + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. + IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. + Name string `json:"name"` + } } // Status returns HTTPResponse.Status -func (r GetParameterResponse) Status() string { +func (r PutActionsConnectorIdResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -18716,21 +22739,21 @@ func (r GetParameterResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r GetParameterResponse) StatusCode() int { +func (r PutActionsConnectorIdResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } return 0 } -type PutParameterResponse struct { +type GetActionsConnectorsResponse struct { Body []byte HTTPResponse *http.Response - JSON200 *map[string]interface{} + JSON200 *[]ConnectorResponse } // Status returns HTTPResponse.Status -func (r PutParameterResponse) Status() string { +func (r GetActionsConnectorsResponse) Status() string { if r.HTTPResponse != nil { return r.HTTPResponse.Status } @@ -18738,7 +22761,7 @@ func (r PutParameterResponse) Status() string { } // StatusCode returns HTTPResponse.StatusCode -func (r PutParameterResponse) StatusCode() int { +func (r GetActionsConnectorsResponse) StatusCode() int { if r.HTTPResponse != nil { return r.HTTPResponse.StatusCode } @@ -19270,6 +23293,67 @@ func (c *ClientWithResponses) PutParameterWithResponse(ctx context.Context, id s return ParsePutParameterResponse(rsp) } +// DeleteActionsConnectorIdWithResponse request returning *DeleteActionsConnectorIdResponse +func (c *ClientWithResponses) DeleteActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteActionsConnectorIdResponse, error) { + rsp, err := c.DeleteActionsConnectorId(ctx, spaceId, id, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseDeleteActionsConnectorIdResponse(rsp) +} + +// GetActionsConnectorIdWithResponse request returning *GetActionsConnectorIdResponse +func (c *ClientWithResponses) GetActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetActionsConnectorIdResponse, error) { + rsp, err := c.GetActionsConnectorId(ctx, spaceId, id, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetActionsConnectorIdResponse(rsp) +} + +// PostActionsConnectorIdWithBodyWithResponse request with arbitrary body returning *PostActionsConnectorIdResponse +func (c *ClientWithResponses) PostActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) { + rsp, err := c.PostActionsConnectorIdWithBody(ctx, spaceId, id, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostActionsConnectorIdResponse(rsp) +} + +func (c *ClientWithResponses) PostActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) { + rsp, err := c.PostActionsConnectorId(ctx, spaceId, id, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostActionsConnectorIdResponse(rsp) +} + +// PutActionsConnectorIdWithBodyWithResponse request with arbitrary body returning *PutActionsConnectorIdResponse +func (c *ClientWithResponses) PutActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) { + rsp, err := c.PutActionsConnectorIdWithBody(ctx, spaceId, id, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePutActionsConnectorIdResponse(rsp) +} + +func (c *ClientWithResponses) PutActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) { + rsp, err := c.PutActionsConnectorId(ctx, spaceId, id, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePutActionsConnectorIdResponse(rsp) +} + +// GetActionsConnectorsWithResponse request returning *GetActionsConnectorsResponse +func (c *ClientWithResponses) GetActionsConnectorsWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetActionsConnectorsResponse, error) { + rsp, err := c.GetActionsConnectors(ctx, spaceId, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetActionsConnectorsResponse(rsp) +} + // GetAllDataViewsDefaultWithResponse request returning *GetAllDataViewsDefaultResponse func (c *ClientWithResponses) GetAllDataViewsDefaultWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetAllDataViewsDefaultResponse, error) { rsp, err := c.GetAllDataViewsDefault(ctx, spaceId, reqEditors...) 
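The generated helpers added in the hunks above follow two patterns worth illustrating: typed union accessors (As*/From*/Merge*) paired with AdditionalProperties-aware MarshalJSON/UnmarshalJSON overrides, and space-scoped connector CRUD wrappers on ClientWithResponses. The sketch below is illustrative only and is not part of the generated diff; the import path, the NewClientWithResponses constructor (the standard oapi-codegen companion to this client), the Kibana URL, the space id, and the connector id are all assumptions made purely for demonstration.

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"log"
	"net/http"

	"github.com/elastic/terraform-provider-elasticstack/generated/kbapi" // assumed import path for the generated package
)

func main() {
	// 1. Union helpers: seed UpdateConnectorSecrets with a typed payload, then attach
	//    extra keys via AdditionalProperties; the MarshalJSON override above folds both
	//    into a single JSON object. WebhookSecrets{} is a zero-value placeholder because
	//    its concrete fields are not shown in this diff.
	var secrets kbapi.UpdateConnectorSecrets
	if err := secrets.FromWebhookSecrets(kbapi.WebhookSecrets{}); err != nil {
		log.Fatal(err)
	}
	secrets.AdditionalProperties = map[string]interface{}{"customField": "value"}
	raw, err := json.Marshal(secrets)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(raw))

	// 2. Connector client: the *WithResponse wrappers call the space-scoped connector
	//    endpoints (/s/{spaceId}/api/actions/connector/{id}) and decode a JSON 200 body
	//    into resp.JSON200. NewClientWithResponses is assumed to exist as part of the
	//    same generated output; URL, space id, and connector id below are placeholders.
	client, err := kbapi.NewClientWithResponses("https://kibana.example.com:5601")
	if err != nil {
		log.Fatal(err)
	}
	resp, err := client.GetActionsConnectorIdWithResponse(context.Background(), "default", "my-connector-id")
	if err != nil {
		log.Fatal(err)
	}
	if resp.StatusCode() == http.StatusOK && resp.JSON200 != nil {
		fmt.Printf("connector: %+v\n", *resp.JSON200)
	}
}
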
@@ -19712,7 +23796,7 @@ func ParseGetFleetEnrollmentApiKeysResponse(rsp *http.Response) (*GetFleetEnroll case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { Items []EnrollmentApiKey `json:"items"` - // Deprecated: + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set List []struct { // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. Active bool `json:"active"` @@ -19917,7 +24001,7 @@ func ParsePostFleetEpmPackagesPkgnamePkgversionResponse(rsp *http.Response) (*Po switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Meta struct { + UnderscoreMeta struct { InstallSource string `json:"install_source"` Name string `json:"name"` } `json:"_meta"` @@ -20712,6 +24796,172 @@ func ParsePutParameterResponse(rsp *http.Response) (*PutParameterResponse, error return response, nil } +// ParseDeleteActionsConnectorIdResponse parses an HTTP response from a DeleteActionsConnectorIdWithResponse call +func ParseDeleteActionsConnectorIdResponse(rsp *http.Response) (*DeleteActionsConnectorIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DeleteActionsConnectorIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + +// ParseGetActionsConnectorIdResponse parses an HTTP response from a GetActionsConnectorIdWithResponse call +func ParseGetActionsConnectorIdResponse(rsp *http.Response) (*GetActionsConnectorIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetActionsConnectorIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ConnectorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParsePostActionsConnectorIdResponse parses an HTTP response from a PostActionsConnectorIdWithResponse call +func ParsePostActionsConnectorIdResponse(rsp *http.Response) (*PostActionsConnectorIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &PostActionsConnectorIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Config *map[string]interface{} `json:"config,omitempty"` + + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` + + // Id The identifier for the connector. + Id string `json:"id"` + + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` + + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. 
+ IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. + Name string `json:"name"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParsePutActionsConnectorIdResponse parses an HTTP response from a PutActionsConnectorIdWithResponse call +func ParsePutActionsConnectorIdResponse(rsp *http.Response) (*PutActionsConnectorIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &PutActionsConnectorIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Config *map[string]interface{} `json:"config,omitempty"` + + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` + + // Id The identifier for the connector. + Id string `json:"id"` + + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` + + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. + IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. + Name string `json:"name"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseGetActionsConnectorsResponse parses an HTTP response from a GetActionsConnectorsWithResponse call +func ParseGetActionsConnectorsResponse(rsp *http.Response) (*GetActionsConnectorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetActionsConnectorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []ConnectorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + // ParseGetAllDataViewsDefaultResponse parses an HTTP response from a GetAllDataViewsDefaultWithResponse call func ParseGetAllDataViewsDefaultResponse(rsp *http.Response) (*GetAllDataViewsDefaultResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) diff --git a/generated/kbapi/transform_schema.go b/generated/kbapi/transform_schema.go index 11453eb8a..17d86fb24 100644 --- a/generated/kbapi/transform_schema.go +++ b/generated/kbapi/transform_schema.go @@ -547,6 +547,7 @@ var transformers = []TransformFunc{ transformFleetPaths, transformRemoveExamples, transformRemoveUnusedComponents, + transformOmitEmptyNullable, } // transformFilterPaths filters the paths in a schema down to a specified list @@ -571,6 +572,8 @@ func transformFilterPaths(schema *Schema) { "/api/synthetics/params": {"post"}, "/api/synthetics/params/{id}": {"get", "put", "delete"}, "/api/apm/settings/agent-configuration": {"get", "put", "delete"}, + "/api/actions/connector/{id}": {"get", "put", "post", "delete"}, + 
"/api/actions/connectors": {"get"}, } for path, pathInfo := range schema.Paths { @@ -717,6 +720,8 @@ func transformKibanaPaths(schema *Schema) { "/api/data_views", "/api/data_views/data_view", "/api/data_views/data_view/{viewId}", + "/api/actions/connector/{id}", + "/api/actions/connectors", } // Add a spaceId parameter if not already present @@ -748,6 +753,25 @@ func transformKibanaPaths(schema *Schema) { } } + // Connectors + // Can be removed when https://github.com/elastic/kibana/issues/230149 is addressed. + connectorPath := schema.MustGetPath("/s/{spaceId}/api/actions/connector/{id}") + connectorsPath := schema.MustGetPath("/s/{spaceId}/api/actions/connectors") + + connectorPath.Post.CreateRef(schema, "create_connector_config", "requestBody.content.application/json.schema.properties.config") + connectorPath.Post.CreateRef(schema, "create_connector_secrets", "requestBody.content.application/json.schema.properties.secrets") + + connectorPath.Put.CreateRef(schema, "update_connector_config", "requestBody.content.application/json.schema.properties.config") + connectorPath.Put.CreateRef(schema, "update_connector_secrets", "requestBody.content.application/json.schema.properties.secrets") + + connectorPath.Get.CreateRef(schema, "connector_response", "responses.200.content.application/json.schema") + connectorsPath.Get.Set("responses.200.content.application/json.schema", Map{ + "type": "array", + "items": Map{ + "$ref": "#/components/schemas/connector_response", + }, + }) + // Data views // https://github.com/elastic/kibana/blob/main/src/plugins/data_views/server/rest_api_routes/schema.ts @@ -836,16 +860,6 @@ func transformFleetPaths(schema *Schema) { agentPolicyPath.Get.CreateRef(schema, "agent_policy", "responses.200.content.application/json.schema.properties.item") agentPolicyPath.Put.CreateRef(schema, "agent_policy", "responses.200.content.application/json.schema.properties.item") - // See: https://github.com/elastic/kibana/issues/197155 - // [request body.keep_monitoring_alive]: expected value of type [boolean] but got [null] - // [request body.supports_agentless]: expected value of type [boolean] but got [null] - // [request body.overrides]: expected value of type [boolean] but got [null] - // [request body.required_versions]: definition for this key is missing"} - for _, key := range []string{"keep_monitoring_alive", "supports_agentless", "overrides", "required_versions"} { - agentPoliciesPath.Post.Set(fmt.Sprintf("requestBody.content.application/json.schema.properties.%s.x-omitempty", key), true) - agentPolicyPath.Put.Set(fmt.Sprintf("requestBody.content.application/json.schema.properties.%s.x-omitempty", key), true) - } - schema.Components.CreateRef(schema, "agent_policy_global_data_tags_item", "schemas.agent_policy.properties.global_data_tags.items") // Define the value types for the GlobalDataTags @@ -880,14 +894,6 @@ func transformFleetPaths(schema *Schema) { hostPath.Get.CreateRef(schema, "server_host", "responses.200.content.application/json.schema.properties.item") hostPath.Put.CreateRef(schema, "server_host", "responses.200.content.application/json.schema.properties.item") - // 8.6.2 regression - // [request body.proxy_id]: definition for this key is missing - // See: https://github.com/elastic/kibana/issues/197155 - hostsPath.Post.Set("requestBody.content.application/json.schema.properties.proxy_id.x-omitempty", true) - hostPath.Put.Set("requestBody.content.application/json.schema.properties.proxy_id.x-omitempty", true) - 
hostsPath.Post.Set("requestBody.content.application/json.schema.properties.ssl.x-omitempty", true) - hostPath.Put.Set("requestBody.content.application/json.schema.properties.ssl.x-omitempty", true) - // Outputs // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/models/output.ts // https://github.com/elastic/kibana/blob/main/x-pack/plugins/fleet/common/types/rest_spec/output.ts @@ -950,32 +956,6 @@ func transformFleetPaths(schema *Schema) { }, }) - for _, name := range []string{"new_output", "update_output"} { - for _, typ := range []string{"elasticsearch", "remote_elasticsearch", "logstash", "kafka"} { - // [request body.1.ca_sha256]: expected value of type [string] but got [null]" - // See: https://github.com/elastic/kibana/issues/197155 - schema.Components.Set(fmt.Sprintf("schemas.%s_%s.properties.ca_sha256.x-omitempty", name, typ), true) - - // [request body.1.ca_trusted_fingerprint]: expected value of type [string] but got [null] - // See: https://github.com/elastic/kibana/issues/197155 - schema.Components.Set(fmt.Sprintf("schemas.%s_%s.properties.ca_trusted_fingerprint.x-omitempty", name, typ), true) - - // 8.6.2 regression - // [request body.proxy_id]: definition for this key is missing" - // See: https://github.com/elastic/kibana/issues/197155 - schema.Components.Set(fmt.Sprintf("schemas.%s_%s.properties.proxy_id.x-omitempty", name, typ), true) - } - - // [request body.1.shipper]: expected a plain object value, but found [null] instead - // See: https://github.com/elastic/kibana/issues/197155 - schema.Components.Set(fmt.Sprintf("schemas.%s_shipper.x-omitempty", name), true) - - // [request body.1.ssl]: expected a plain object value, but found [null] instead - // See: https://github.com/elastic/kibana/issues/197155 - schema.Components.Set(fmt.Sprintf("schemas.%s_ssl.x-omitempty", name), true) - - } - for _, typ := range []string{"elasticsearch", "remote_elasticsearch", "logstash", "kafka"} { // strict_dynamic_mapping_exception: [1:345] mapping set to strict, dynamic introduction of [id] within [ingest-outputs] is not allowed" // See: https://github.com/elastic/kibana/issues/197155 @@ -1017,13 +997,35 @@ func transformFleetPaths(schema *Schema) { schema.Components.Set("schemas.package_policy_request.properties.vars", Map{"type": "object"}) schema.Components.Set("schemas.package_policy_request_input.properties.vars", Map{"type": "object"}) schema.Components.Set("schemas.package_policy_request_input_stream.properties.vars", Map{"type": "object"}) +} + +func setAllXOmitEmpty(key string, node Map) { + maybeNullable, hasNullable := node.Get("nullable") + isNullable, ok := maybeNullable.(bool) + if hasNullable && ok && isNullable { + node.Set("x-omitempty", true) + } + + properties, hasProperties := node.GetMap("properties") + if !hasProperties { + return + } + + properties.Iterate(setAllXOmitEmpty) +} + +func transformOmitEmptyNullable(schema *Schema) { + componentSchemas := schema.Components.MustGetMap("schemas") + componentSchemas.Iterate(setAllXOmitEmpty) - // [request body.0.output_id]: expected value of type [string] but got [null] - // [request body.1.output_id]: definition for this key is missing" - // See: https://github.com/elastic/kibana/issues/197155 - schema.Components.Set("schemas.package_policy_request.properties.output_id.x-omitempty", true) - schema.Components.Set("schemas.package_policy_request.properties.additional_datastreams_permissions.x-omitempty", true) - 
schema.Components.Set("schemas.package_policy_request.properties.supports_agentless.x-omitempty", true) + for _, pathInfo := range schema.Paths { + for _, methInfo := range pathInfo.Endpoints { + requestBody, ok := methInfo.GetMap("requestBody.content.application/json.schema.properties") + if ok { + requestBody.Iterate(setAllXOmitEmpty) + } + } + } } // transformRemoveExamples removes all examples. diff --git a/internal/clients/api_client.go b/internal/clients/api_client.go index d2cdee505..0174b6daf 100644 --- a/internal/clients/api_client.go +++ b/internal/clients/api_client.go @@ -11,7 +11,6 @@ import ( "github.com/disaster37/go-kibana-rest/v8" "github.com/elastic/go-elasticsearch/v8" "github.com/elastic/terraform-provider-elasticstack/generated/alerting" - "github.com/elastic/terraform-provider-elasticstack/generated/connectors" "github.com/elastic/terraform-provider-elasticstack/generated/slo" "github.com/elastic/terraform-provider-elasticstack/internal/clients/config" "github.com/elastic/terraform-provider-elasticstack/internal/clients/fleet" @@ -25,7 +24,6 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/logging" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/oapi-codegen/oapi-codegen/v2/pkg/securityprovider" ) type CompositeId struct { @@ -76,7 +74,6 @@ type ApiClient struct { kibana *kibana.Client kibanaOapi *kibana_oapi.Client alerting alerting.AlertingAPI - connectors *connectors.Client slo slo.SloAPI kibanaConfig kibana.Config fleet *fleet.Client @@ -105,11 +102,6 @@ func NewAcceptanceTestingClient() (*ApiClient, error) { kibanaHttpClient := kib.Client.GetClient() - actionConnectors, err := buildConnectorsClient(cfg, kibanaHttpClient) - if err != nil { - return nil, fmt.Errorf("cannot create Kibana action connectors client: [%w]", err) - } - kibOapi, err := kibana_oapi.NewClient(*cfg.KibanaOapi) if err != nil { return nil, err @@ -126,7 +118,6 @@ func NewAcceptanceTestingClient() (*ApiClient, error) { kibanaOapi: kibOapi, alerting: buildAlertingClient(cfg, kibanaHttpClient).AlertingAPI, slo: buildSloClient(cfg, kibanaHttpClient).SloAPI, - connectors: actionConnectors, kibanaConfig: *cfg.Kibana, fleet: fleetClient, version: version, @@ -262,14 +253,6 @@ func (a *ApiClient) GetAlertingClient() (alerting.AlertingAPI, error) { return a.alerting, nil } -func (a *ApiClient) GetKibanaConnectorsClient(ctx context.Context) (*connectors.Client, error) { - if a.connectors == nil { - return nil, errors.New("kibana action connector client not found") - } - - return a.connectors, nil -} - func (a *ApiClient) GetSloClient() (slo.SloAPI, error) { if a.slo == nil { return nil, errors.New("slo client not found") @@ -551,33 +534,6 @@ func buildAlertingClient(cfg config.Client, httpClient *http.Client) *alerting.A return alerting.NewAPIClient(&alertingConfig) } -func buildConnectorsClient(cfg config.Client, httpClient *http.Client) (*connectors.Client, error) { - var authInterceptor connectors.ClientOption - if cfg.Kibana.ApiKey != "" { - apiKeyProvider, err := securityprovider.NewSecurityProviderApiKey( - "header", - "Authorization", - "ApiKey "+cfg.Kibana.ApiKey, - ) - if err != nil { - return nil, fmt.Errorf("unable to create api key auth provider: %w", err) - } - authInterceptor = connectors.WithRequestEditorFn(apiKeyProvider.Intercept) - } else { - basicAuthProvider, err := securityprovider.NewSecurityProviderBasicAuth(cfg.Kibana.Username, cfg.Kibana.Password) - if err != nil { - return nil, 
fmt.Errorf("unable to create basic auth provider: %w", err) - } - authInterceptor = connectors.WithRequestEditorFn(basicAuthProvider.Intercept) - } - - return connectors.NewClient( - cfg.Kibana.Address, - authInterceptor, - connectors.WithHTTPClient(httpClient), - ) -} - func buildSloClient(cfg config.Client, httpClient *http.Client) *slo.APIClient { sloConfig := slo.Configuration{ Debug: logging.IsDebugOrHigher(), @@ -643,14 +599,9 @@ func newApiClientFromConfig(cfg config.Client, version string) (*ApiClient, erro client.kibanaOapi = kibanaOapiClient kibanaHttpClient := kibanaClient.Client.GetClient() - connectorsClient, err := buildConnectorsClient(cfg, kibanaHttpClient) - if err != nil { - return nil, fmt.Errorf("cannot create Kibana connectors client: [%w]", err) - } client.alerting = buildAlertingClient(cfg, kibanaHttpClient).AlertingAPI client.slo = buildSloClient(cfg, kibanaHttpClient).SloAPI - client.connectors = connectorsClient } if cfg.Fleet != nil { diff --git a/internal/clients/kibana/connector.go b/internal/clients/kibana/connector.go deleted file mode 100644 index 050cdbd08..000000000 --- a/internal/clients/kibana/connector.go +++ /dev/null @@ -1,1670 +0,0 @@ -package kibana - -import ( - "bytes" - "context" - "encoding/json" - "fmt" - "io" - - "github.com/elastic/terraform-provider-elasticstack/generated/connectors" - "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/elastic/terraform-provider-elasticstack/internal/models" - "github.com/elastic/terraform-provider-elasticstack/internal/utils" - "github.com/hashicorp/terraform-plugin-log/tflog" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" -) - -func CreateConnector(ctx context.Context, apiClient *clients.ApiClient, connectorOld models.KibanaActionConnector) (string, diag.Diagnostics) { - client, err := apiClient.GetKibanaConnectorsClient(ctx) - if err != nil { - return "", diag.FromErr(err) - } - - body, err := createConnectorRequestBody(connectorOld) - if err != nil { - return "", diag.FromErr(err) - } - - httpResp, err := client.CreateConnectorWithBody(ctx, connectorOld.SpaceID, &connectors.CreateConnectorParams{KbnXsrf: connectors.KbnXsrf("true")}, "application/json", body) - - if err != nil { - return "", diag.Errorf("unable to create connector: [%v]", err) - } - - defer httpResp.Body.Close() - - resp, err := connectors.ParseCreateConnectorResponse(httpResp) - if err != nil { - return "", diag.Errorf("unable to parse connector create response: [%v]", err) - } - - if resp.JSON400 != nil { - return "", diag.Errorf("%s: %s", *resp.JSON400.Error, *resp.JSON400.Message) - } - - if resp.JSON401 != nil { - return "", diag.Errorf("%s: %s", *resp.JSON401.Error, *resp.JSON401.Message) - } - - if resp.JSON200 == nil { - return "", diag.Errorf("%s: %s", resp.Status(), string(resp.Body)) - } - - connectorNew, err := connectorResponseToModel(connectorOld.SpaceID, *resp.JSON200) - if err != nil { - return "", diag.FromErr(err) - } - - return connectorNew.ConnectorID, nil -} - -func UpdateConnector(ctx context.Context, apiClient *clients.ApiClient, connectorOld models.KibanaActionConnector) (string, diag.Diagnostics) { - client, err := apiClient.GetKibanaConnectorsClient(ctx) - if err != nil { - return "", diag.FromErr(err) - } - - body, err := updateConnectorRequestBody(connectorOld) - if err != nil { - return "", diag.FromErr(err) - } - - httpResp, err := client.UpdateConnectorWithBody(ctx, connectorOld.SpaceID, connectorOld.ConnectorID, &connectors.UpdateConnectorParams{KbnXsrf: 
connectors.KbnXsrf("true")}, "application/json", body) - - if err != nil { - return "", diag.Errorf("unable to update connector: [%v]", err) - } - - defer httpResp.Body.Close() - - resp, err := connectors.ParseCreateConnectorResponse(httpResp) - if err != nil { - return "", diag.Errorf("unable to parse connector update response: [%v]", err) - } - - if resp.JSON400 != nil { - return "", diag.Errorf("%s: %s", *resp.JSON400.Error, *resp.JSON400.Message) - } - - if resp.JSON401 != nil { - return "", diag.Errorf("%s: %s", *resp.JSON401.Error, *resp.JSON401.Message) - } - - if resp.JSON200 == nil { - return "", diag.Errorf("%s: %s", resp.Status(), string(resp.Body)) - } - - connectorNew, err := connectorResponseToModel(connectorOld.SpaceID, *resp.JSON200) - if err != nil { - return "", diag.FromErr(err) - } - - return connectorNew.ConnectorID, nil -} - -func GetConnector(ctx context.Context, apiClient *clients.ApiClient, connectorID, spaceID string) (*models.KibanaActionConnector, diag.Diagnostics) { - client, err := apiClient.GetKibanaConnectorsClient(ctx) - if err != nil { - return nil, diag.FromErr(err) - } - - httpResp, err := client.GetConnector(ctx, spaceID, connectorID) - - if err != nil { - return nil, diag.Errorf("unable to get connector: [%v]", err) - } - - defer httpResp.Body.Close() - - resp, err := connectors.ParseGetConnectorResponse(httpResp) - if err != nil { - return nil, diag.Errorf("unable to parse connector get response: [%v]", err) - } - - if resp.JSON401 != nil { - return nil, diag.Errorf("%s: %s", *resp.JSON401.Error, *resp.JSON401.Message) - } - - if resp.JSON404 != nil { - return nil, nil - } - - if resp.JSON200 == nil { - return nil, diag.Errorf("%s: %s", resp.Status(), string(resp.Body)) - } - - connector, err := connectorResponseToModel(spaceID, *resp.JSON200) - if err != nil { - return nil, diag.Errorf("unable to convert response to model: %v", err) - } - - return connector, nil -} - -func SearchConnectors(ctx context.Context, apiClient *clients.ApiClient, connectorName, spaceID, connectorTypeID string) ([]*models.KibanaActionConnector, diag.Diagnostics) { - client, err := apiClient.GetKibanaConnectorsClient(ctx) - if err != nil { - return nil, diag.FromErr(err) - } - - httpResp, err := client.GetConnectors(ctx, spaceID) - - if err != nil { - return nil, diag.Errorf("unable to get connectors: [%v]", err) - } - - defer httpResp.Body.Close() - - resp, err := connectors.ParseGetConnectorsResponse(httpResp) - if err != nil { - return nil, diag.Errorf("unable to parse connectors get response: [%v]", err) - } - - if resp.JSON401 != nil { - return nil, diag.Errorf("%s: %s", *resp.JSON401.Error, *resp.JSON401.Message) - } - - if resp.JSON200 == nil { - return nil, diag.Errorf("%s: %s", resp.Status(), string(resp.Body)) - } - - foundConnectors := []*models.KibanaActionConnector{} - for _, connector := range *resp.JSON200 { - if connector.Name != connectorName { - continue - } - - if connectorTypeID != "" && string(connector.ConnectorTypeId) != connectorTypeID { - continue - } - - //this marshaling and unmarshaling business allows us to create a type with unexported fields. 
- bytes, err := json.Marshal(connector) - if err != nil { - return nil, diag.Errorf("cannot marshal connector: %v", err) - } - - var respProps connectors.ConnectorResponseProperties - err = json.Unmarshal(bytes, &respProps) - if err != nil { - return nil, diag.Errorf("cannot unmarshal connector: %v", err) - } - - c, err := connectorResponseToModel(spaceID, respProps) - if err != nil { - return nil, diag.Errorf("unable to convert response to model: %v", err) - } - - foundConnectors = append(foundConnectors, c) - } - if len(foundConnectors) == 0 { - tflog.Debug(ctx, fmt.Sprintf("no connectors found with name [%s/%s] and type [%s]", spaceID, connectorName, connectorTypeID)) - } - - return foundConnectors, nil -} - -func DeleteConnector(ctx context.Context, apiClient *clients.ApiClient, connectorID string, spaceID string) diag.Diagnostics { - client, err := apiClient.GetKibanaConnectorsClient(ctx) - if err != nil { - return diag.FromErr(err) - } - - httpResp, err := client.DeleteConnector(ctx, spaceID, connectorID, &connectors.DeleteConnectorParams{KbnXsrf: "true"}) - - if err != nil { - return diag.Errorf("unable to delete connector: [%v]", err) - } - - defer httpResp.Body.Close() - - resp, err := connectors.ParseDeleteConnectorResponse(httpResp) - if err != nil { - return diag.Errorf("unable to parse connector get response: [%v]", err) - } - - if resp.JSON404 != nil { - return diag.Errorf("%s: %s", *resp.JSON404.Error, *resp.JSON404.Message) - } - - if resp.JSON401 != nil { - return diag.Errorf("%s: %s", *resp.JSON401.Error, *resp.JSON401.Message) - } - - if resp.StatusCode() != 200 && resp.StatusCode() != 204 { - return diag.Errorf("failed to delete connector: got status [%v] [%s]", resp.StatusCode(), resp.Status()) - } - - return nil -} - -func ConnectorConfigWithDefaults(connectorTypeID, plan, backend, state string) (string, error) { - switch connectors.ConnectorTypes(connectorTypeID) { - - case connectors.ConnectorTypesDotCasesWebhook: - return connectorConfigWithDefaultsCasesWebhook(plan) - - case connectors.ConnectorTypesDotEmail: - return connectorConfigWithDefaultsEmail(plan) - - case connectors.ConnectorTypesDotGemini: - return connectorConfigWithDefaultsGemini(plan) - - case connectors.ConnectorTypesDotIndex: - return connectorConfigWithDefaultsIndex(plan) - - case connectors.ConnectorTypesDotJira: - return connectorConfigWithDefaultsJira(plan) - - case connectors.ConnectorTypesDotOpsgenie: - return connectorConfigWithDefaultsOpsgenie(plan) - - case connectors.ConnectorTypesDotPagerduty: - return connectorConfigWithDefaultsPagerduty(plan) - - case connectors.ConnectorTypesDotResilient: - return connectorConfigWithDefaultsResilient(plan) - - case connectors.ConnectorTypesDotServicenow: - return connectorConfigWithDefaultsServicenow(plan, backend) - - case connectors.ConnectorTypesDotServicenowItom: - return connectorConfigWithDefaultsServicenowItom(plan) - - case connectors.ConnectorTypesDotServicenowSir: - return connectorConfigWithDefaultsServicenowSir(plan, backend) - - case connectors.ConnectorTypesDotSwimlane: - return connectorConfigWithDefaultsSwimlane(plan) - - case connectors.ConnectorTypesDotTines: - return connectorConfigWithDefaultsTines(plan) - - case connectors.ConnectorTypesDotWebhook: - return connectorConfigWithDefaultsWebhook(plan) - - case connectors.ConnectorTypesDotXmatters: - return connectorConfigWithDefaultsXmatters(plan) - } - return plan, nil -} - -// User can omit optonal fields in config JSON. -// The func adds empty optional fields to the diff. 
-// Otherwise plan command shows omitted fields as the diff, -// because backend returns all fields. -func connectorConfigWithDefaults[T any](plan string) (string, error) { - var config T - if err := json.Unmarshal([]byte(plan), &config); err != nil { - return "", err - } - customJSON, err := json.Marshal(config) - if err != nil { - return "", err - } - return string(customJSON), nil -} - -func connectorConfigWithDefaultsCasesWebhook(plan string) (string, error) { - var custom connectors.ConfigPropertiesCasesWebhook - if err := json.Unmarshal([]byte(plan), &custom); err != nil { - return "", err - } - if custom.CreateIncidentMethod == nil { - custom.CreateIncidentMethod = new(connectors.ConfigPropertiesCasesWebhookCreateIncidentMethod) - *custom.CreateIncidentMethod = connectors.ConfigPropertiesCasesWebhookCreateIncidentMethodPost - } - if custom.HasAuth == nil { - custom.HasAuth = utils.Pointer(true) - } - if custom.UpdateIncidentMethod == nil { - custom.UpdateIncidentMethod = new(connectors.ConfigPropertiesCasesWebhookUpdateIncidentMethod) - *custom.UpdateIncidentMethod = connectors.ConfigPropertiesCasesWebhookUpdateIncidentMethodPut - } - customJSON, err := json.Marshal(custom) - if err != nil { - return "", err - } - return string(customJSON), nil -} - -func connectorConfigWithDefaultsEmail(plan string) (string, error) { - var custom connectors.ConfigPropertiesEmail - if err := json.Unmarshal([]byte(plan), &custom); err != nil { - return "", err - } - if custom.HasAuth == nil { - custom.HasAuth = utils.Pointer(true) - } - if custom.Service == nil { - custom.Service = new(string) - *custom.Service = "other" - } - customJSON, err := json.Marshal(custom) - if err != nil { - return "", err - } - return string(customJSON), nil -} - -func connectorConfigWithDefaultsGemini(plan string) (string, error) { - return plan, nil -} - -func connectorConfigWithDefaultsIndex(plan string) (string, error) { - var custom connectors.ConfigPropertiesIndex - if err := json.Unmarshal([]byte(plan), &custom); err != nil { - return "", err - } - if custom.Refresh == nil { - custom.Refresh = utils.Pointer(false) - } - customJSON, err := json.Marshal(custom) - if err != nil { - return "", err - } - return string(customJSON), nil -} - -func connectorConfigWithDefaultsJira(plan string) (string, error) { - return connectorConfigWithDefaults[connectors.ConfigPropertiesJira](plan) -} - -func connectorConfigWithDefaultsOpsgenie(plan string) (string, error) { - return plan, nil -} - -func connectorConfigWithDefaultsPagerduty(plan string) (string, error) { - return connectorConfigWithDefaults[connectors.ConfigPropertiesPagerduty](plan) -} - -func connectorConfigWithDefaultsResilient(plan string) (string, error) { - return plan, nil -} - -func connectorConfigWithDefaultsServicenow(plan, backend string) (string, error) { - var planConfig connectors.ConfigPropertiesServicenow - if err := json.Unmarshal([]byte(plan), &planConfig); err != nil { - return "", err - } - var backendConfig connectors.ConfigPropertiesServicenow - if err := json.Unmarshal([]byte(backend), &backendConfig); err != nil { - return "", err - } - if planConfig.IsOAuth == nil && backendConfig.IsOAuth != nil && !*backendConfig.IsOAuth { - planConfig.IsOAuth = utils.Pointer(false) - } - if planConfig.UsesTableApi == nil { - planConfig.UsesTableApi = utils.Pointer(true) - } - customJSON, err := json.Marshal(planConfig) - if err != nil { - return "", err - } - return string(customJSON), nil -} - -func connectorConfigWithDefaultsServicenowItom(plan string) 
(string, error) { - var custom connectors.ConfigPropertiesServicenowItom - if err := json.Unmarshal([]byte(plan), &custom); err != nil { - return "", err - } - if custom.IsOAuth == nil { - custom.IsOAuth = utils.Pointer(false) - } - customJSON, err := json.Marshal(custom) - if err != nil { - return "", err - } - return string(customJSON), nil -} - -func connectorConfigWithDefaultsServicenowSir(plan, backend string) (string, error) { - return connectorConfigWithDefaultsServicenow(plan, backend) -} - -func connectorConfigWithDefaultsSwimlane(plan string) (string, error) { - var custom connectors.ConfigPropertiesSwimlane - if err := json.Unmarshal([]byte(plan), &custom); err != nil { - return "", err - } - if custom.Mappings == nil { - custom.Mappings = &connectors.ConfigPropertiesSwimlaneMappings{} - } - customJSON, err := json.Marshal(custom) - if err != nil { - return "", err - } - return string(customJSON), nil -} - -func connectorConfigWithDefaultsTines(plan string) (string, error) { - return plan, nil -} - -func connectorConfigWithDefaultsWebhook(plan string) (string, error) { - return plan, nil -} - -func connectorConfigWithDefaultsXmatters(plan string) (string, error) { - var custom connectors.ConfigPropertiesXmatters - if err := json.Unmarshal([]byte(plan), &custom); err != nil { - return "", err - } - if custom.UsesBasic == nil { - custom.UsesBasic = utils.Pointer(true) - } - customJSON, err := json.Marshal(custom) - if err != nil { - return "", err - } - return string(customJSON), nil -} - -func createConnectorRequestBody(connector models.KibanaActionConnector) (io.Reader, error) { - switch connectors.ConnectorTypes(connector.ConnectorTypeID) { - - case connectors.ConnectorTypesDotCasesWebhook: - return createConnectorRequestCasesWebhook(connector) - - case connectors.ConnectorTypesDotEmail: - return createConnectorRequestEmail(connector) - - case connectors.ConnectorTypesDotGemini: - return createConnectorRequestGemini(connector) - - case connectors.ConnectorTypesDotIndex: - return createConnectorRequestIndex(connector) - - case connectors.ConnectorTypesDotJira: - return createConnectorRequestJira(connector) - - case connectors.ConnectorTypesDotOpsgenie: - return createConnectorRequestOpsgenie(connector) - - case connectors.ConnectorTypesDotPagerduty: - return createConnectorRequestPagerduty(connector) - - case connectors.ConnectorTypesDotResilient: - return createConnectorRequestResilient(connector) - - case connectors.ConnectorTypesDotServicenow: - return createConnectorRequestServicenow(connector) - - case connectors.ConnectorTypesDotServicenowItom: - return createConnectorRequestServicenowItom(connector) - - case connectors.ConnectorTypesDotServicenowSir: - return createConnectorRequestServicenowSir(connector) - - case connectors.ConnectorTypesDotServerLog: - return createConnectorRequestServerLog(connector) - - case connectors.ConnectorTypesDotSlack: - return createConnectorRequestSlack(connector) - - case connectors.ConnectorTypesDotSlackApi: - return createConnectorRequestSlackApi(connector) - - case connectors.ConnectorTypesDotSwimlane: - return createConnectorRequestSwimlane(connector) - - case connectors.ConnectorTypesDotTeams: - return createConnectorRequestTeams(connector) - - case connectors.ConnectorTypesDotTines: - return createConnectorRequestTines(connector) - - case connectors.ConnectorTypesDotWebhook: - return createConnectorRequestWebhook(connector) - - case connectors.ConnectorTypesDotXmatters: - return createConnectorRequestXmatters(connector) - } - - return 
nil, fmt.Errorf("unknown connector type [%s]", connector.ConnectorTypeID) -} - -func updateConnectorRequestBody(connector models.KibanaActionConnector) (io.Reader, error) { - switch connectors.ConnectorTypes(connector.ConnectorTypeID) { - - case connectors.ConnectorTypesDotCasesWebhook: - return updateConnectorRequestCasesWebhook(connector) - - case connectors.ConnectorTypesDotEmail: - return updateConnectorRequestEmail(connector) - - case connectors.ConnectorTypesDotGemini: - return updateConnectorRequestGemini(connector) - - case connectors.ConnectorTypesDotIndex: - return updateConnectorRequestIndex(connector) - - case connectors.ConnectorTypesDotJira: - return updateConnectorRequestJira(connector) - - case connectors.ConnectorTypesDotOpsgenie: - return updateConnectorRequestOpsgenie(connector) - - case connectors.ConnectorTypesDotPagerduty: - return updateConnectorRequestPagerduty(connector) - - case connectors.ConnectorTypesDotResilient: - return updateConnectorRequestResilient(connector) - - case connectors.ConnectorTypesDotServicenow: - return updateConnectorRequestServicenow(connector) - - case connectors.ConnectorTypesDotServicenowItom: - return updateConnectorRequestServicenowItom(connector) - - case connectors.ConnectorTypesDotServicenowSir: - return updateConnectorRequestServicenowSir(connector) - - case connectors.ConnectorTypesDotServerLog: - return updateConnectorRequestServerlog(connector) - - case connectors.ConnectorTypesDotSlack: - return updateConnectorRequestSlack(connector) - - case connectors.ConnectorTypesDotSlackApi: - return updateConnectorRequestSlackApi(connector) - - case connectors.ConnectorTypesDotSwimlane: - return updateConnectorRequestSwimlane(connector) - - case connectors.ConnectorTypesDotTeams: - return updateConnectorRequestTeams(connector) - - case connectors.ConnectorTypesDotTines: - return updateConnectorRequestTines(connector) - - case connectors.ConnectorTypesDotWebhook: - return updateConnectorRequestWebhook(connector) - - case connectors.ConnectorTypesDotXmatters: - return updateConnectorRequestXmatters(connector) - } - - return nil, fmt.Errorf("unknown connector type [%s]", connector.ConnectorTypeID) -} - -func marshalConnectorRequest[C, S, R any](connector models.KibanaActionConnector, config *C, secrets *S, request *R) (io.Reader, error) { - if config != nil && len(connector.ConfigJSON) > 0 { - if err := json.Unmarshal([]byte(connector.ConfigJSON), config); err != nil { - return nil, fmt.Errorf("failed to unmarshal [config] attribute: %w", err) - } - } - - if secrets != nil && len(connector.SecretsJSON) > 0 { - if err := json.Unmarshal([]byte(connector.SecretsJSON), secrets); err != nil { - return nil, fmt.Errorf("failed to unmarshal [secrets] attribute: %w", err) - } - } - - bt, err := json.Marshal(request) - if err != nil { - return nil, fmt.Errorf("failed to marshal request: %w", err) - } - - return bytes.NewReader(bt), nil -} - -func createConnectorRequestCasesWebhook(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestCasesWebhook{ - ConnectorTypeId: connectors.DotCasesWebhook, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestEmail(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestEmail{ - ConnectorTypeId: connectors.CreateConnectorRequestEmailConnectorTypeIdDotEmail, - Name: connector.Name, - } - - return 
marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestGemini(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestGemini{ - ConnectorTypeId: connectors.CreateConnectorRequestGeminiConnectorTypeIdDotGemini, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestIndex(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestIndex{ - ConnectorTypeId: connectors.CreateConnectorRequestIndexConnectorTypeIdDotIndex, - Name: connector.Name, - } - - return marshalConnectorRequest[connectors.ConfigPropertiesIndex, any](connector, &request.Config, nil, &request) -} - -func createConnectorRequestJira(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestJira{ - ConnectorTypeId: connectors.CreateConnectorRequestJiraConnectorTypeIdDotJira, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestOpsgenie(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestOpsgenie{ - ConnectorTypeId: connectors.CreateConnectorRequestOpsgenieConnectorTypeIdDotOpsgenie, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestPagerduty(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestPagerduty{ - ConnectorTypeId: connectors.CreateConnectorRequestPagerdutyConnectorTypeIdDotPagerduty, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestResilient(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestResilient{ - ConnectorTypeId: connectors.CreateConnectorRequestResilientConnectorTypeIdDotResilient, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestServicenow(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestServicenow{ - ConnectorTypeId: connectors.CreateConnectorRequestServicenowConnectorTypeIdDotServicenow, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestServicenowItom(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestServicenowItom{ - ConnectorTypeId: connectors.CreateConnectorRequestServicenowItomConnectorTypeIdDotServicenowItom, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestServicenowSir(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestServicenowSir{ - ConnectorTypeId: connectors.CreateConnectorRequestServicenowSirConnectorTypeIdDotServicenowSir, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestServerLog(connector models.KibanaActionConnector) (io.Reader, error) { - request := 
connectors.CreateConnectorRequestServerlog{ - ConnectorTypeId: connectors.CreateConnectorRequestServerlogConnectorTypeIdDotServerLog, - Name: connector.Name, - } - - return marshalConnectorRequest[any, any](connector, nil, nil, &request) -} - -func createConnectorRequestSlack(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestSlack{ - ConnectorTypeId: connectors.CreateConnectorRequestSlackConnectorTypeIdDotSlack, - Name: connector.Name, - } - - return marshalConnectorRequest[any](connector, nil, &request.Secrets, &request) -} - -func createConnectorRequestSlackApi(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestSlackApi{ - ConnectorTypeId: connectors.CreateConnectorRequestSlackApiConnectorTypeIdDotSlackApi, - Name: connector.Name, - } - - return marshalConnectorRequest[any](connector, nil, &request.Secrets, &request) -} - -func createConnectorRequestSwimlane(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestSwimlane{ - ConnectorTypeId: connectors.CreateConnectorRequestSwimlaneConnectorTypeIdDotSwimlane, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestTeams(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestTeams{ - ConnectorTypeId: connectors.CreateConnectorRequestTeamsConnectorTypeIdDotTeams, - Name: connector.Name, - } - - return marshalConnectorRequest[any](connector, nil, &request.Secrets, &request) -} - -func createConnectorRequestTines(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestTines{ - ConnectorTypeId: connectors.CreateConnectorRequestTinesConnectorTypeIdDotTines, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestWebhook(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestWebhook{ - ConnectorTypeId: connectors.CreateConnectorRequestWebhookConnectorTypeIdDotWebhook, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func createConnectorRequestXmatters(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.CreateConnectorRequestXmatters{ - ConnectorTypeId: connectors.CreateConnectorRequestXmattersConnectorTypeIdDotXmatters, - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestCasesWebhook(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestCasesWebhook{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestEmail(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestEmail{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestGemini(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestGemini{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, 
&request.Secrets, &request) -} - -func updateConnectorRequestIndex(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestIndex{ - Name: connector.Name, - } - - return marshalConnectorRequest[connectors.ConfigPropertiesIndex, any](connector, &request.Config, nil, &request) -} - -func updateConnectorRequestJira(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestJira{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestOpsgenie(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestOpsgenie{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestPagerduty(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestPagerduty{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestResilient(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestResilient{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestServicenow(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestServicenow{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestServicenowItom(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestServicenowItom{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestServicenowSir(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestServicenowSir{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestServerlog(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestServerlog{ - Name: connector.Name, - } - - return marshalConnectorRequest[any, any](connector, nil, nil, &request) -} - -func updateConnectorRequestSlack(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestSlack{ - Name: connector.Name, - } - - return marshalConnectorRequest[any](connector, nil, &request.Secrets, &request) -} - -func updateConnectorRequestSlackApi(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestSlackApi{ - Name: connector.Name, - } - - return marshalConnectorRequest[any](connector, nil, &request.Secrets, &request) -} - -func updateConnectorRequestSwimlane(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestSwimlane{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestTeams(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestTeams{ - Name: connector.Name, - } 
- - return marshalConnectorRequest[any](connector, nil, &request.Secrets, &request) -} - -func updateConnectorRequestTines(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestTines{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestWebhook(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestWebhook{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func updateConnectorRequestXmatters(connector models.KibanaActionConnector) (io.Reader, error) { - request := connectors.UpdateConnectorRequestXmatters{ - Name: connector.Name, - } - - return marshalConnectorRequest(connector, &request.Config, &request.Secrets, &request) -} - -func connectorResponseToModel(spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - discriminator, err := properties.Discriminator() - if err != nil { - return nil, err - } - - switch connectors.ConnectorTypes(discriminator) { - - case connectors.ConnectorTypesDotCasesWebhook: - return connectorResponseToModelCasesWebhook(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotEmail: - return connectorResponseToModelEmail(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotGemini: - return connectorResponseToModelGemini(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotIndex: - return connectorResponseToModelIndex(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotJira: - return connectorResponseToModelJira(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotOpsgenie: - return connectorResponseToModelOpsgenie(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotPagerduty: - return connectorResponseToModelPagerduty(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotResilient: - return connectorResponseToModelResilient(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotServerLog: - return connectorResponseToModelServerlog(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotServicenow: - return connectorResponseToModelServicenow(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotServicenowItom: - return connectorResponseToModelServicenowItom(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotServicenowSir: - return connectorResponseToModelServicenowSir(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotSlack: - return connectorResponseToModelSlack(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotSlackApi: - return connectorResponseToModelSlackApi(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotSwimlane: - return connectorResponseToModelSwimlane(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotTeams: - return connectorResponseToModelTeams(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotTines: - return connectorResponseToModelTines(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotWebhook: - return connectorResponseToModelWebhook(discriminator, spaceID, properties) - - case connectors.ConnectorTypesDotXmatters: - return connectorResponseToModelXmatters(discriminator, 
spaceID, properties) - } - - return nil, fmt.Errorf("unknown connector type [%s]", discriminator) -} - -func connectorResponseToModelCasesWebhook(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesCasesWebhook() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelEmail(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesEmail() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelGemini(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesGemini() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelIndex(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesIndex() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := 
models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelJira(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesJira() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelOpsgenie(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesOpsgenie() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelPagerduty(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesPagerduty() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelResilient(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesResilient() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets 
:= false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelServerlog(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesServerlog() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelServicenow(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesServicenow() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelServicenowItom(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesServicenowItom() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelServicenowSir(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := 
properties.AsConnectorResponsePropertiesServicenowSir() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelSlack(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesSlack() - if err != nil { - return nil, err - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - } - - return &connector, nil -} - -func connectorResponseToModelSlackApi(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesSlackApi() - if err != nil { - return nil, err - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - } - - return &connector, nil -} - -func connectorResponseToModelSwimlane(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesSwimlane() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelTeams(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesTeams() - if err != nil { - return nil, err - } - - isDeprecated := false - isMissingSecrets 
:= false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - } - - return &connector, nil -} - -func connectorResponseToModelTines(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesTines() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelWebhook(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesWebhook() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} - -func connectorResponseToModelXmatters(discriminator, spaceID string, properties connectors.ConnectorResponseProperties) (*models.KibanaActionConnector, error) { - resp, err := properties.AsConnectorResponsePropertiesXmatters() - if err != nil { - return nil, err - } - - config, err := json.Marshal(resp.Config) - if err != nil { - return nil, fmt.Errorf("unable to marshal config: %w", err) - } - - isDeprecated := false - isMissingSecrets := false - - if resp.IsDeprecated != nil { - isDeprecated = *resp.IsDeprecated - } - - if resp.IsMissingSecrets != nil { - isMissingSecrets = *resp.IsMissingSecrets - } - - connector := models.KibanaActionConnector{ - ConnectorID: resp.Id, - SpaceID: spaceID, - Name: resp.Name, - ConnectorTypeID: discriminator, - IsDeprecated: isDeprecated, - IsMissingSecrets: isMissingSecrets, - IsPreconfigured: bool(resp.IsPreconfigured), - ConfigJSON: string(config), - } - - return &connector, nil -} diff --git a/internal/clients/kibana/connector_test.go b/internal/clients/kibana/connector_test.go deleted file mode 100644 index 44a352fc2..000000000 --- a/internal/clients/kibana/connector_test.go +++ /dev/null @@ -1,256 +0,0 @@ -package kibana - -import ( - "context" - "encoding/json" - 
"fmt" - "net/http" - "net/http/httptest" - "os" - "testing" - - "github.com/elastic/terraform-provider-elasticstack/generated/connectors" - "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/elastic/terraform-provider-elasticstack/internal/models" - "github.com/stretchr/testify/require" -) - -func Test_connectorResponseToModel(t *testing.T) { - type testCase struct { - name string - spaceId string - response connectors.ConnectorResponseProperties - expectedModel *models.KibanaActionConnector - expectedError error - } - - generator := func(connectorTypeID string, config any, propertiesGenerator func(*connectors.ConnectorResponseProperties) error) testCase { - return testCase{ - name: fmt.Sprintf("it should parse empty [%s] connector", connectorTypeID), - spaceId: "test", - response: func() connectors.ConnectorResponseProperties { - var properties connectors.ConnectorResponseProperties - err := propertiesGenerator(&properties) - require.Nil(t, err) - return properties - }(), - expectedModel: &models.KibanaActionConnector{ - SpaceID: "test", - ConnectorTypeID: connectorTypeID, - ConfigJSON: func() string { - if config == nil { - return "" - } - byt, err := json.Marshal(config) - require.Nil(t, err) - return string(byt) - }(), - }, - } - } - tests := []testCase{ - { - name: "it should fail if discriminator is unknown", - response: func() connectors.ConnectorResponseProperties { - discriminator := struct { - Discriminator string `json:"connector_type_id"` - }{"unknown-value"} - byt, err := json.Marshal(discriminator) - require.Nil(t, err) - var resp connectors.ConnectorResponseProperties - err = resp.UnmarshalJSON(byt) - require.Nil(t, err) - return resp - }(), - expectedError: func() error { return fmt.Errorf("unknown connector type [unknown-value]") }(), - }, - generator(".cases-webhook", connectors.ConfigPropertiesCasesWebhook{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesCasesWebhook(connectors.ConnectorResponsePropertiesCasesWebhook{}) - }), - generator(".email", connectors.ConfigPropertiesEmail{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesEmail(connectors.ConnectorResponsePropertiesEmail{}) - }), - generator(".gemini", connectors.ConfigPropertiesGemini{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesGemini(connectors.ConnectorResponsePropertiesGemini{}) - }), - generator(".index", connectors.ConfigPropertiesIndex{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesIndex(connectors.ConnectorResponsePropertiesIndex{}) - }), - generator(".jira", connectors.ConfigPropertiesJira{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesJira(connectors.ConnectorResponsePropertiesJira{}) - }), - generator(".opsgenie", connectors.ConfigPropertiesOpsgenie{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesOpsgenie(connectors.ConnectorResponsePropertiesOpsgenie{}) - }), - generator(".pagerduty", connectors.ConfigPropertiesPagerduty{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesPagerduty(connectors.ConnectorResponsePropertiesPagerduty{}) - }), - generator(".resilient", connectors.ConfigPropertiesResilient{}, func(props 
*connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesResilient(connectors.ConnectorResponsePropertiesResilient{}) - }), - generator(".server-log", map[string]interface{}{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesServerlog(connectors.ConnectorResponsePropertiesServerlog{ - Config: &map[string]interface{}{}, - }) - }), - generator(".servicenow", connectors.ConfigPropertiesServicenow{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesServicenow(connectors.ConnectorResponsePropertiesServicenow{}) - }), - generator(".servicenow-itom", connectors.ConfigPropertiesServicenowItom{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesServicenowItom(connectors.ConnectorResponsePropertiesServicenowItom{}) - }), - generator(".servicenow-sir", connectors.ConfigPropertiesServicenow{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesServicenowSir(connectors.ConnectorResponsePropertiesServicenowSir{}) - }), - generator(".slack", nil, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesSlack(connectors.ConnectorResponsePropertiesSlack{}) - }), - generator(".slack_api", nil, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesSlackApi(connectors.ConnectorResponsePropertiesSlackApi{}) - }), - generator(".swimlane", connectors.ConfigPropertiesSwimlane{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesSwimlane(connectors.ConnectorResponsePropertiesSwimlane{}) - }), - generator(".teams", nil, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesTeams(connectors.ConnectorResponsePropertiesTeams{}) - }), - generator(".tines", connectors.ConfigPropertiesTines{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesTines(connectors.ConnectorResponsePropertiesTines{}) - }), - generator(".webhook", connectors.ConfigPropertiesWebhook{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesWebhook(connectors.ConnectorResponsePropertiesWebhook{}) - }), - generator(".xmatters", connectors.ConfigPropertiesXmatters{}, func(props *connectors.ConnectorResponseProperties) error { - return props.FromConnectorResponsePropertiesXmatters(connectors.ConnectorResponsePropertiesXmatters{}) - }), - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - model, err := connectorResponseToModel(tt.spaceId, tt.response) - - if tt.expectedError == nil { - require.Nil(t, err) - require.Equal(t, tt.expectedModel, model) - } else { - require.Equal(t, tt.expectedError, err) - } - }) - } -} - -func TestGetConnectorByName(t *testing.T) { - const getConnectorsResponse = `[ - { - "id": "c55b6eb0-6bad-11eb-9f3b-611eebc6c3ad", - "connector_type_id": ".index", - "name": "my-connector", - "config": { - "index": "test-index", - "refresh": false, - "executionTimeField": null - }, - "is_preconfigured": false, - "is_deprecated": false, - "is_missing_secrets": false, - "referenced_by_count": 3 - }, - { - "id": "d55b6eb0-6bad-11eb-9f3b-611eebc6c3ad", - "connector_type_id": ".index", - "name": "doubledup-connector", - "config": { - "index": 
"test-index", - "refresh": false, - "executionTimeField": null - }, - "is_preconfigured": false, - "is_deprecated": false, - "is_missing_secrets": false, - "referenced_by_count": 3 - }, - { - "id": "855b6eb0-6bad-11eb-9f3b-611eebc6c3ad", - "connector_type_id": ".index", - "name": "doubledup-connector", - "config": { - "index": "test-index", - "refresh": false, - "executionTimeField": null - }, - "is_preconfigured": false, - "is_deprecated": false, - "is_missing_secrets": false, - "referenced_by_count": 0 - } - ]` - - const emptyConnectorsResponse = `[]` - - var requests []*http.Request - var mockResponses []string - var httpStatus int - server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { - requests = append(requests, req) - - if len(mockResponses) > 0 { - r := []byte(mockResponses[0]) - rw.Header().Add("X-Elastic-Product", "Elasticsearch") - rw.Header().Add("Content-Type", "application/json") - rw.WriteHeader(httpStatus) - _, err := rw.Write(r) - require.NoError(t, err) - mockResponses = mockResponses[1:] - } else { - t.Fatalf("Unexpected request: %s %s", req.Method, req.URL.Path) - } - })) - defer server.Close() - - httpStatus = http.StatusOK - mockResponses = append(mockResponses, getConnectorsResponse) - - err := os.Setenv("ELASTICSEARCH_URL", server.URL) - require.NoError(t, err) - err = os.Setenv("KIBANA_ENDPOINT", server.URL) - require.NoError(t, err) - - apiClient, err := clients.NewAcceptanceTestingClient() - require.NoError(t, err) - - connector, diags := SearchConnectors(context.Background(), apiClient, "my-connector", "default", "") - require.Nil(t, diags) - require.NotNil(t, connector) - - mockResponses = append(mockResponses, getConnectorsResponse) - failConnector, diags := SearchConnectors(context.Background(), apiClient, "failwhale", "default", "") - require.Nil(t, diags) - require.Empty(t, failConnector) - - mockResponses = append(mockResponses, getConnectorsResponse) - dupConnector, diags := SearchConnectors(context.Background(), apiClient, "doubledup-connector", "default", "") - require.Nil(t, diags) - require.Len(t, dupConnector, 2) - - mockResponses = append(mockResponses, getConnectorsResponse) - wrongConnectorType, diags := SearchConnectors(context.Background(), apiClient, "my-connector", "default", ".slack") - require.Nil(t, diags) - require.Empty(t, wrongConnectorType) - - mockResponses = append(mockResponses, getConnectorsResponse) - successConnector, diags := SearchConnectors(context.Background(), apiClient, "my-connector", "default", ".index") - require.Nil(t, diags) - require.Len(t, successConnector, 1) - - mockResponses = append(mockResponses, emptyConnectorsResponse) - emptyConnector, diags := SearchConnectors(context.Background(), apiClient, "my-connector", "default", "") - require.Nil(t, diags) - require.Empty(t, emptyConnector) - - httpStatus = http.StatusBadGateway - mockResponses = append(mockResponses, emptyConnectorsResponse) - fail, diags := SearchConnectors(context.Background(), apiClient, "my-connector", "default", "") - require.NotNil(t, diags) - require.Nil(t, fail) - -} diff --git a/internal/clients/kibana_oapi/connector.go b/internal/clients/kibana_oapi/connector.go new file mode 100644 index 000000000..af6177773 --- /dev/null +++ b/internal/clients/kibana_oapi/connector.go @@ -0,0 +1,513 @@ +package kibana_oapi + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "net/http" + "strings" + + "github.com/elastic/terraform-provider-elasticstack/generated/kbapi" + 
"github.com/elastic/terraform-provider-elasticstack/internal/models" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-log/tflog" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" +) + +func CreateConnector(ctx context.Context, client *Client, connectorOld models.KibanaActionConnector) (string, diag.Diagnostics) { + body, err := createConnectorRequestBody(connectorOld) + if err != nil { + return "", diag.FromErr(err) + } + + resp, err := client.API.PostActionsConnectorIdWithResponse( + ctx, connectorOld.SpaceID, connectorOld.ConnectorID, body, + // When there isn't an explicit connector ID the request path will include a trailing slash + // Kibana 8.7 and lower return a 404 for such request paths, whilst 8.8+ correctly handle then empty ID parameter + // This request editor ensures that the trailing slash is removed allowing all supported + // Stack versions to correctly create connectors without an explicit ID + func(ctx context.Context, req *http.Request) error { + if connectorOld.ConnectorID == "" { + req.URL.Path = strings.TrimRight(req.URL.Path, "/") + } + return nil + }, + ) + if err != nil { + return "", diag.FromErr(err) + } + + switch resp.StatusCode() { + case http.StatusOK: + return resp.JSON200.Id, nil + default: + return "", reportUnknownErrorSDK(resp.StatusCode(), resp.Body) + } +} + +func UpdateConnector(ctx context.Context, client *Client, connectorOld models.KibanaActionConnector) (string, diag.Diagnostics) { + body, err := updateConnectorRequestBody(connectorOld) + if err != nil { + return "", diag.FromErr(err) + } + + resp, err := client.API.PutActionsConnectorIdWithResponse(ctx, connectorOld.SpaceID, connectorOld.ConnectorID, body) + if err != nil { + return "", diag.Errorf("unable to update connector: [%v]", err) + } + + switch resp.StatusCode() { + case http.StatusOK: + return resp.JSON200.Id, nil + default: + return "", reportUnknownErrorSDK(resp.StatusCode(), resp.Body) + } +} + +func GetConnector(ctx context.Context, client *Client, connectorID, spaceID string) (*models.KibanaActionConnector, diag.Diagnostics) { + resp, err := client.API.GetActionsConnectorIdWithResponse(ctx, spaceID, connectorID) + if err != nil { + return nil, diag.Errorf("unable to get connector: [%v]", err) + } + + switch resp.StatusCode() { + case http.StatusOK: + return ConnectorResponseToModel(spaceID, resp.JSON200) + case http.StatusNotFound: + return nil, nil + default: + return nil, reportUnknownErrorSDK(resp.StatusCode(), resp.Body) + } +} + +func SearchConnectors(ctx context.Context, client *Client, connectorName, spaceID, connectorTypeID string) ([]*models.KibanaActionConnector, diag.Diagnostics) { + resp, err := client.API.GetActionsConnectorsWithResponse(ctx, spaceID) + if err != nil { + return nil, diag.Errorf("unable to get connectors: [%v]", err) + } + + if resp.StatusCode() != http.StatusOK { + return nil, reportUnknownErrorSDK(resp.StatusCode(), resp.Body) + } + + foundConnectors := []*models.KibanaActionConnector{} + for _, connector := range *resp.JSON200 { + if connector.Name != connectorName { + continue + } + + if connectorTypeID != "" && connector.ConnectorTypeId != connectorTypeID { + continue + } + + c, diags := ConnectorResponseToModel(spaceID, &connector) + if diags.HasError() { + return nil, diags + } + + foundConnectors = append(foundConnectors, c) + } + if len(foundConnectors) == 0 { + tflog.Debug(ctx, fmt.Sprintf("no connectors found with name [%s/%s] and type [%s]", spaceID, connectorName, 
connectorTypeID)) + } + + return foundConnectors, nil +} + +func ConnectorResponseToModel(spaceID string, connector *kbapi.ConnectorResponse) (*models.KibanaActionConnector, diag.Diagnostics) { + if connector == nil { + return nil, diag.Errorf("connector response is nil") + } + + var configJSON []byte + if connector.Config != nil { + configMap := *connector.Config + for k, v := range configMap { + if v == nil { + delete(configMap, k) + } + } + + var err error + configJSON, err = json.Marshal(configMap) + if err != nil { + return nil, diag.Errorf("unable to marshal config: %v", err) + } + + // If we have a specific config type, marshal into and out of that to + // remove any extra fields Kibana may have returned. + handler, ok := connectorConfigHandlers[connector.ConnectorTypeId] + if ok { + configJSONString, err := handler.remarshalConfig(string(configJSON)) + if err != nil { + return nil, diag.Errorf("failed to remarshal config: %v", err) + } + + configJSON = []byte(configJSONString) + } + } + + model := &models.KibanaActionConnector{ + ConnectorID: connector.Id, + SpaceID: spaceID, + Name: connector.Name, + ConfigJSON: string(configJSON), + ConnectorTypeID: connector.ConnectorTypeId, + IsDeprecated: connector.IsDeprecated, + IsPreconfigured: connector.IsPreconfigured, + } + + if connector.IsMissingSecrets != nil { + model.IsMissingSecrets = *connector.IsMissingSecrets + } + + return model, nil +} + +func DeleteConnector(ctx context.Context, client *Client, connectorID string, spaceID string) diag.Diagnostics { + resp, err := client.API.DeleteActionsConnectorIdWithResponse(ctx, spaceID, connectorID) + if err != nil { + return diag.Errorf("unable to delete connector: [%v]", err) + } + + if resp.StatusCode() != http.StatusOK && resp.StatusCode() != http.StatusNoContent { + return reportUnknownErrorSDK(resp.StatusCode(), resp.Body) + } + + return nil +} + +type connectorConfigHandler struct { + defaults func(plan, backend string) (string, error) + remarshalConfig func(config string) (string, error) +} + +var connectorConfigHandlers = map[string]connectorConfigHandler{ + ".cases-webhook": { + defaults: connectorConfigWithDefaultsCasesWebhook, + remarshalConfig: remarshalConfig[kbapi.CasesWebhookConfig], + }, + ".email": { + defaults: connectorConfigWithDefaultsEmail, + remarshalConfig: remarshalConfig[kbapi.EmailConfig], + }, + ".gemini": { + defaults: connectorConfigWithDefaultsGemini, + remarshalConfig: remarshalConfig[kbapi.GeminiConfig], + }, + ".index": { + defaults: connectorConfigWithDefaultsIndex, + remarshalConfig: remarshalConfig[kbapi.IndexConfig], + }, + ".jira": { + defaults: connectorConfigWithDefaultsJira, + remarshalConfig: remarshalConfig[kbapi.JiraConfig], + }, + ".opsgenie": { + defaults: connectorConfigWithDefaultsOpsgenie, + remarshalConfig: remarshalConfig[kbapi.OpsgenieConfig], + }, + ".pagerduty": { + defaults: connectorConfigWithDefaultsPagerduty, + remarshalConfig: remarshalConfig[kbapi.PagerdutyConfig], + }, + ".resilient": { + defaults: connectorConfigWithDefaultsResilient, + remarshalConfig: remarshalConfig[kbapi.ResilientConfig], + }, + ".servicenow": { + defaults: connectorConfigWithDefaultsServicenow, + remarshalConfig: remarshalConfig[kbapi.ServicenowConfig], + }, + ".servicenow-itom": { + defaults: connectorConfigWithDefaultsServicenowItom, + remarshalConfig: remarshalConfig[kbapi.ServicenowItomConfig], + }, + ".servicenow-sir": { + defaults: connectorConfigWithDefaultsServicenowSir, + remarshalConfig: remarshalConfig[kbapi.ServicenowConfig], + }, + 
".swimlane": { + defaults: connectorConfigWithDefaultsSwimlane, + remarshalConfig: remarshalConfig[kbapi.SwimlaneConfig], + }, + ".tines": { + defaults: connectorConfigWithDefaultsTines, + remarshalConfig: remarshalConfig[kbapi.TinesConfig], + }, + ".webhook": { + defaults: connectorConfigWithDefaultsWebhook, + remarshalConfig: remarshalConfig[kbapi.WebhookConfig], + }, + ".xmatters": { + defaults: connectorConfigWithDefaultsXmatters, + remarshalConfig: remarshalConfig[kbapi.XmattersConfig], + }, +} + +func ConnectorConfigWithDefaults(connectorTypeID, plan, backend, state string) (string, error) { + handler, ok := connectorConfigHandlers[connectorTypeID] + if !ok { + return plan, errors.New("unknown connector type ID: " + connectorTypeID) + } + + return handler.defaults(plan, backend) +} + +// User can omit optonal fields in config JSON. +// The func adds empty optional fields to the diff. +// Otherwise plan command shows omitted fields as the diff, +// because backend returns all fields. +func remarshalConfig[T any](plan string) (string, error) { + var config T + if err := json.Unmarshal([]byte(plan), &config); err != nil { + return "", err + } + customJSON, err := json.Marshal(config) + if err != nil { + return "", err + } + return string(customJSON), nil +} + +func connectorConfigWithDefaultsCasesWebhook(plan, _ string) (string, error) { + var custom kbapi.CasesWebhookConfig + if err := json.Unmarshal([]byte(plan), &custom); err != nil { + return "", err + } + if custom.CreateIncidentMethod == nil { + custom.CreateIncidentMethod = utils.Pointer(kbapi.CasesWebhookConfigCreateIncidentMethodPost) + } + if custom.HasAuth == nil { + custom.HasAuth = utils.Pointer(true) + } + if custom.UpdateIncidentMethod == nil { + custom.UpdateIncidentMethod = utils.Pointer(kbapi.CasesWebhookConfigUpdateIncidentMethodPut) + } + customJSON, err := json.Marshal(custom) + if err != nil { + return "", err + } + return string(customJSON), nil +} + +func connectorConfigWithDefaultsEmail(plan, _ string) (string, error) { + var custom kbapi.EmailConfig + if err := json.Unmarshal([]byte(plan), &custom); err != nil { + return "", err + } + if custom.HasAuth == nil { + custom.HasAuth = utils.Pointer(true) + } + if custom.Service == nil { + custom.Service = utils.Pointer(kbapi.EmailConfigService("other")) + } + customJSON, err := json.Marshal(custom) + if err != nil { + return "", err + } + return string(customJSON), nil +} + +func connectorConfigWithDefaultsGemini(plan, _ string) (string, error) { + return plan, nil +} + +func connectorConfigWithDefaultsIndex(plan, _ string) (string, error) { + var custom kbapi.IndexConfig + if err := json.Unmarshal([]byte(plan), &custom); err != nil { + return "", err + } + if custom.Refresh == nil { + custom.Refresh = utils.Pointer(false) + } + customJSON, err := json.Marshal(custom) + if err != nil { + return "", err + } + return string(customJSON), nil +} + +func connectorConfigWithDefaultsJira(plan, _ string) (string, error) { + return remarshalConfig[kbapi.JiraConfig](plan) +} + +func connectorConfigWithDefaultsOpsgenie(plan, _ string) (string, error) { + return plan, nil +} + +func connectorConfigWithDefaultsPagerduty(plan, _ string) (string, error) { + return remarshalConfig[kbapi.PagerdutyConfig](plan) +} + +func connectorConfigWithDefaultsResilient(plan, _ string) (string, error) { + return plan, nil +} + +func connectorConfigWithDefaultsServicenow(plan, backend string) (string, error) { + var planConfig kbapi.ServicenowConfig + if err := json.Unmarshal([]byte(plan), 
&planConfig); err != nil { + return "", err + } + var backendConfig kbapi.ServicenowConfig + if err := json.Unmarshal([]byte(backend), &backendConfig); err != nil { + return "", err + } + if planConfig.IsOAuth == nil && backendConfig.IsOAuth != nil && !*backendConfig.IsOAuth { + planConfig.IsOAuth = utils.Pointer(false) + } + if planConfig.UsesTableApi == nil { + planConfig.UsesTableApi = utils.Pointer(true) + } + customJSON, err := json.Marshal(planConfig) + if err != nil { + return "", err + } + return string(customJSON), nil +} + +func connectorConfigWithDefaultsServicenowItom(plan, _ string) (string, error) { + var custom kbapi.ServicenowItomConfig + if err := json.Unmarshal([]byte(plan), &custom); err != nil { + return "", err + } + if custom.IsOAuth == nil { + custom.IsOAuth = utils.Pointer(false) + } + customJSON, err := json.Marshal(custom) + if err != nil { + return "", err + } + return string(customJSON), nil +} + +func connectorConfigWithDefaultsServicenowSir(plan, backend string) (string, error) { + return connectorConfigWithDefaultsServicenow(plan, backend) +} + +func connectorConfigWithDefaultsSwimlane(plan, _ string) (string, error) { + var custom kbapi.SwimlaneConfig + if err := json.Unmarshal([]byte(plan), &custom); err != nil { + return "", err + } + if custom.Mappings == nil { + custom.Mappings = &struct { + AlertIdConfig *struct { + FieldType string "json:\"fieldType\"" + Id string "json:\"id\"" + Key string "json:\"key\"" + Name string "json:\"name\"" + } "json:\"alertIdConfig,omitempty\"" + CaseIdConfig *struct { + FieldType string "json:\"fieldType\"" + Id string "json:\"id\"" + Key string "json:\"key\"" + Name string "json:\"name\"" + } "json:\"caseIdConfig,omitempty\"" + CaseNameConfig *struct { + FieldType string "json:\"fieldType\"" + Id string "json:\"id\"" + Key string "json:\"key\"" + Name string "json:\"name\"" + } "json:\"caseNameConfig,omitempty\"" + CommentsConfig *struct { + FieldType string "json:\"fieldType\"" + Id string "json:\"id\"" + Key string "json:\"key\"" + Name string "json:\"name\"" + } "json:\"commentsConfig,omitempty\"" + DescriptionConfig *struct { + FieldType string "json:\"fieldType\"" + Id string "json:\"id\"" + Key string "json:\"key\"" + Name string "json:\"name\"" + } "json:\"descriptionConfig,omitempty\"" + RuleNameConfig *struct { + FieldType string "json:\"fieldType\"" + Id string "json:\"id\"" + Key string "json:\"key\"" + Name string "json:\"name\"" + } "json:\"ruleNameConfig,omitempty\"" + SeverityConfig *struct { + FieldType string "json:\"fieldType\"" + Id string "json:\"id\"" + Key string "json:\"key\"" + Name string "json:\"name\"" + } "json:\"severityConfig,omitempty\"" + }{} + } + customJSON, err := json.Marshal(custom) + if err != nil { + return "", err + } + return string(customJSON), nil +} + +func connectorConfigWithDefaultsTines(plan, _ string) (string, error) { + return plan, nil +} + +func connectorConfigWithDefaultsWebhook(plan, _ string) (string, error) { + return plan, nil +} + +func connectorConfigWithDefaultsXmatters(plan, _ string) (string, error) { + var custom kbapi.XmattersConfig + if err := json.Unmarshal([]byte(plan), &custom); err != nil { + return "", err + } + if custom.UsesBasic == nil { + custom.UsesBasic = utils.Pointer(true) + } + customJSON, err := json.Marshal(custom) + if err != nil { + return "", err + } + return string(customJSON), nil +} + +func createConnectorRequestBody(connector models.KibanaActionConnector) (kbapi.PostActionsConnectorIdJSONRequestBody, error) { + req := 
kbapi.PostActionsConnectorIdJSONRequestBody{ + ConnectorTypeId: connector.ConnectorTypeID, + Name: connector.Name, + Config: &kbapi.CreateConnectorConfig{}, + Secrets: &kbapi.CreateConnectorSecrets{}, + } + + if len(connector.ConfigJSON) > 0 { + if err := json.Unmarshal([]byte(connector.ConfigJSON), &req.Config.AdditionalProperties); err != nil { + return kbapi.PostActionsConnectorIdJSONRequestBody{}, fmt.Errorf("failed to unmarshal [config] attribute: %w", err) + } + } + + if len(connector.SecretsJSON) > 0 { + if err := json.Unmarshal([]byte(connector.SecretsJSON), &req.Secrets.AdditionalProperties); err != nil { + return kbapi.PostActionsConnectorIdJSONRequestBody{}, fmt.Errorf("failed to unmarshal [secrets] attribute: %w", err) + } + } + + return req, nil +} + +func updateConnectorRequestBody(connector models.KibanaActionConnector) (kbapi.PutActionsConnectorIdJSONRequestBody, error) { + req := kbapi.PutActionsConnectorIdJSONRequestBody{ + Name: connector.Name, + Config: &kbapi.UpdateConnectorConfig{}, + Secrets: &kbapi.UpdateConnectorSecrets{}, + } + + if len(connector.ConfigJSON) > 0 { + if err := json.Unmarshal([]byte(connector.ConfigJSON), &req.Config.AdditionalProperties); err != nil { + return kbapi.PutActionsConnectorIdJSONRequestBody{}, fmt.Errorf("failed to unmarshal [config] attribute: %w", err) + } + } + + if len(connector.SecretsJSON) > 0 { + if err := json.Unmarshal([]byte(connector.SecretsJSON), &req.Secrets.AdditionalProperties); err != nil { + return kbapi.PutActionsConnectorIdJSONRequestBody{}, fmt.Errorf("failed to unmarshal [secrets] attribute: %w", err) + } + } + + return req, nil +} diff --git a/internal/clients/kibana_oapi/connector_test.go b/internal/clients/kibana_oapi/connector_test.go new file mode 100644 index 000000000..af45f0a27 --- /dev/null +++ b/internal/clients/kibana_oapi/connector_test.go @@ -0,0 +1,264 @@ +package kibana_oapi_test + +import ( + "context" + "net/http" + "net/http/httptest" + "os" + "testing" + + "github.com/elastic/terraform-provider-elasticstack/generated/kbapi" + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/elastic/terraform-provider-elasticstack/internal/models" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/stretchr/testify/require" +) + +func Test_connectorResponseToModel(t *testing.T) { + type testCase struct { + name string + spaceId string + response *kbapi.ConnectorResponse + expectedModel *models.KibanaActionConnector + expectedError diag.Diagnostics + } + tests := []testCase{ + { + name: "should return an error diag when response is nil", + spaceId: "default", + response: nil, + expectedModel: nil, + expectedError: diag.Errorf("connector response is nil"), + }, + { + name: "should map valid connector response to model", + spaceId: "default", + response: &kbapi.ConnectorResponse{ + Id: "test-id", + ConnectorTypeId: ".slack", + Name: "test-connector", + IsPreconfigured: false, + IsDeprecated: false, + IsMissingSecrets: func() *bool { b := false; return &b }(), + Config: func() *map[string]interface{} { + m := map[string]interface{}{"webhookUrl": "https://hooks.slack.com/services/xxx"} + return &m + }(), + }, + expectedModel: &models.KibanaActionConnector{ + ConnectorID: "test-id", + SpaceID: "default", + Name: "test-connector", + ConnectorTypeID: ".slack", + ConfigJSON: `{"webhookUrl":"https://hooks.slack.com/services/xxx"}`, + IsDeprecated: false, + IsMissingSecrets: false, + 
IsPreconfigured: false, + }, + expectedError: nil, + }, + { + name: "should handle empty config", + spaceId: "default", + response: &kbapi.ConnectorResponse{ + Id: "empty-id", + ConnectorTypeId: ".webhook", + Name: "empty-connector", + IsPreconfigured: false, + IsDeprecated: false, + IsMissingSecrets: func() *bool { b := false; return &b }(), + Config: nil, + }, + expectedModel: &models.KibanaActionConnector{ + ConnectorID: "empty-id", + SpaceID: "default", + Name: "empty-connector", + ConnectorTypeID: ".webhook", + ConfigJSON: "", + IsDeprecated: false, + IsMissingSecrets: false, + IsPreconfigured: false, + }, + expectedError: nil, + }, + { + name: "should handle missing optional fields", + spaceId: "default", + response: &kbapi.ConnectorResponse{ + Id: "missing-fields", + ConnectorTypeId: ".webhook", + Name: "missing-connector", + }, + expectedModel: &models.KibanaActionConnector{ + ConnectorID: "missing-fields", + SpaceID: "default", + Name: "missing-connector", + ConnectorTypeID: ".webhook", + ConfigJSON: "", + IsDeprecated: false, + IsMissingSecrets: false, + IsPreconfigured: false, + }, + expectedError: nil, + }, + { + name: "should handle non-default spaceId", + spaceId: "custom-space", + response: &kbapi.ConnectorResponse{ + Id: "custom-id", + ConnectorTypeId: ".webhook", + Name: "custom-connector", + IsPreconfigured: true, + IsDeprecated: true, + IsMissingSecrets: func() *bool { b := true; return &b }(), + Config: func() *map[string]interface{} { + m := map[string]interface{}{"url": "https://example.com"} + return &m + }(), + }, + expectedModel: &models.KibanaActionConnector{ + ConnectorID: "custom-id", + SpaceID: "custom-space", + Name: "custom-connector", + ConnectorTypeID: ".webhook", + ConfigJSON: `{"url":"https://example.com"}`, + IsDeprecated: true, + IsMissingSecrets: true, + IsPreconfigured: true, + }, + expectedError: nil, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + model, err := kibana_oapi.ConnectorResponseToModel(tt.spaceId, tt.response) + + if tt.expectedError == nil { + require.Nil(t, err) + require.Equal(t, tt.expectedModel, model) + } else { + require.Equal(t, tt.expectedError, err) + } + }) + } +} + +func TestGetConnectorByName(t *testing.T) { + const getConnectorsResponse = `[ + { + "id": "c55b6eb0-6bad-11eb-9f3b-611eebc6c3ad", + "connector_type_id": ".index", + "name": "my-connector", + "config": { + "index": "test-index", + "refresh": false, + "executionTimeField": null + }, + "is_preconfigured": false, + "is_deprecated": false, + "is_missing_secrets": false, + "referenced_by_count": 3 + }, + { + "id": "d55b6eb0-6bad-11eb-9f3b-611eebc6c3ad", + "connector_type_id": ".index", + "name": "doubledup-connector", + "config": { + "index": "test-index", + "refresh": false, + "executionTimeField": null + }, + "is_preconfigured": false, + "is_deprecated": false, + "is_missing_secrets": false, + "referenced_by_count": 3 + }, + { + "id": "855b6eb0-6bad-11eb-9f3b-611eebc6c3ad", + "connector_type_id": ".index", + "name": "doubledup-connector", + "config": { + "index": "test-index", + "refresh": false, + "executionTimeField": null + }, + "is_preconfigured": false, + "is_deprecated": false, + "is_missing_secrets": false, + "referenced_by_count": 0 + } + ]` + + const emptyConnectorsResponse = `[]` + + var requests []*http.Request + var mockResponses []string + var httpStatus int + server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { + requests = append(requests, req) + + if len(mockResponses) > 0 { + 
r := []byte(mockResponses[0]) + rw.Header().Add("X-Elastic-Product", "Elasticsearch") + rw.Header().Add("Content-Type", "application/json") + rw.WriteHeader(httpStatus) + _, err := rw.Write(r) + require.NoError(t, err) + mockResponses = mockResponses[1:] + } else { + t.Fatalf("Unexpected request: %s %s", req.Method, req.URL.Path) + } + })) + defer server.Close() + + httpStatus = http.StatusOK + mockResponses = append(mockResponses, getConnectorsResponse) + + err := os.Setenv("ELASTICSEARCH_URL", server.URL) + require.NoError(t, err) + err = os.Setenv("KIBANA_ENDPOINT", server.URL) + require.NoError(t, err) + + apiClient, err := clients.NewAcceptanceTestingClient() + require.NoError(t, err) + + oapiClient, err := apiClient.GetKibanaOapiClient() + require.NoError(t, err) + + connector, diags := kibana_oapi.SearchConnectors(context.Background(), oapiClient, "my-connector", "default", "") + require.Nil(t, diags) + require.NotNil(t, connector) + + mockResponses = append(mockResponses, getConnectorsResponse) + failConnector, diags := kibana_oapi.SearchConnectors(context.Background(), oapiClient, "failwhale", "default", "") + require.Nil(t, diags) + require.Empty(t, failConnector) + + mockResponses = append(mockResponses, getConnectorsResponse) + dupConnector, diags := kibana_oapi.SearchConnectors(context.Background(), oapiClient, "doubledup-connector", "default", "") + require.Nil(t, diags) + require.Len(t, dupConnector, 2) + + mockResponses = append(mockResponses, getConnectorsResponse) + wrongConnectorType, diags := kibana_oapi.SearchConnectors(context.Background(), oapiClient, "my-connector", "default", ".slack") + require.Nil(t, diags) + require.Empty(t, wrongConnectorType) + + mockResponses = append(mockResponses, getConnectorsResponse) + successConnector, diags := kibana_oapi.SearchConnectors(context.Background(), oapiClient, "my-connector", "default", ".index") + require.Nil(t, diags) + require.Len(t, successConnector, 1) + + mockResponses = append(mockResponses, emptyConnectorsResponse) + emptyConnector, diags := kibana_oapi.SearchConnectors(context.Background(), oapiClient, "my-connector", "default", "") + require.Nil(t, diags) + require.Empty(t, emptyConnector) + + httpStatus = http.StatusBadGateway + mockResponses = append(mockResponses, emptyConnectorsResponse) + fail, diags := kibana_oapi.SearchConnectors(context.Background(), oapiClient, "my-connector", "default", "") + require.NotNil(t, diags) + require.Nil(t, fail) +} diff --git a/internal/clients/kibana_oapi/errors.go b/internal/clients/kibana_oapi/errors.go index 08aef219d..d00822fc8 100644 --- a/internal/clients/kibana_oapi/errors.go +++ b/internal/clients/kibana_oapi/errors.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/hashicorp/terraform-plugin-framework/diag" + sdkdiag "github.com/hashicorp/terraform-plugin-sdk/v2/diag" ) func reportUnknownError(statusCode int, body []byte) diag.Diagnostics { @@ -14,3 +15,13 @@ func reportUnknownError(statusCode int, body []byte) diag.Diagnostics { ), } } + +func reportUnknownErrorSDK(statusCode int, body []byte) sdkdiag.Diagnostics { + return sdkdiag.Diagnostics{ + sdkdiag.Diagnostic{ + Severity: sdkdiag.Error, + Summary: fmt.Sprintf("Unexpected status code from server: got HTTP %d", statusCode), + Detail: string(body), + }, + } +} diff --git a/internal/kibana/connector.go b/internal/kibana/connector.go index 37d86c333..419e1500d 100644 --- a/internal/kibana/connector.go +++ b/internal/kibana/connector.go @@ -4,22 +4,26 @@ import ( "context" 
"github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/go-version" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +var MinVersionSupportingPreconfiguredIDs = version.Must(version.NewVersion("8.8.0")) + func ResourceActionConnector() *schema.Resource { var connectorSchema = map[string]*schema.Schema{ "connector_id": { - Description: "A UUID v1 or v4 to use instead of a randomly generated ID.", - Type: schema.TypeString, - Computed: true, - Optional: true, - ForceNew: true, + Description: "A UUID v1 or v4 to use instead of a randomly generated ID.", + Type: schema.TypeString, + Computed: true, + Optional: true, + ForceNew: true, + ValidateFunc: validation.IsUUID, }, "space_id": { Description: "An identifier for the space. If space_id is not provided, the default space is used.", @@ -116,7 +120,7 @@ func connectorCustomizeDiff(ctx context.Context, rd *schema.ResourceDiff, in int stateJSON := state.AsString() - customJSON, err := kibana.ConnectorConfigWithDefaults(oldTypeID, newJSON, oldJSON, stateJSON) + customJSON, err := kibana_oapi.ConnectorConfigWithDefaults(oldTypeID, newJSON, oldJSON, stateJSON) if err != nil { return err } @@ -129,12 +133,26 @@ func resourceConnectorCreate(ctx context.Context, d *schema.ResourceData, meta i return diags } + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + return diag.FromErr(err) + } + connectorOld, diags := expandActionConnector(d) if diags.HasError() { return diags } - connectorID, diags := kibana.CreateConnector(ctx, client, connectorOld) + version, diags := client.ServerVersion(ctx) + if diags.HasError() { + return diags + } + + if connectorOld.ConnectorID != "" && version.LessThan(MinVersionSupportingPreconfiguredIDs) { + return diag.Errorf("Preconfigured connector IDs are only supported for Elastic Stack v%s and above. 
Either remove the `connector_id` attribute or upgrade your target cluster to a supported version", MinVersionSupportingPreconfiguredIDs) + } + + connectorID, diags := kibana_oapi.CreateConnector(ctx, oapiClient, connectorOld) if diags.HasError() { return diags @@ -152,6 +170,11 @@ func resourceConnectorUpdate(ctx context.Context, d *schema.ResourceData, meta i return diags } + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + return diag.FromErr(err) + } + connectorOld, diags := expandActionConnector(d) if diags.HasError() { return diags @@ -163,7 +186,7 @@ func resourceConnectorUpdate(ctx context.Context, d *schema.ResourceData, meta i } connectorOld.ConnectorID = compositeIDold.ResourceId - connectorID, diags := kibana.UpdateConnector(ctx, client, connectorOld) + connectorID, diags := kibana_oapi.UpdateConnector(ctx, oapiClient, connectorOld) if diags.HasError() { return diags @@ -181,12 +204,17 @@ func resourceConnectorRead(ctx context.Context, d *schema.ResourceData, meta int return diags } + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + return diag.FromErr(err) + } + compositeID, diags := clients.CompositeIdFromStr(d.Id()) if diags.HasError() { return diags } - connector, diags := kibana.GetConnector(ctx, client, compositeID.ResourceId, compositeID.ClusterId) + connector, diags := kibana_oapi.GetConnector(ctx, oapiClient, compositeID.ResourceId, compositeID.ClusterId) if connector == nil && diags == nil { d.SetId("") return diags @@ -204,6 +232,11 @@ func resourceConnectorDelete(ctx context.Context, d *schema.ResourceData, meta i return diags } + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + return diag.FromErr(err) + } + compositeID, diags := clients.CompositeIdFromStr(d.Id()) if diags.HasError() { return diags @@ -211,7 +244,7 @@ func resourceConnectorDelete(ctx context.Context, d *schema.ResourceData, meta i spaceId := d.Get("space_id").(string) - if diags := kibana.DeleteConnector(ctx, client, compositeID.ResourceId, spaceId); diags.HasError() { + if diags := kibana_oapi.DeleteConnector(ctx, oapiClient, compositeID.ResourceId, spaceId); diags.HasError() { return diags } @@ -223,6 +256,7 @@ func expandActionConnector(d *schema.ResourceData) (models.KibanaActionConnector var diags diag.Diagnostics connector := models.KibanaActionConnector{ + ConnectorID: d.Get("connector_id").(string), SpaceID: d.Get("space_id").(string), Name: d.Get("name").(string), ConnectorTypeID: d.Get("connector_type_id").(string), diff --git a/internal/kibana/connector_data_source.go b/internal/kibana/connector_data_source.go index 03aff8a25..07ec1a249 100644 --- a/internal/kibana/connector_data_source.go +++ b/internal/kibana/connector_data_source.go @@ -4,7 +4,7 @@ import ( "context" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -66,11 +66,15 @@ func datasourceConnectorRead(ctx context.Context, d *schema.ResourceData, meta i if diags.HasError() { return diags } + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + return diag.FromErr(err) + } connectorName := d.Get("name").(string) spaceId := d.Get("space_id").(string) connectorType := d.Get("connector_type_id").(string) - foundConnectors, diags := kibana.SearchConnectors(ctx, 
client, connectorName, spaceId, connectorType) + foundConnectors, diags := kibana_oapi.SearchConnectors(ctx, oapiClient, connectorName, spaceId, connectorType) if diags.HasError() { return diags } diff --git a/internal/kibana/connector_test.go b/internal/kibana/connector_test.go index c98658a16..172b75b78 100644 --- a/internal/kibana/connector_test.go +++ b/internal/kibana/connector_test.go @@ -8,8 +8,10 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/elastic/terraform-provider-elasticstack/internal/kibana" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" + "github.com/google/uuid" "github.com/hashicorp/go-version" sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" @@ -21,7 +23,11 @@ func TestAccResourceKibanaConnectorCasesWebhook(t *testing.T) { connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - create := func(name string) string { + create := func(name, id string) string { + idAttribute := "" + if id != "" { + idAttribute = fmt.Sprintf(`connector_id = "%s"`, id) + } return fmt.Sprintf(` provider "elasticstack" { elasticsearch {} @@ -30,6 +36,7 @@ func TestAccResourceKibanaConnectorCasesWebhook(t *testing.T) { resource "elasticstack_kibana_action_connector" "test" { name = "%s" + %s config = jsonencode({ createIncidentJson = "{}" createIncidentResponseKey = "key" @@ -46,10 +53,14 @@ func TestAccResourceKibanaConnectorCasesWebhook(t *testing.T) { }) connector_type_id = ".cases-webhook" }`, - name) + name, idAttribute) } - update := func(name string) string { + update := func(name, id string) string { + idAttribute := "" + if id != "" { + idAttribute = fmt.Sprintf(`connector_id = "%s"`, id) + } return fmt.Sprintf(` provider "elasticstack" { elasticsearch {} @@ -58,6 +69,7 @@ func TestAccResourceKibanaConnectorCasesWebhook(t *testing.T) { resource "elasticstack_kibana_action_connector" "test" { name = "Updated %s" + %s config = jsonencode({ createIncidentJson = "{}" createIncidentResponseKey = "key" @@ -75,57 +87,81 @@ func TestAccResourceKibanaConnectorCasesWebhook(t *testing.T) { }) connector_type_id = ".cases-webhook" }`, - name) + name, idAttribute) + } + + for _, connectorID := range []string{"", uuid.NewString()} { + t.Run(fmt.Sprintf("with connector ID '%s'", connectorID), func(t *testing.T) { + minVersion := minSupportedVersion + if connectorID != "" { + minVersion = kibana.MinVersionSupportingPreconfiguredIDs + } + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + CheckDestroy: checkResourceKibanaConnectorDestroy, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minVersion), + Config: create(connectorName, connectorID), + Check: resource.ComposeTestCheckFunc( + testCommonAttributes(connectorName, ".cases-webhook"), + + resource.TestCheckResourceAttrWith("elasticstack_kibana_action_connector.test", "connector_id", func(value string) error { + if connectorID == "" { + if _, err := uuid.Parse(value); err != nil { + return fmt.Errorf("expected connector_id to be a uuid: %w", err) + } + + return nil + } + + if connectorID != value { + return 
fmt.Errorf("expected connector_id to match pre-defined id. '%s' != %s", connectorID, value) + } + + return nil + }), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentJson\":\"{}\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentResponseKey\":\"key\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentUrl\":\"https://www\.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentResponseExternalTitleKey\":\"title\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentUrl\":\"https://www\.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentJson\":\"{}\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentUrl\":\"https://www.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"viewIncidentUrl\":\"https://www\.elastic\.co/\"`)), + // `post` is the default value that is returned by backend + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`"createIncidentMethod\":\"post\"`)), + + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"user\":\"user1\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password1\"`)), + ), + }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minVersion), + Config: update(connectorName, connectorID), + Check: resource.ComposeTestCheckFunc( + testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".cases-webhook"), + + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentJson\":\"{}\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentResponseKey\":\"key\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentUrl\":\"https://www\.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentResponseExternalTitleKey\":\"title\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentUrl\":\"https://www\.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentJson\":\"{}\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentUrl\":\"https://elasticsearch\.com/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"viewIncidentUrl\":\"https://www\.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`createIncidentMethod\":\"put\"`)), + + 
resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"user\":\"user2\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password2\"`)), + ), + }, + }, + }) + }) } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".cases-webhook"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentJson\":\"{}\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentResponseKey\":\"key\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentUrl\":\"https://www\.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentResponseExternalTitleKey\":\"title\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentUrl\":\"https://www\.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentJson\":\"{}\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentUrl\":\"https://www.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"viewIncidentUrl\":\"https://www\.elastic\.co/\"`)), - // `post` is the default value that is returned by backend - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`"createIncidentMethod\":\"post\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"user\":\"user1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".cases-webhook"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentJson\":\"{}\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentResponseKey\":\"key\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentUrl\":\"https://www\.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentResponseExternalTitleKey\":\"title\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentUrl\":\"https://www\.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", 
"config", regexp.MustCompile(`\"updateIncidentJson\":\"{}\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentUrl\":\"https://elasticsearch\.com/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"viewIncidentUrl\":\"https://www\.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`createIncidentMethod\":\"put\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"user\":\"user2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password2\"`)), - ), - }, - }, - }) } func TestAccResourceKibanaConnectorEmail(t *testing.T) { @@ -1549,13 +1585,18 @@ func checkResourceKibanaConnectorDestroy(s *terraform.State) error { return err } + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + return err + } + for _, rs := range s.RootModule().Resources { if rs.Type != "elasticstack_kibana_action_connector" { continue } compId, _ := clients.CompositeIdFromStr(rs.Primary.ID) - connector, diags := kibana.GetConnector(context.Background(), client, compId.ResourceId, compId.ClusterId) + connector, diags := kibana_oapi.GetConnector(context.Background(), oapiClient, compId.ResourceId, compId.ClusterId) if diags.HasError() { return fmt.Errorf("Failed to get connector: %v", diags) } From 9c9a969e2923f98bbc4e838f3e5f83ba94d5bee4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Aug 2025 09:23:26 +1000 Subject: [PATCH 30/66] Bump github.com/ulikunitz/xz from 0.5.12 to 0.5.14 (#1264) Bumps [github.com/ulikunitz/xz](https://github.com/ulikunitz/xz) from 0.5.12 to 0.5.14. - [Commits](https://github.com/ulikunitz/xz/compare/v0.5.12...v0.5.14) --- updated-dependencies: - dependency-name: github.com/ulikunitz/xz dependency-version: 0.5.14 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 ++-- go.sum | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/go.mod b/go.mod index 5a3b292f0..235a6dfc7 100644 --- a/go.mod +++ b/go.mod @@ -20,7 +20,6 @@ require ( github.com/hashicorp/terraform-plugin-mux v0.20.0 github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c - github.com/oapi-codegen/oapi-codegen/v2 v2.5.0 github.com/oapi-codegen/runtime v1.1.2 github.com/stretchr/testify v1.11.0 go.uber.org/mock v0.6.0 @@ -287,6 +286,7 @@ require ( github.com/multiformats/go-multibase v0.2.0 // indirect github.com/multiformats/go-multihash v0.2.3 // indirect github.com/multiformats/go-varint v0.0.7 // indirect + github.com/oapi-codegen/oapi-codegen/v2 v2.5.0 // indirect github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect github.com/oklog/run v1.1.0 // indirect @@ -339,7 +339,7 @@ require ( github.com/titanous/rocacheck v0.0.0-20171023193734-afe73141d399 // indirect github.com/tomnomnom/linkheader v0.0.0-20180905144013-02ca5825eb80 // indirect github.com/transparency-dev/merkle v0.0.2 // indirect - github.com/ulikunitz/xz v0.5.12 // indirect + github.com/ulikunitz/xz v0.5.14 // indirect github.com/vbatts/tar-split v0.12.1 // indirect github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect github.com/vmihailenco/msgpack/v5 v5.4.1 // indirect diff --git a/go.sum b/go.sum index 6e0ebf2a9..ccc107a1d 100644 --- a/go.sum +++ b/go.sum @@ -1008,8 +1008,8 @@ github.com/transparency-dev/merkle v0.0.2 h1:Q9nBoQcZcgPamMkGn7ghV8XiTZ/kRxn1yCG github.com/transparency-dev/merkle v0.0.2/go.mod h1:pqSy+OXefQ1EDUVmAJ8MUhHB9TXGuzVAT58PqBoHz1A= github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= -github.com/ulikunitz/xz v0.5.12 h1:37Nm15o69RwBkXM0J6A5OlE67RZTfzUxTj8fB3dfcsc= -github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= +github.com/ulikunitz/xz v0.5.14 h1:uv/0Bq533iFdnMHZdRBTOlaNMdb1+ZxXIlHDZHIHcvg= +github.com/ulikunitz/xz v0.5.14/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/urfave/cli v1.22.10/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/vbatts/tar-split v0.12.1 h1:CqKoORW7BUWBe7UL/iqTVvkTBOF8UvOMKOIZykxnnbo= github.com/vbatts/tar-split v0.12.1/go.mod h1:eF6B6i6ftWQcDqEn3/iGFRFRo8cBIMSJVOpnNdfTMFA= From 38f7fdd83efe6d42fddc3f88d58e18d9d39e240c Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sat, 30 Aug 2025 15:05:13 +0000 Subject: [PATCH 31/66] chore(deps): update docker.elastic.co/kibana/kibana docker tag to v9.1.3 (#1263) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- libs/go-kibana-rest/docker-compose.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index f7e3a3af4..6e2478f2b 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -48,7 +48,7 @@ jobs: - 9200:9200 options: --health-cmd="curl http://localhost:9200/_cluster/health" 
--health-interval=10s --health-timeout=5s --health-retries=10 kibana: - image: docker.elastic.co/kibana/kibana:9.1.2@sha256:dea5d20df42e6833966deceadb10ecdbf85970b704d17b0abfc3b485622c1a08 + image: docker.elastic.co/kibana/kibana:9.1.3@sha256:26792c8e4a68ba0bff3efcc46755f60bf36bb16b2431014c210f2546ca1819ad env: SERVER_NAME: kibana ELASTICSEARCH_HOSTS: http://elasticsearch:9200 diff --git a/libs/go-kibana-rest/docker-compose.yml b/libs/go-kibana-rest/docker-compose.yml index 515a53e3c..5ac0162e0 100644 --- a/libs/go-kibana-rest/docker-compose.yml +++ b/libs/go-kibana-rest/docker-compose.yml @@ -11,7 +11,7 @@ services: ports: - "9200:9200/tcp" set-kibana-password: - image: docker.elastic.co/kibana/kibana:9.0.3@sha256:c4c00a485fbc3619d8373f3bc74e9dd5b5a34380ef50442be4366e8fb57cd50a + image: docker.elastic.co/kibana/kibana:9.1.3@sha256:26792c8e4a68ba0bff3efcc46755f60bf36bb16b2431014c210f2546ca1819ad restart: on-failure links: - elasticsearch @@ -22,7 +22,7 @@ services: elasticsearch: condition: service_started kibana: - image: docker.elastic.co/kibana/kibana:9.0.3@sha256:c4c00a485fbc3619d8373f3bc74e9dd5b5a34380ef50442be4366e8fb57cd50a + image: docker.elastic.co/kibana/kibana:9.1.3@sha256:26792c8e4a68ba0bff3efcc46755f60bf36bb16b2431014c210f2546ca1819ad environment: SERVER_NAME: kibana ELASTICSEARCH_HOSTS: http://es:9200 From 5a7623969dfa103a3e1cba36ac56ad762d3a4ed3 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sun, 31 Aug 2025 13:36:27 +0000 Subject: [PATCH 32/66] chore(deps): update golang:1.25.0 docker digest to 5502b0e (#1267) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .buildkite/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/release.yml b/.buildkite/release.yml index aee79f693..06d518b1e 100644 --- a/.buildkite/release.yml +++ b/.buildkite/release.yml @@ -1,7 +1,7 @@ steps: - label: Release agents: - image: "golang:1.25.0@sha256:9e56f0d0f043a68bb8c47c819e47dc29f6e8f5129b8885bed9d43f058f7f3ed6" + image: "golang:1.25.0@sha256:5502b0e56fca23feba76dbc5387ba59c593c02ccc2f0f7355871ea9a0852cebe" cpu: "16" memory: "24G" ephemeralStorage: "20G" From 36f0b42dc4454ef4ac13ff1544b9b93c5c263034 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Mon, 1 Sep 2025 23:10:38 +1000 Subject: [PATCH 33/66] Don't force replacement when changing integration versions (#1255) * Don't force replacement when changing integration versions * Changelog --- CHANGELOG.md | 1 + internal/fleet/integration/read.go | 1 - internal/fleet/integration/resource_test.go | 100 +++++++++++++++++++- internal/fleet/integration/schema.go | 3 - 4 files changed, 100 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 01b9fadea..f95a29eaa 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ - Add `slo_id` validation to `elasticstack_kibana_slo` ([#1221](https://github.com/elastic/terraform-provider-elasticstack/pull/1221)) - Add `ignore_missing_component_templates` to `elasticstack_elasticsearch_index_template` ([#1206](https://github.com/elastic/terraform-provider-elasticstack/pull/1206)) - Prevent provider panic when a script exists in state, but not in Elasticsearch ([#1218](https://github.com/elastic/terraform-provider-elasticstack/pull/1218)) +- Allow version changes without a destroy/create cycle with `elasticstack_fleet_integration` ([#1255](https://github.com/elastic/terraform-provider-elasticstack/pull/1255)). 
This fixes an issue where it was impossible to upgrade integrations which are used by an integration policy. - Add `namespace` attribute to `elasticstack_kibana_synthetics_monitor` resource to support setting data stream namespace independently from `space_id` ([#1247](https://github.com/elastic/terraform-provider-elasticstack/pull/1247)) ## [0.11.17] - 2025-07-21 diff --git a/internal/fleet/integration/read.go b/internal/fleet/integration/read.go index f8ebb0434..50fb9fe53 100644 --- a/internal/fleet/integration/read.go +++ b/internal/fleet/integration/read.go @@ -28,7 +28,6 @@ func (r *integrationResource) Read(ctx context.Context, req resource.ReadRequest pkg, diags := fleet.GetPackage(ctx, client, name, version) resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { - resp.State.RemoveResource(ctx) return } if pkg.Status != nil && *pkg.Status != "installed" { diff --git a/internal/fleet/integration/resource_test.go b/internal/fleet/integration/resource_test.go index a2d540743..18a505d5a 100644 --- a/internal/fleet/integration/resource_test.go +++ b/internal/fleet/integration/resource_test.go @@ -2,6 +2,7 @@ package integration_test import ( "context" + "fmt" "regexp" "testing" @@ -10,11 +11,15 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients/fleet" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" + sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/stretchr/testify/require" ) -var minVersionIntegration = version.Must(version.NewVersion("8.6.0")) +var ( + minVersionIntegration = version.Must(version.NewVersion("8.6.0")) + minVersionIntegrationPolicy = version.Must(version.NewVersion("8.10.0")) +) func TestAccResourceIntegrationFromSDK(t *testing.T) { resource.Test(t, resource.TestCase{ @@ -72,6 +77,40 @@ func TestAccResourceIntegration(t *testing.T) { }) } +func TestAccResourceIntegrationWithPolicy(t *testing.T) { + policyName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minVersionIntegrationPolicy), + Config: testAccResourceIntegrationWithPolicy(policyName, "1.16.0"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_fleet_integration.test_integration", "name", "tcp"), + resource.TestCheckResourceAttr("elasticstack_fleet_integration.test_integration", "version", "1.16.0"), + ), + }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minVersionIntegrationPolicy), + Config: testAccResourceIntegrationWithPolicy(policyName, "1.17.0"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_fleet_integration.test_integration", "name", "tcp"), + resource.TestCheckResourceAttr("elasticstack_fleet_integration.test_integration", "version", "1.17.0"), + ), + }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minVersionIntegrationPolicy), + ResourceName: "elasticstack_fleet_integration.test_integration", + Config: testAccResourceIntegrationWithPolicy(policyName, "1.17.0"), + ImportState: true, + ImportStateVerify: true, + ExpectError: regexp.MustCompile("Resource Import Not Implemented"), + }, + }, + }) +} + func TestAccResourceIntegrationDeleted(t *testing.T) { resource.Test(t, 
resource.TestCase{ PreCheck: func() { acctest.PreCheck(t) }, @@ -122,6 +161,65 @@ resource "elasticstack_fleet_integration" "test_integration" { } ` +func testAccResourceIntegrationWithPolicy(policyName, version string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +resource "elasticstack_fleet_integration" "test_integration" { + name = "tcp" + version = "%s" + force = true + skip_destroy = true +} + +// An agent policy to hold the integration policy. +resource "elasticstack_fleet_agent_policy" "sample" { + name = "%s" + namespace = "default" + description = "A sample agent policy" + monitor_logs = true + monitor_metrics = true + skip_destroy = false +} + +// The associated enrollment token. +data "elasticstack_fleet_enrollment_tokens" "sample" { + policy_id = elasticstack_fleet_agent_policy.sample.policy_id +} + +// The integration policy. +resource "elasticstack_fleet_integration_policy" "sample" { + name = "%s" + namespace = "default" + description = "A sample integration policy" + agent_policy_id = elasticstack_fleet_agent_policy.sample.policy_id + integration_name = elasticstack_fleet_integration.test_integration.name + integration_version = elasticstack_fleet_integration.test_integration.version + + input { + input_id = "tcp-tcp" + streams_json = jsonencode({ + "tcp.generic" : { + "enabled" : true, + "vars" : { + "listen_address" : "localhost", + "listen_port" : 8080, + "data_stream.dataset" : "tcp.generic", + "tags" : [], + "syslog_options" : "field: message\n#format: auto\n#timezone: Local\n", + "ssl" : "#certificate: |\n# -----BEGIN CERTIFICATE-----\n# ...\n# -----END CERTIFICATE-----\n#key: |\n# -----BEGIN PRIVATE KEY-----\n# ...\n# -----END PRIVATE KEY-----\n", + "custom" : "" + } + } + }) + } +} +`, version, policyName, policyName) +} + const testAccResourceIntegrationDeleted = ` provider "elasticstack" { elasticsearch {} diff --git a/internal/fleet/integration/schema.go b/internal/fleet/integration/schema.go index ef87f4a85..17ef3faf8 100644 --- a/internal/fleet/integration/schema.go +++ b/internal/fleet/integration/schema.go @@ -26,9 +26,6 @@ func (r *integrationResource) Schema(ctx context.Context, req resource.SchemaReq "version": schema.StringAttribute{ Description: "The integration package version.", Required: true, - PlanModifiers: []planmodifier.String{ - stringplanmodifier.RequiresReplace(), - }, }, "force": schema.BoolAttribute{ Description: "Set to true to force the requested action.", From 85cf3f4f3a5c884ee4968a5588e8a8a995c8fa24 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Mon, 1 Sep 2025 23:13:35 +1000 Subject: [PATCH 34/66] Add support for solution field in elasticsearch_kibana_space resource and data source (#1210) * Initial plan * Add solution field support to Kibana space resource and data source Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Update documentation and changelog for solution field support Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Add version checking for solution field in Kibana space resource Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Add version 8.18.3 to test matrix and SkipFunc for solution field tests Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Add Computed: true to disabled_features and solution fields in resource and data source Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Regenerate documentation using make docs-generate Co-authored-by: tobio 
<444668+tobio@users.noreply.github.com> * Use make target for acc test services --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: tobio <444668+tobio@users.noreply.github.com> Co-authored-by: Toby Brain --- .github/workflows/copilot-setup-steps.yml | 59 +------------------ .github/workflows/test.yml | 1 + CHANGELOG.md | 1 + docs/data-sources/kibana_spaces.md | 3 +- docs/resources/kibana_space.md | 1 + internal/kibana/space.go | 30 ++++++++++ internal/kibana/space_test.go | 27 +++++++++ internal/kibana/spaces/models.go | 1 + internal/kibana/spaces/read.go | 1 + internal/kibana/spaces/schema.go | 6 +- .../go-kibana-rest/kbapi/api.kibana_spaces.go | 1 + .../kbapi/api.kibana_spaces_test.go | 1 + 12 files changed, 74 insertions(+), 58 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index 6e2478f2b..cc08bc2b9 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -21,62 +21,6 @@ jobs: permissions: # If you want to clone the repository as part of your setup steps, for example to install dependencies, you'll need the `contents: read` permission. If you don't clone the repository in your setup steps, Copilot will do this for you automatically after the steps complete. contents: read - env: - ELASTICSEARCH_ENDPOINTS: "http://localhost:9200" - ELASTICSEARCH_USERNAME: "elastic" - ELASTICSEARCH_PASSWORD: password - KIBANA_ENDPOINT: "http://localhost:5601" - KIBANA_USERNAME: "elastic" - KIBANA_PASSWORD: password - KIBANA_SYSTEM_USERNAME: kibana_system - KIBANA_SYSTEM_PASSWORD: password - TF_ACC: "1" - services: - elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:9.1.2@sha256:d1a8016cf55be8ffec635ed69f5a9acb0c459db35b46a4549ec5b2847a2f170a - env: - discovery.type: single-node - xpack.security.enabled: true - xpack.security.authc.api_key.enabled: true - xpack.security.authc.token.enabled: true - xpack.watcher.enabled: true - xpack.license.self_generated.type: trial - repositories.url.allowed_urls: https://example.com/* - path.repo: /tmp - ELASTIC_PASSWORD: ${{ env.ELASTICSEARCH_PASSWORD }} - ports: - - 9200:9200 - options: --health-cmd="curl http://localhost:9200/_cluster/health" --health-interval=10s --health-timeout=5s --health-retries=10 - kibana: - image: docker.elastic.co/kibana/kibana:9.1.3@sha256:26792c8e4a68ba0bff3efcc46755f60bf36bb16b2431014c210f2546ca1819ad - env: - SERVER_NAME: kibana - ELASTICSEARCH_HOSTS: http://elasticsearch:9200 - ELASTICSEARCH_USERNAME: ${{ env.KIBANA_SYSTEM_USERNAME }} - ELASTICSEARCH_PASSWORD: ${{ env.KIBANA_SYSTEM_PASSWORD }} - XPACK_ENCRYPTEDSAVEDOBJECTS_ENCRYPTIONKEY: a7a6311933d3503b89bc2dbc36572c33a6c10925682e591bffcab6911c06786d - # LOGGING_ROOT_LEVEL: debug - ports: - - 5601:5601 - options: --health-cmd="curl http://localhost:5601/api/status" --health-interval=10s --health-timeout=5s --health-retries=10 - fleet: - image: docker.elastic.co/elastic-agent/elastic-agent:9.1.2@sha256:942aa0ffe94c268aab83881fc8be0ca0af079c395820ce8e7552f0ce97e0a760 - env: - SERVER_NAME: fleet - FLEET_ENROLL: "1" - FLEET_URL: https://fleet:8220 - FLEET_INSECURE: "true" - FLEET_SERVER_ENABLE: "1" - FLEET_SERVER_POLICY_ID: fleet-server - FLEET_SERVER_ELASTICSEARCH_HOST: http://elasticsearch:9200 - FLEET_SERVER_ELASTICSEARCH_INSECURE: "true" - FLEET_SERVER_INSECURE_HTTP: "true" - KIBANA_HOST: http://kibana:5601 - KIBANA_FLEET_SETUP: "1" - KIBANA_FLEET_PASSWORD: ${{ env.ELASTICSEARCH_PASSWORD }} - 
ports: - - 8220:8220 - options: --restart="unless-stopped" steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 @@ -88,6 +32,9 @@ jobs: with: terraform_wrapper: false + - name: Setup Elastic Stack + run: make docker-fleet + - name: Get dependencies run: make setup diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index a44ac8897..cf08fedc0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -126,6 +126,7 @@ jobs: - '8.15.5' - '8.16.2' - '8.17.0' + - '8.18.3' - '9.0.3' steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 diff --git a/CHANGELOG.md b/CHANGELOG.md index f95a29eaa..5885eb103 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,6 @@ ## [Unreleased] +- Add support for `solution` field in `elasticstack_kibana_space` resource and data source ([#1102](https://github.com/elastic/terraform-provider-elasticstack/issues/1102)) - Add `slo_id` validation to `elasticstack_kibana_slo` ([#1221](https://github.com/elastic/terraform-provider-elasticstack/pull/1221)) - Add `ignore_missing_component_templates` to `elasticstack_elasticsearch_index_template` ([#1206](https://github.com/elastic/terraform-provider-elasticstack/pull/1206)) - Prevent provider panic when a script exists in state, but not in Elasticsearch ([#1218](https://github.com/elastic/terraform-provider-elasticstack/pull/1218)) diff --git a/docs/data-sources/kibana_spaces.md b/docs/data-sources/kibana_spaces.md index 4bb1d0120..efcfdc282 100644 --- a/docs/data-sources/kibana_spaces.md +++ b/docs/data-sources/kibana_spaces.md @@ -41,11 +41,12 @@ Required: Optional: - `description` (String) The description for the space. -- `disabled_features` (List of String) The list of disabled features for the space. To get a list of available feature IDs, use the Features API (https://www.elastic.co/guide/en/kibana/master/features-api-get.html). - `image_url` (String) The data-URL encoded image to display in the space avatar. Read-Only: - `color` (String) The hexadecimal color code used in the space avatar. By default, the color is automatically generated from the space name. +- `disabled_features` (List of String) The list of disabled features for the space. To get a list of available feature IDs, use the Features API (https://www.elastic.co/guide/en/kibana/master/features-api-get.html). - `id` (String) Internal identifier of the resource. - `initials` (String) The initials shown in the space avatar. By default, the initials are automatically generated from the space name. Initials must be 1 or 2 characters. +- `solution` (String) The solution view for the space. Valid options are `security`, `oblt`, `es`, or `classic`. diff --git a/docs/resources/kibana_space.md b/docs/resources/kibana_space.md index 8dbca8e03..c8b1b30a1 100644 --- a/docs/resources/kibana_space.md +++ b/docs/resources/kibana_space.md @@ -41,6 +41,7 @@ resource "elasticstack_kibana_space" "example" { - `disabled_features` (Set of String) The list of disabled features for the space. To get a list of available feature IDs, use the Features API (https://www.elastic.co/guide/en/kibana/master/features-api-get.html). - `image_url` (String) The data-URL encoded image to display in the space avatar. - `initials` (String) The initials shown in the space avatar. By default, the initials are automatically generated from the space name. Initials must be 1 or 2 characters. +- `solution` (String) The solution view for the space. Valid options are `security`, `oblt`, `es`, or `classic`. 
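As a rough sketch of how the new attribute can be used (the space id and name here are placeholders; the attribute names and valid values are taken from the schema documented above), a space pinned to the Observability solution view might be declared as:

```terraform
resource "elasticstack_kibana_space" "example" {
  space_id = "oblt-space"
  name     = "Observability"
  solution = "oblt"
}
```

On stacks older than 8.18.0 the provider rejects a non-empty `solution`, per the version check introduced in `internal/kibana/space.go` below.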
### Read-Only diff --git a/internal/kibana/space.go b/internal/kibana/space.go index 55ea8f7c8..908ea1358 100644 --- a/internal/kibana/space.go +++ b/internal/kibana/space.go @@ -6,11 +6,14 @@ import ( "github.com/disaster37/go-kibana-rest/v8/kbapi" "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/hashicorp/go-version" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +var SpaceSolutionMinVersion = version.Must(version.NewVersion("8.18.0")) + func ResourceSpace() *schema.Resource { apikeySchema := map[string]*schema.Schema{ "id": { @@ -38,6 +41,7 @@ func ResourceSpace() *schema.Resource { Description: "The list of disabled features for the space. To get a list of available feature IDs, use the Features API (https://www.elastic.co/guide/en/kibana/master/features-api-get.html).", Type: schema.TypeSet, Optional: true, + Computed: true, Elem: &schema.Schema{ Type: schema.TypeString, }, @@ -61,6 +65,13 @@ func ResourceSpace() *schema.Resource { Optional: true, ValidateFunc: validation.StringMatch(regexp.MustCompile("^data:image/"), "must be a valid data-URL encoded image"), }, + "solution": { + Description: "The solution view for the space. Valid options are `security`, `oblt`, `es`, or `classic`.", + Type: schema.TypeString, + Optional: true, + Computed: true, + ValidateFunc: validation.StringInSlice([]string{"security", "oblt", "es", "classic"}, false), + }, } return &schema.Resource{ @@ -90,6 +101,18 @@ func resourceSpaceUpsert(ctx context.Context, d *schema.ResourceData, meta inter return diag.FromErr(err) } + // Check version compatibility for solution field + if solution, ok := d.GetOk("solution"); ok && solution.(string) != "" { + serverVersion, diags := client.ServerVersion(ctx) + if diags.HasError() { + return diags + } + + if !serverVersion.GreaterThanOrEqual(SpaceSolutionMinVersion) { + return diag.Errorf("solution field is not supported in this version of the Elastic Stack. 
Solution field requires %s or higher", SpaceSolutionMinVersion) + } + } + space := kbapi.KibanaSpace{ ID: d.Get("space_id").(string), Name: d.Get("name").(string), @@ -120,6 +143,10 @@ func resourceSpaceUpsert(ctx context.Context, d *schema.ResourceData, meta inter space.ImageURL = imageUrl.(string) } + if solution, ok := d.GetOk("solution"); ok { + space.Solution = solution.(string) + } + var spaceResponse *kbapi.KibanaSpace if d.IsNewResource() { @@ -182,6 +209,9 @@ func resourceSpaceRead(ctx context.Context, d *schema.ResourceData, meta interfa if err := d.Set("color", space.Color); err != nil { return diag.FromErr(err) } + if err := d.Set("solution", space.Solution); err != nil { + return diag.FromErr(err) + } return diags } diff --git a/internal/kibana/space_test.go b/internal/kibana/space_test.go index 9a53286f0..2f3a2cb51 100644 --- a/internal/kibana/space_test.go +++ b/internal/kibana/space_test.go @@ -6,6 +6,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/kibana" + "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" @@ -39,6 +41,16 @@ func TestAccResourceSpace(t *testing.T) { resource.TestCheckResourceAttrSet("elasticstack_kibana_space.test_space", "image_url"), ), }, + { + Config: testAccResourceSpaceWithSolution(spaceId), + SkipFunc: versionutils.CheckIfVersionIsUnsupported(kibana.SpaceSolutionMinVersion), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_space.test_space", "space_id", spaceId), + resource.TestCheckResourceAttr("elasticstack_kibana_space.test_space", "name", fmt.Sprintf("Solution %s", spaceId)), + resource.TestCheckResourceAttr("elasticstack_kibana_space.test_space", "description", "Test Space with Solution"), + resource.TestCheckResourceAttr("elasticstack_kibana_space.test_space", "solution", "security"), + ), + }, { Config: testAccResourceSpaceCreate(spaceId), Check: resource.ComposeTestCheckFunc( @@ -83,6 +95,21 @@ resource "elasticstack_kibana_space" "test_space" { `, id, fmt.Sprintf("Updated %s", id)) } +func testAccResourceSpaceWithSolution(id string) string { + return fmt.Sprintf(` +provider "elasticstack" { + kibana {} +} + +resource "elasticstack_kibana_space" "test_space" { + space_id = "%s" + name = "%s" + description = "Test Space with Solution" + solution = "security" +} + `, id, fmt.Sprintf("Solution %s", id)) +} + func checkResourceSpaceDestroy(s *terraform.State) error { client, err := clients.NewAcceptanceTestingClient() if err != nil { diff --git a/internal/kibana/spaces/models.go b/internal/kibana/spaces/models.go index e7187566f..4503ddf95 100644 --- a/internal/kibana/spaces/models.go +++ b/internal/kibana/spaces/models.go @@ -17,4 +17,5 @@ type model struct { Initials types.String `tfsdk:"initials"` Color types.String `tfsdk:"color"` ImageUrl types.String `tfsdk:"image_url"` + Solution types.String `tfsdk:"solution"` } diff --git a/internal/kibana/spaces/read.go b/internal/kibana/spaces/read.go index ee7879a81..beaca5772 100644 --- a/internal/kibana/spaces/read.go +++ b/internal/kibana/spaces/read.go @@ -27,6 +27,7 @@ func (d *dataSource) Read(ctx context.Context, req datasource.ReadRequest, resp Initials: 
types.StringValue(space.Initials), Color: types.StringValue(space.Color), ImageUrl: types.StringValue(space.ImageURL), + Solution: types.StringValue(space.Solution), } disabledFeatures, diags := types.ListValueFrom(ctx, types.StringType, space.DisabledFeatures) diff --git a/internal/kibana/spaces/schema.go b/internal/kibana/spaces/schema.go index bb08c3958..4b45988a6 100644 --- a/internal/kibana/spaces/schema.go +++ b/internal/kibana/spaces/schema.go @@ -37,7 +37,7 @@ func (d *dataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp "disabled_features": schema.ListAttribute{ Description: "The list of disabled features for the space. To get a list of available feature IDs, use the Features API (https://www.elastic.co/guide/en/kibana/master/features-api-get.html).", ElementType: types.StringType, - Optional: true, + Computed: true, }, "initials": schema.StringAttribute{ Description: "The initials shown in the space avatar. By default, the initials are automatically generated from the space name. Initials must be 1 or 2 characters.", @@ -51,6 +51,10 @@ func (d *dataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp Description: "The data-URL encoded image to display in the space avatar.", Optional: true, }, + "solution": schema.StringAttribute{ + Description: "The solution view for the space. Valid options are `security`, `oblt`, `es`, or `classic`.", + Computed: true, + }, }, }, }, diff --git a/libs/go-kibana-rest/kbapi/api.kibana_spaces.go b/libs/go-kibana-rest/kbapi/api.kibana_spaces.go index 02d9d6374..9b1e7a9c7 100644 --- a/libs/go-kibana-rest/kbapi/api.kibana_spaces.go +++ b/libs/go-kibana-rest/kbapi/api.kibana_spaces.go @@ -23,6 +23,7 @@ type KibanaSpace struct { Initials string `json:"initials,omitempty"` Color string `json:"color,omitempty"` ImageURL string `json:"imageUrl,omitempty"` + Solution string `json:"solution,omitempty"` } // KibanaSpaces is the list of KibanaSpace object diff --git a/libs/go-kibana-rest/kbapi/api.kibana_spaces_test.go b/libs/go-kibana-rest/kbapi/api.kibana_spaces_test.go index c7a4f9a26..6c7f80e30 100644 --- a/libs/go-kibana-rest/kbapi/api.kibana_spaces_test.go +++ b/libs/go-kibana-rest/kbapi/api.kibana_spaces_test.go @@ -22,6 +22,7 @@ func (s *KBAPITestSuite) TestKibanaSpaces() { ID: "test", Name: "test", Description: "My test", + Solution: "security", } kibanaSpace, err = s.KibanaSpaces.Create(kibanaSpace) assert.NoError(s.T(), err) From 94dde4fc9bd0ae7d7019739033604521b099f59d Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Tue, 2 Sep 2025 11:49:21 +1000 Subject: [PATCH 35/66] Migrate Elasticsearch enrich policy resource and data source to Terraform Plugin Framework (#1220) * Initial plan * Implement Plugin Framework migration for enrich policy resource and data source Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Final validation - enrich policy migration complete and tested Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Address PR feedback: refactor code structure and remove old SDK implementation Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Fix code formatting for enrich policy data source Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Regenerate documentation for enrich policy migration Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Use jsontypes.NormalizedType for query field and extract duplicate read logic Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Refactor EnrichPolicy 
models to separate resource and data source types, fix test assertion Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Change indices and enrich_fields to SetAttribute, update data source description Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Update documentation after changing indices and enrich_fields to SetAttribute Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * PR feedback --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: tobio <444668+tobio@users.noreply.github.com> Co-authored-by: Toby Brain --- CHANGELOG.md | 1 + .../elasticsearch_enrich_policy.md | 26 ++- docs/resources/elasticsearch_enrich_policy.md | 4 +- internal/elasticsearch/enrich/acc_test.go | 200 ++++++++++++++++++ internal/elasticsearch/enrich/create.go | 94 ++++++++ internal/elasticsearch/enrich/data_source.go | 122 +++++++++++ internal/elasticsearch/enrich/delete.go | 34 +++ internal/elasticsearch/enrich/models.go | 49 +++++ internal/elasticsearch/enrich/policy.go | 182 ---------------- .../enrich/policy_data_source.go | 76 ------- .../enrich/policy_data_source_test.go | 68 ------ internal/elasticsearch/enrich/policy_test.go | 109 ---------- internal/elasticsearch/enrich/read.go | 53 +++++ internal/elasticsearch/enrich/resource.go | 127 +++++++++++ internal/utils/tfsdk.go | 63 ++++-- internal/utils/tfsdk_test.go | 58 +++++ provider/plugin_framework.go | 3 + provider/provider.go | 3 - 18 files changed, 808 insertions(+), 464 deletions(-) create mode 100644 internal/elasticsearch/enrich/acc_test.go create mode 100644 internal/elasticsearch/enrich/create.go create mode 100644 internal/elasticsearch/enrich/data_source.go create mode 100644 internal/elasticsearch/enrich/delete.go create mode 100644 internal/elasticsearch/enrich/models.go delete mode 100644 internal/elasticsearch/enrich/policy.go delete mode 100644 internal/elasticsearch/enrich/policy_data_source.go delete mode 100644 internal/elasticsearch/enrich/policy_data_source_test.go delete mode 100644 internal/elasticsearch/enrich/policy_test.go create mode 100644 internal/elasticsearch/enrich/read.go create mode 100644 internal/elasticsearch/enrich/resource.go diff --git a/CHANGELOG.md b/CHANGELOG.md index 5885eb103..94fbd16a6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,7 @@ - Add support for `solution` field in `elasticstack_kibana_space` resource and data source ([#1102](https://github.com/elastic/terraform-provider-elasticstack/issues/1102)) - Add `slo_id` validation to `elasticstack_kibana_slo` ([#1221](https://github.com/elastic/terraform-provider-elasticstack/pull/1221)) - Add `ignore_missing_component_templates` to `elasticstack_elasticsearch_index_template` ([#1206](https://github.com/elastic/terraform-provider-elasticstack/pull/1206)) +- Migrate `elasticstack_elasticsearch_enrich_policy` resource and data source to Terraform Plugin Framework ([#1220](https://github.com/elastic/terraform-provider-elasticstack/pull/1220)) - Prevent provider panic when a script exists in state, but not in Elasticsearch ([#1218](https://github.com/elastic/terraform-provider-elasticstack/pull/1218)) - Allow version changes without a destroy/create cycle with `elasticstack_fleet_integration` ([#1255](https://github.com/elastic/terraform-provider-elasticstack/pull/1255)). This fixes an issue where it was impossible to upgrade integrations which are used by an integration policy. 
- Add `namespace` attribute to `elasticstack_kibana_synthetics_monitor` resource to support setting data stream namespace independently from `space_id` ([#1247](https://github.com/elastic/terraform-provider-elasticstack/pull/1247)) diff --git a/docs/data-sources/elasticsearch_enrich_policy.md b/docs/data-sources/elasticsearch_enrich_policy.md index be4f00bb7..9c286ae92 100644 --- a/docs/data-sources/elasticsearch_enrich_policy.md +++ b/docs/data-sources/elasticsearch_enrich_policy.md @@ -74,11 +74,35 @@ output "query" { - `name` (String) The name of the policy. +### Optional + +- `elasticsearch_connection` (Block List, Deprecated) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch_connection)) + ### Read-Only - `enrich_fields` (Set of String) Fields to add to matching incoming documents. These fields must be present in the source indices. - `id` (String) Internal identifier of the resource - `indices` (Set of String) Array of one or more source indices used to create the enrich index. -- `match_field` (String) Field in source indices used to match incoming documents. +- `match_field` (String) Field from the source indices used to match incoming documents. - `policy_type` (String) The type of enrich policy, can be one of geo_match, match, range. - `query` (String) Query used to filter documents in the enrich index. The policy only uses documents matching this query to enrich incoming documents. Defaults to a match_all query. + + +### Nested Schema for `elasticsearch_connection` + +Optional: + +- `api_key` (String, Sensitive) API Key to use for authentication to Elasticsearch +- `bearer_token` (String, Sensitive) Bearer Token to use for authentication to Elasticsearch +- `ca_data` (String) PEM-encoded custom Certificate Authority certificate +- `ca_file` (String) Path to a custom Certificate Authority certificate +- `cert_data` (String) PEM encoded certificate for client auth +- `cert_file` (String) Path to a file containing the PEM encoded certificate for client auth +- `endpoints` (List of String, Sensitive) A list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number. +- `es_client_authentication` (String, Sensitive) ES Client Authentication field to be used with the JWT token +- `headers` (Map of String, Sensitive) A list of headers to be sent with each request to Elasticsearch. +- `insecure` (Boolean) Disable TLS certificate validation +- `key_data` (String, Sensitive) PEM encoded private key for client auth +- `key_file` (String) Path to a file containing the PEM encoded private key for client auth +- `password` (String, Sensitive) Password to use for API authentication to Elasticsearch. +- `username` (String) Username to use for API authentication to Elasticsearch. diff --git a/docs/resources/elasticsearch_enrich_policy.md b/docs/resources/elasticsearch_enrich_policy.md index 0fb926ffa..1845fc54e 100644 --- a/docs/resources/elasticsearch_enrich_policy.md +++ b/docs/resources/elasticsearch_enrich_policy.md @@ -58,13 +58,13 @@ resource "elasticstack_elasticsearch_enrich_policy" "policy1" { ### Optional -- `elasticsearch_connection` (Block List, Max: 1, Deprecated) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. 
(see [below for nested schema](#nestedblock--elasticsearch_connection)) +- `elasticsearch_connection` (Block List, Deprecated) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch_connection)) - `execute` (Boolean) Whether to call the execute API function in order to create the enrich index. - `query` (String) Query used to filter documents in the enrich index. The policy only uses documents matching this query to enrich incoming documents. Defaults to a match_all query. ### Read-Only -- `id` (String) The ID of this resource. +- `id` (String) Internal identifier of the resource ### Nested Schema for `elasticsearch_connection` diff --git a/internal/elasticsearch/enrich/acc_test.go b/internal/elasticsearch/enrich/acc_test.go new file mode 100644 index 000000000..cfbaf346e --- /dev/null +++ b/internal/elasticsearch/enrich/acc_test.go @@ -0,0 +1,200 @@ +package enrich_test + +import ( + "encoding/json" + "fmt" + "net/http" + "testing" + + "github.com/elastic/terraform-provider-elasticstack/internal/acctest" + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" +) + +func TestAccResourceEnrichPolicyFW(t *testing.T) { + name := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + CheckDestroy: checkEnrichPolicyDestroyFW(name), + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + Config: testAccEnrichPolicyFW(name), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "name", name), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "policy_type", "match"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "match_field", `email`), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "indices.0", name), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "enrich_fields.0", "first_name"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "enrich_fields.1", "last_name"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "query", "{\"match_all\": {}}\n"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "execute", "true"), + ), + }, + }, + }) +} + +func TestAccDataSourceEnrichPolicyFW(t *testing.T) { + name := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + Config: testAccEnrichPolicyDataSourceFW(name), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "name", name), + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "policy_type", "match"), + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "match_field", "email"), + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "indices.0", name), + 
resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "enrich_fields.0", "first_name"), + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "enrich_fields.1", "last_name"), + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "query", "{\"match_all\":{}}"), + ), + }, + }, + }) +} + +func TestAccResourceEnrichPolicyFromSDK(t *testing.T) { + name := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + Steps: []resource.TestStep{ + { + // Create the enrich policy with the last provider version where the enrich policy resource was built on the SDK + ExternalProviders: map[string]resource.ExternalProvider{ + "elasticstack": { + Source: "elastic/elasticstack", + VersionConstraint: "0.11.17", + }, + }, + Config: testAccEnrichPolicyFW(name), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "name", name), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "policy_type", "match"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "execute", "true"), + ), + }, + { + ProtoV6ProviderFactories: acctest.Providers, + Config: testAccEnrichPolicyFW(name), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "name", name), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "policy_type", "match"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "execute", "true"), + ), + }, + }, + }) +} + +func testAccEnrichPolicyFW(name string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} +} + +resource "elasticstack_elasticsearch_index" "my_index" { + name = "%s" + + mappings = jsonencode({ + properties = { + email = { type = "text" } + first_name = { type = "text" } + last_name = { type = "text" } + } + }) + deletion_protection = false +} + +resource "elasticstack_elasticsearch_enrich_policy" "policy" { + name = "%s" + policy_type = "match" + indices = [elasticstack_elasticsearch_index.my_index.name] + match_field = "email" + enrich_fields = ["first_name", "last_name"] + query = <<-EOD + {"match_all": {}} + EOD +} + `, name, name) +} + +func testAccEnrichPolicyDataSourceFW(name string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} +} + +resource "elasticstack_elasticsearch_index" "my_index" { + name = "%s" + + mappings = jsonencode({ + properties = { + email = { type = "text" } + first_name = { type = "text" } + last_name = { type = "text" } + } + }) + deletion_protection = false +} + +resource "elasticstack_elasticsearch_enrich_policy" "policy" { + name = "%s" + policy_type = "match" + indices = [elasticstack_elasticsearch_index.my_index.name] + match_field = "email" + enrich_fields = ["first_name", "last_name"] + query = <<-EOD + {"match_all": {}} + EOD +} + +data "elasticstack_elasticsearch_enrich_policy" "test" { + name = elasticstack_elasticsearch_enrich_policy.policy.name +} + `, name, name) +} + +func checkEnrichPolicyDestroyFW(name string) func(s *terraform.State) error { + return func(s *terraform.State) error { + client, err := clients.NewAcceptanceTestingClient() + if err != nil { + return err + } + + for _, rs := range s.RootModule().Resources { + if rs.Type != 
"elasticstack_elasticsearch_enrich_policy" { + continue + } + compId, _ := clients.CompositeIdFromStr(rs.Primary.ID) + if compId.ResourceId != name { + return fmt.Errorf("Found unexpectedly enrich policy: %s", compId.ResourceId) + } + esClient, err := client.GetESClient() + if err != nil { + return err + } + req := esClient.EnrichGetPolicy.WithName(compId.ResourceId) + res, err := esClient.EnrichGetPolicy(req) + if err != nil { + return err + } + defer res.Body.Close() + if res.StatusCode == http.StatusFound { + var policiesResponse map[string]any + if err := json.NewDecoder(res.Body).Decode(&policiesResponse); err != nil { + return err + } + if len(policiesResponse["policies"].([]any)) != 0 { + return fmt.Errorf("Enrich policy (%s) still exists", compId.ResourceId) + } + } + } + return nil + } +} diff --git a/internal/elasticsearch/enrich/create.go b/internal/elasticsearch/enrich/create.go new file mode 100644 index 000000000..1f1f2e368 --- /dev/null +++ b/internal/elasticsearch/enrich/create.go @@ -0,0 +1,94 @@ +package enrich + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" + "github.com/elastic/terraform-provider-elasticstack/internal/models" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func (r *enrichPolicyResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + diags := r.upsert(ctx, req.Plan, &resp.State) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } +} + +func (r *enrichPolicyResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + diags := r.upsert(ctx, req.Plan, &resp.State) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } +} + +func (r *enrichPolicyResource) upsert(ctx context.Context, plan tfsdk.Plan, state *tfsdk.State) diag.Diagnostics { + var data EnrichPolicyDataWithExecute + var diags diag.Diagnostics + diags.Append(plan.Get(ctx, &data)...) + if diags.HasError() { + return diags + } + + policyName := data.Name.ValueString() + id, sdkDiags := r.client.ID(ctx, policyName) + diags.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) + if diags.HasError() { + return diags + } + + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, data.ElasticsearchConnection, r.client) + diags.Append(diags...) 
+ if diags.HasError() { + return diags + } + + // Convert framework types to model + indices := utils.SetTypeAs[string](ctx, data.Indices, path.Empty(), &diags) + if diags.HasError() { + return diags + } + + enrichFields := utils.SetTypeAs[string](ctx, data.EnrichFields, path.Empty(), &diags) + if diags.HasError() { + return diags + } + + policy := &models.EnrichPolicy{ + Type: data.PolicyType.ValueString(), + Name: policyName, + Indices: indices, + MatchField: data.MatchField.ValueString(), + EnrichFields: enrichFields, + } + + if !data.Query.IsNull() && !data.Query.IsUnknown() { + policy.Query = data.Query.ValueString() + } + + if sdkDiags := elasticsearch.PutEnrichPolicy(ctx, client, policy); sdkDiags.HasError() { + diags.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) + return diags + } + + data.Id = types.StringValue(id.String()) + + // Execute policy if requested + if !data.Execute.IsNull() && !data.Execute.IsUnknown() && data.Execute.ValueBool() { + if sdkDiags := elasticsearch.ExecuteEnrichPolicy(ctx, client, policyName); sdkDiags.HasError() { + diags.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) + return diags + } + } + + diags.Append(state.Set(ctx, &data)...) + return diags +} diff --git a/internal/elasticsearch/enrich/data_source.go b/internal/elasticsearch/enrich/data_source.go new file mode 100644 index 000000000..446754661 --- /dev/null +++ b/internal/elasticsearch/enrich/data_source.go @@ -0,0 +1,122 @@ +package enrich + +import ( + "context" + "fmt" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + + providerschema "github.com/elastic/terraform-provider-elasticstack/internal/schema" +) + +func NewEnrichPolicyDataSource() datasource.DataSource { + return &enrichPolicyDataSource{} +} + +type enrichPolicyDataSource struct { + client *clients.ApiClient +} + +func (d *enrichPolicyDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_elasticsearch_enrich_policy" +} + +func (d *enrichPolicyDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + client, diags := clients.ConvertProviderData(req.ProviderData) + resp.Diagnostics.Append(diags...) + d.client = client +} + +func (d *enrichPolicyDataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = GetDataSourceSchema() +} + +func GetDataSourceSchema() schema.Schema { + return schema.Schema{ + MarkdownDescription: "Returns information about an enrich policy. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html", + Blocks: map[string]schema.Block{ + "elasticsearch_connection": providerschema.GetEsFWConnectionBlock("elasticsearch_connection", false), + }, + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + MarkdownDescription: "Internal identifier of the resource", + Computed: true, + }, + "name": schema.StringAttribute{ + MarkdownDescription: "The name of the policy.", + Required: true, + }, + "policy_type": schema.StringAttribute{ + MarkdownDescription: "The type of enrich policy, can be one of geo_match, match, range.", + Computed: true, + }, + "indices": schema.SetAttribute{ + MarkdownDescription: "Array of one or more source indices used to create the enrich index.", + ElementType: types.StringType, + Computed: true, + }, + "match_field": schema.StringAttribute{ + MarkdownDescription: "Field from the source indices used to match incoming documents.", + Computed: true, + }, + "enrich_fields": schema.SetAttribute{ + MarkdownDescription: "Fields to add to matching incoming documents. These fields must be present in the source indices.", + ElementType: types.StringType, + Computed: true, + }, + "query": schema.StringAttribute{ + MarkdownDescription: "Query used to filter documents in the enrich index. The policy only uses documents matching this query to enrich incoming documents. Defaults to a match_all query.", + CustomType: jsontypes.NormalizedType{}, + Computed: true, + }, + }, + } +} + +func (d *enrichPolicyDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data EnrichPolicyData + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + policyName := data.Name.ValueString() + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, data.ElasticsearchConnection, d.client) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + id, sdkDiags := client.ID(ctx, policyName) + resp.Diagnostics.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) + if resp.Diagnostics.HasError() { + return + } + data.Id = types.StringValue(id.String()) + + // Use the same read logic as the resource + policy, sdkDiags := elasticsearch.GetEnrichPolicy(ctx, client, policyName) + resp.Diagnostics.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) + if resp.Diagnostics.HasError() { + return + } + + if policy == nil { + resp.Diagnostics.AddError("Policy not found", fmt.Sprintf("Enrich policy '%s' not found", policyName)) + return + } + + // Convert model to framework types using shared function + data.populateFromPolicy(ctx, policy, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} diff --git a/internal/elasticsearch/enrich/delete.go b/internal/elasticsearch/enrich/delete.go new file mode 100644 index 000000000..aa249f6d0 --- /dev/null +++ b/internal/elasticsearch/enrich/delete.go @@ -0,0 +1,34 @@ +package enrich + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +func (r *enrichPolicyResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data EnrichPolicyDataWithExecute + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + compId, diags := clients.CompositeIdFromStrFw(data.Id.ValueString()) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + policyName := compId.ResourceId + + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, data.ElasticsearchConnection, r.client) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + sdkDiags := elasticsearch.DeleteEnrichPolicy(ctx, client, policyName) + resp.Diagnostics.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) +} diff --git a/internal/elasticsearch/enrich/models.go b/internal/elasticsearch/enrich/models.go new file mode 100644 index 000000000..ca76c4216 --- /dev/null +++ b/internal/elasticsearch/enrich/models.go @@ -0,0 +1,49 @@ +package enrich + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/models" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type EnrichPolicyData struct { + Id types.String `tfsdk:"id"` + ElasticsearchConnection types.List `tfsdk:"elasticsearch_connection"` + Name types.String `tfsdk:"name"` + PolicyType types.String `tfsdk:"policy_type"` + Indices types.Set `tfsdk:"indices"` + MatchField types.String `tfsdk:"match_field"` + EnrichFields types.Set `tfsdk:"enrich_fields"` + Query jsontypes.Normalized `tfsdk:"query"` +} + +type EnrichPolicyDataWithExecute struct { + EnrichPolicyData + Execute types.Bool `tfsdk:"execute"` +} + +// populateFromPolicy converts models.EnrichPolicy to EnrichPolicyData fields +func (data *EnrichPolicyData) populateFromPolicy(ctx context.Context, policy *models.EnrichPolicy, diagnostics *diag.Diagnostics) { + data.Name = types.StringValue(policy.Name) + data.PolicyType = types.StringValue(policy.Type) + data.MatchField = types.StringValue(policy.MatchField) + + if policy.Query != "" && policy.Query != "null" { + data.Query = jsontypes.NewNormalizedValue(policy.Query) + } else { + data.Query = jsontypes.NewNormalizedNull() + } + + // Convert string slices to Set + data.Indices = utils.SetValueFrom(ctx, policy.Indices, types.StringType, path.Empty(), diagnostics) + if diagnostics.HasError() { + return + } + + data.EnrichFields = utils.SetValueFrom(ctx, policy.EnrichFields, types.StringType, path.Empty(), diagnostics) +} diff --git a/internal/elasticsearch/enrich/policy.go b/internal/elasticsearch/enrich/policy.go deleted file mode 100644 index 16179c441..000000000 --- a/internal/elasticsearch/enrich/policy.go +++ 
/dev/null @@ -1,182 +0,0 @@ -package enrich - -import ( - "context" - "fmt" - - "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" - "github.com/elastic/terraform-provider-elasticstack/internal/models" - "github.com/elastic/terraform-provider-elasticstack/internal/utils" - "github.com/hashicorp/terraform-plugin-log/tflog" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" -) - -func ResourceEnrichPolicy() *schema.Resource { - policySchema := map[string]*schema.Schema{ - "name": { - Description: "Name of the enrich policy to manage.", - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, - "policy_type": { - Description: "The type of enrich policy, can be one of geo_match, match, range.", - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.StringInSlice([]string{"geo_match", "match", "range"}, false), - }, - "indices": { - Description: "Array of one or more source indices used to create the enrich index.", - Type: schema.TypeSet, - Required: true, - ForceNew: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - "match_field": { - Description: "Field in source indices used to match incoming documents.", - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateFunc: validation.All( - validation.StringLenBetween(1, 255), - ), - }, - "enrich_fields": { - Description: "Fields to add to matching incoming documents. These fields must be present in the source indices.", - Type: schema.TypeSet, - Required: true, - ForceNew: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - "query": { - Description: "Query used to filter documents in the enrich index. The policy only uses documents matching this query to enrich incoming documents. 
Defaults to a match_all query.", - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateFunc: validation.StringIsJSON, - DiffSuppressFunc: utils.DiffJsonSuppress, - }, - "execute": { - Description: "Whether to call the execute API function in order to create the enrich index.", - Type: schema.TypeBool, - Optional: true, - ForceNew: true, - Default: true, - }, - } - - utils.AddConnectionSchema(policySchema) - - return &schema.Resource{ - Description: "Managing Elasticsearch enrich policies, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html", - - CreateContext: resourceEnrichPolicyPut, - UpdateContext: resourceEnrichPolicyPut, - ReadContext: resourceEnrichPolicyRead, - DeleteContext: resourceEnrichPolicyDelete, - - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - - Schema: policySchema, - } -} - -func resourceEnrichPolicyRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - - compName, diags := clients.CompositeIdFromStr(d.Id()) - if diags.HasError() { - return diags - } - policy, diags := elasticsearch.GetEnrichPolicy(ctx, client, compName.ResourceId) - if policy == nil && diags == nil { - tflog.Warn(ctx, fmt.Sprintf(`Enrich policy "%s" not found, removing from state`, compName.ResourceId)) - d.SetId("") - return diags - } - if diags.HasError() { - return diags - } - - if err := d.Set("name", policy.Name); err != nil { - return diag.FromErr(err) - } - if err := d.Set("policy_type", policy.Type); err != nil { - return diag.FromErr(err) - } - if err := d.Set("indices", policy.Indices); err != nil { - return diag.FromErr(err) - } - if err := d.Set("match_field", policy.MatchField); err != nil { - return diag.FromErr(err) - } - if err := d.Set("enrich_fields", policy.EnrichFields); err != nil { - return diag.FromErr(err) - } - if err := d.Set("query", policy.Query); err != nil { - return diag.FromErr(err) - } - return diags -} - -func resourceEnrichPolicyPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - - name := d.Get("name").(string) - id, diags := client.ID(ctx, name) - if diags.HasError() { - return diags - } - policy := &models.EnrichPolicy{ - Type: d.Get("policy_type").(string), - Name: name, - Indices: utils.ExpandStringSet(d.Get("indices").(*schema.Set)), - MatchField: d.Get("match_field").(string), - EnrichFields: utils.ExpandStringSet(d.Get("enrich_fields").(*schema.Set)), - } - - if query, ok := d.GetOk("query"); ok { - policy.Query = query.(string) - } - - if diags = elasticsearch.PutEnrichPolicy(ctx, client, policy); diags.HasError() { - return diags - } - d.SetId(id.String()) - if d.Get("execute").(bool) { - diags := elasticsearch.ExecuteEnrichPolicy(ctx, client, name) - if diags.HasError() { - return diags - } - } - return resourceEnrichPolicyRead(ctx, d, meta) -} - -func resourceEnrichPolicyDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - - compName, diags := clients.CompositeIdFromStr(d.Id()) - if diags.HasError() { - return diags - } - return elasticsearch.DeleteEnrichPolicy(ctx, client, compName.ResourceId) -} diff --git 
a/internal/elasticsearch/enrich/policy_data_source.go b/internal/elasticsearch/enrich/policy_data_source.go deleted file mode 100644 index 975c50294..000000000 --- a/internal/elasticsearch/enrich/policy_data_source.go +++ /dev/null @@ -1,76 +0,0 @@ -package enrich - -import ( - "context" - - "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" -) - -func DataSourceEnrichPolicy() *schema.Resource { - policySchema := map[string]*schema.Schema{ - "id": { - Description: "Internal identifier of the resource", - Type: schema.TypeString, - Computed: true, - }, - "name": { - Description: "The name of the policy.", - Type: schema.TypeString, - Required: true, - }, - "policy_type": { - Description: "The type of enrich policy, can be one of geo_match, match, range.", - Type: schema.TypeString, - Computed: true, - }, - "indices": { - Description: "Array of one or more source indices used to create the enrich index.", - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - "match_field": { - Description: "Field in source indices used to match incoming documents.", - Type: schema.TypeString, - Computed: true, - }, - "enrich_fields": { - Description: "Fields to add to matching incoming documents. These fields must be present in the source indices.", - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - }, - "query": { - Description: "Query used to filter documents in the enrich index. The policy only uses documents matching this query to enrich incoming documents. Defaults to a match_all query.", - Type: schema.TypeString, - Computed: true, - }, - } - - return &schema.Resource{ - Description: "Returns information about an enrich policy. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html", - ReadContext: dataSourceEnrichPolicyRead, - Schema: policySchema, - } -} - -func dataSourceEnrichPolicyRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - - policyId := d.Get("name").(string) - id, diags := client.ID(ctx, policyId) - if diags.HasError() { - return diags - } - d.SetId(id.String()) - return resourceEnrichPolicyRead(ctx, d, meta) -} diff --git a/internal/elasticsearch/enrich/policy_data_source_test.go b/internal/elasticsearch/enrich/policy_data_source_test.go deleted file mode 100644 index be51d1094..000000000 --- a/internal/elasticsearch/enrich/policy_data_source_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package enrich_test - -import ( - "fmt" - "testing" - - "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" -) - -func TestAccDataSourceEnrichPolicy(t *testing.T) { - name := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - Config: testAccEnrichPolicyDataSource(name), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "name", name), - resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "policy_type", "match"), - resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "match_field", "email"), - resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "indices.0", name), - resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "enrich_fields.0", "first_name"), - resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "enrich_fields.1", "last_name"), - resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_enrich_policy.test", "query", "{\"match_all\":{}}"), - ), - }, - }, - }) -} - -func testAccEnrichPolicyDataSource(name string) string { - return fmt.Sprintf(` -provider "elasticstack" { - elasticsearch {} -} - -resource "elasticstack_elasticsearch_index" "my_index" { - name = "%s" - - mappings = jsonencode({ - properties = { - email = { type = "text" } - first_name = { type = "text" } - last_name = { type = "text" } - } - }) - deletion_protection = false -} - -resource "elasticstack_elasticsearch_enrich_policy" "policy" { - name = "%s" - policy_type = "match" - indices = [elasticstack_elasticsearch_index.my_index.name] - match_field = "email" - enrich_fields = ["first_name", "last_name"] - query = <<-EOD - {"match_all": {}} - EOD -} - -data "elasticstack_elasticsearch_enrich_policy" "test" { - name = elasticstack_elasticsearch_enrich_policy.policy.name -} - `, name, name) -} diff --git a/internal/elasticsearch/enrich/policy_test.go b/internal/elasticsearch/enrich/policy_test.go deleted file mode 100644 index a33a6e3bb..000000000 --- a/internal/elasticsearch/enrich/policy_test.go +++ /dev/null @@ -1,109 +0,0 @@ -package enrich_test - -import ( - "encoding/json" - "fmt" - "net/http" - "testing" - - "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - 
"github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" -) - -func TestAccResourceEnrichPolicy(t *testing.T) { - name := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkEnrichPolicyDestroy(name), - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - Config: testAccEnrichPolicy(name), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "name", name), - resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "policy_type", "match"), - resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "match_field", `email`), - resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "indices.0", name), - resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "enrich_fields.0", "first_name"), - resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "enrich_fields.1", "last_name"), - resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "query", "{\"match_all\":{}}"), - resource.TestCheckResourceAttr("elasticstack_elasticsearch_enrich_policy.policy", "execute", "true"), - ), - }, - }, - }) -} - -func testAccEnrichPolicy(name string) string { - return fmt.Sprintf(` -provider "elasticstack" { - elasticsearch {} -} - -resource "elasticstack_elasticsearch_index" "my_index" { - name = "%s" - - mappings = jsonencode({ - properties = { - email = { type = "text" } - first_name = { type = "text" } - last_name = { type = "text" } - } - }) - deletion_protection = false -} - -resource "elasticstack_elasticsearch_enrich_policy" "policy" { - name = "%s" - policy_type = "match" - indices = [elasticstack_elasticsearch_index.my_index.name] - match_field = "email" - enrich_fields = ["first_name", "last_name"] - query = <<-EOD - {"match_all": {}} - EOD -} - `, name, name) -} - -func checkEnrichPolicyDestroy(name string) func(s *terraform.State) error { - return func(s *terraform.State) error { - client, err := clients.NewAcceptanceTestingClient() - if err != nil { - return err - } - - for _, rs := range s.RootModule().Resources { - if rs.Type != "elasticstack_elasticsearch_enrich_policy" { - continue - } - compId, _ := clients.CompositeIdFromStr(rs.Primary.ID) - if compId.ResourceId != name { - return fmt.Errorf("Found unexpectedly enrich policy: %s", compId.ResourceId) - } - esClient, err := client.GetESClient() - if err != nil { - return err - } - req := esClient.EnrichGetPolicy.WithName(compId.ResourceId) - res, err := esClient.EnrichGetPolicy(req) - if err != nil { - return err - } - defer res.Body.Close() - if res.StatusCode == http.StatusFound { - var policiesResponse map[string]any - if err := json.NewDecoder(res.Body).Decode(&policiesResponse); err != nil { - return err - } - if len(policiesResponse["policies"].([]any)) != 0 { - return fmt.Errorf("Enrich policy (%s) still exists", compId.ResourceId) - } - } - } - return nil - } -} diff --git a/internal/elasticsearch/enrich/read.go b/internal/elasticsearch/enrich/read.go new file mode 100644 index 000000000..7e7f366e9 --- /dev/null +++ b/internal/elasticsearch/enrich/read.go @@ -0,0 
+1,53 @@ +package enrich + +import ( + "context" + "fmt" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-log/tflog" +) + +func (r *enrichPolicyResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data EnrichPolicyDataWithExecute + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + compId, diags := clients.CompositeIdFromStrFw(data.Id.ValueString()) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + policyName := compId.ResourceId + + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, data.ElasticsearchConnection, r.client) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + policy, sdkDiags := elasticsearch.GetEnrichPolicy(ctx, client, policyName) + resp.Diagnostics.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) + if resp.Diagnostics.HasError() { + return + } + + if policy == nil { + tflog.Warn(ctx, fmt.Sprintf(`Enrich policy "%s" not found, removing from state`, policyName)) + resp.State.RemoveResource(ctx) + return + } + + // Convert model to framework types using shared function + data.populateFromPolicy(ctx, policy, &resp.Diagnostics) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} diff --git a/internal/elasticsearch/enrich/resource.go b/internal/elasticsearch/enrich/resource.go new file mode 100644 index 000000000..28da90c60 --- /dev/null +++ b/internal/elasticsearch/enrich/resource.go @@ -0,0 +1,127 @@ +package enrich + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/boolplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/setplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + providerschema "github.com/elastic/terraform-provider-elasticstack/internal/schema" +) + +func NewEnrichPolicyResource() resource.Resource { + return &enrichPolicyResource{} +} + +type enrichPolicyResource struct { + client *clients.ApiClient +} + +func (r *enrichPolicyResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_elasticsearch_enrich_policy" +} + +func (r *enrichPolicyResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + client, diags := clients.ConvertProviderData(req.ProviderData) + 
resp.Diagnostics.Append(diags...) + r.client = client +} + +func (r *enrichPolicyResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = GetResourceSchema() +} + +func GetResourceSchema() schema.Schema { + return schema.Schema{ + MarkdownDescription: "Managing Elasticsearch enrich policies. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html", + Blocks: map[string]schema.Block{ + "elasticsearch_connection": providerschema.GetEsFWConnectionBlock("elasticsearch_connection", false), + }, + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + MarkdownDescription: "Internal identifier of the resource", + Computed: true, + }, + "name": schema.StringAttribute{ + MarkdownDescription: "Name of the enrich policy to manage.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + stringvalidator.LengthBetween(1, 255), + }, + }, + "policy_type": schema.StringAttribute{ + MarkdownDescription: "The type of enrich policy. Can be one of geo_match, match, or range.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + stringvalidator.OneOf("geo_match", "match", "range"), + }, + }, + "indices": schema.SetAttribute{ + MarkdownDescription: "Array of one or more source indices used to create the enrich index.", + ElementType: types.StringType, + Required: true, + PlanModifiers: []planmodifier.Set{ + setplanmodifier.RequiresReplace(), + }, + Validators: []validator.Set{ + setvalidator.SizeAtLeast(1), + }, + }, + "match_field": schema.StringAttribute{ + MarkdownDescription: "Field in source indices used to match incoming documents.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + stringvalidator.LengthBetween(1, 255), + }, + }, + "enrich_fields": schema.SetAttribute{ + MarkdownDescription: "Fields to add to matching incoming documents. These fields must be present in the source indices.", + ElementType: types.StringType, + Required: true, + PlanModifiers: []planmodifier.Set{ + setplanmodifier.RequiresReplace(), + }, + Validators: []validator.Set{ + setvalidator.SizeAtLeast(1), + }, + }, + "query": schema.StringAttribute{ + MarkdownDescription: "Query used to filter documents in the enrich index. The policy only uses documents matching this query to enrich incoming documents. 
Defaults to a match_all query.", + Optional: true, + CustomType: jsontypes.NormalizedType{}, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "execute": schema.BoolAttribute{ + MarkdownDescription: "Whether to call the execute API function in order to create the enrich index.", + Optional: true, + Computed: true, + Default: booldefault.StaticBool(true), + PlanModifiers: []planmodifier.Bool{ + boolplanmodifier.RequiresReplace(), + }, + }, + }, + } +} diff --git a/internal/utils/tfsdk.go b/internal/utils/tfsdk.go index 275d467ea..eb594da44 100644 --- a/internal/utils/tfsdk.go +++ b/internal/utils/tfsdk.go @@ -12,6 +12,11 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types/basetypes" ) +type Elementable interface { + attr.Value + ElementsAs(ctx context.Context, target interface{}, allowUnhandled bool) diag.Diagnostics +} + type ListMeta struct { Context context.Context Index int @@ -46,6 +51,22 @@ func ValueStringPointer(value types.String) *string { return value.ValueStringPointer() } +// ================ +// ==== Generic === +// ================ + +// elementsAs converts an Elementable collection (types.List, types.Map, or types.Set) into a tfsdk aware T. +func elementsAs[T any](ctx context.Context, value Elementable, p path.Path, diags *diag.Diagnostics) T { + var result T + if !IsKnown(value) { + return result + } + + d := value.ElementsAs(ctx, &result, false) + diags.Append(ConvertToAttrDiags(d, p)...) + return result +} + // ================ // ===== Maps ===== // ================ @@ -107,14 +128,7 @@ func MapTypeToMap[T1 any, T2 any](ctx context.Context, value types.Map, p path.P // MapTypeAs converts a types.Map into a tfsdk aware map[string]T. func MapTypeAs[T any](ctx context.Context, value types.Map, p path.Path, diags *diag.Diagnostics) map[string]T { - if !IsKnown(value) { - return nil - } - - var items map[string]T - d := value.ElementsAs(ctx, &items, false) - diags.Append(ConvertToAttrDiags(d, p)...) - return items + return elementsAs[map[string]T](ctx, value, p, diags) } // MapValueFrom converts a tfsdk aware map[string]T to a types.Map. @@ -145,10 +159,7 @@ func SliceToListType[T1 any, T2 any](ctx context.Context, value []T1, elemType a // SliceToListType_String converts a tfsdk naive []string into a types.List. // This is a shorthand SliceToListType helper for strings. func SliceToListType_String(ctx context.Context, value []string, p path.Path, diags *diag.Diagnostics) types.List { - return SliceToListType(ctx, value, types.StringType, p, diags, - func(item string, meta ListMeta) types.String { - return types.StringValue(item) - }) + return ListValueFrom(ctx, value, types.StringType, p, diags) } // ListTypeToMap converts a types.List first into a tfsdk aware map[string]T1 @@ -184,21 +195,12 @@ func ListTypeToSlice[T1 any, T2 any](ctx context.Context, value types.List, p pa // ListTypeToSlice_String converts a types.List into a []string. // This is a shorthand ListTypeToSlice helper for strings. func ListTypeToSlice_String(ctx context.Context, value types.List, p path.Path, diags *diag.Diagnostics) []string { - return ListTypeToSlice(ctx, value, p, diags, func(item types.String, meta ListMeta) string { - return item.ValueString() - }) + return ListTypeAs[string](ctx, value, p, diags) } // ListTypeAs converts a types.List into a tfsdk aware []T. 
func ListTypeAs[T any](ctx context.Context, value types.List, p path.Path, diags *diag.Diagnostics) []T { - if !IsKnown(value) { - return nil - } - - var items []T - nd := value.ElementsAs(ctx, &items, false) - diags.Append(ConvertToAttrDiags(nd, p)...) - return items + return elementsAs[[]T](ctx, value, p, diags) } // ListValueFrom converts a tfsdk aware []T to a types.List. @@ -208,6 +210,22 @@ func ListValueFrom[T any](ctx context.Context, value []T, elemType attr.Type, p return list } +// =================== +// ===== Sets ===== +// =================== + +// SetTypeAs converts a types.Set into a tfsdk aware []T. +func SetTypeAs[T any](ctx context.Context, value types.Set, p path.Path, diags *diag.Diagnostics) []T { + return elementsAs[[]T](ctx, value, p, diags) +} + +// SetValueFrom converts a tfsdk aware []T to a types.Set. +func SetValueFrom[T any](ctx context.Context, value []T, elemType attr.Type, p path.Path, diags *diag.Diagnostics) types.Set { + set, d := types.SetValueFrom(ctx, elemType, value) + diags.Append(ConvertToAttrDiags(d, p)...) + return set +} + // =================== // ===== Objects ===== // =================== diff --git a/internal/utils/tfsdk_test.go b/internal/utils/tfsdk_test.go index 349cc8f8a..33aa3a5f1 100644 --- a/internal/utils/tfsdk_test.go +++ b/internal/utils/tfsdk_test.go @@ -100,6 +100,15 @@ var ( types.StringValue("v3"), }) + awareSetUnk = types.SetUnknown(awareType) + awareSetNil = types.SetNull(awareType) + awareSetEmpty = types.SetValueMust(awareType, []attr.Value{}) + awareSetFull = types.SetValueMust(awareType, []attr.Value{ + types.ObjectValueMust(awareType.AttrTypes, map[string]attr.Value{"id": types.StringValue("id1")}), + types.ObjectValueMust(awareType.AttrTypes, map[string]attr.Value{"id": types.StringValue("id2")}), + types.ObjectValueMust(awareType.AttrTypes, map[string]attr.Value{"id": types.StringValue("id3")}), + }) + normUnk = jsontypes.NewNormalizedUnknown() normNil = jsontypes.NewNormalizedNull() normEmpty = jsontypes.NewNormalizedValue(`{}`) @@ -699,3 +708,52 @@ func TestTransformMapToSlice(t *testing.T) { }) } } + +// Sets + +func TestSetTypeAs(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + input types.Set + want []aware + }{ + {name: "converts unknown", input: awareSetUnk, want: awareSliceNil}, + {name: "converts nil", input: awareSetNil, want: awareSliceNil}, + {name: "converts empty", input: awareSetEmpty, want: awareSliceEmpty}, + {name: "converts struct", input: awareSetFull, want: awareSliceFull}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var diags diag.Diagnostics + got := utils.SetTypeAs[aware](context.Background(), tt.input, path.Empty(), &diags) + require.Equal(t, tt.want, got) + require.Empty(t, diags) + }) + } +} + +func TestSetValueFrom(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + input []aware + want types.Set + }{ + {name: "converts nil", input: awareSliceNil, want: awareSetNil}, + {name: "converts empty", input: awareSliceEmpty, want: awareSetEmpty}, + {name: "converts struct", input: awareSliceFull, want: awareSetFull}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var diags diag.Diagnostics + got := utils.SetValueFrom(context.Background(), tt.input, awareType, path.Empty(), &diags) + require.Equal(t, tt.want, got) + require.Empty(t, diags) + }) + } +} diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go index 1da87c988..d7031c9ce 100644 --- a/provider/plugin_framework.go +++ b/provider/plugin_framework.go @@ -6,6 +6,7 @@ import ( 
"github.com/elastic/terraform-provider-elasticstack/internal/apm/agent_configuration" "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/clients/config" + "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/enrich" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index/data_stream_lifecycle" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index/index" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index/indices" @@ -86,6 +87,7 @@ func (p *Provider) DataSources(ctx context.Context) []func() datasource.DataSour spaces.NewDataSource, enrollment_tokens.NewDataSource, integration_ds.NewDataSource, + enrich.NewEnrichPolicyDataSource, } } @@ -106,5 +108,6 @@ func (p *Provider) Resources(ctx context.Context) []func() resource.Resource { output.NewResource, server_host.NewResource, system_user.NewSystemUserResource, + enrich.NewEnrichPolicyResource, } } diff --git a/provider/provider.go b/provider/provider.go index f5bc7fba7..2cd6441ca 100644 --- a/provider/provider.go +++ b/provider/provider.go @@ -3,7 +3,6 @@ package provider import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/cluster" - "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/enrich" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/ingest" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/logstash" @@ -78,7 +77,6 @@ func New(version string) *schema.Provider { "elasticstack_elasticsearch_security_user": security.DataSourceUser(), "elasticstack_elasticsearch_snapshot_repository": cluster.DataSourceSnapshotRespository(), "elasticstack_elasticsearch_info": cluster.DataSourceClusterInfo(), - "elasticstack_elasticsearch_enrich_policy": enrich.DataSourceEnrichPolicy(), "elasticstack_kibana_action_connector": kibana.DataSourceConnector(), "elasticstack_kibana_security_role": kibana.DataSourceRole(), @@ -97,7 +95,6 @@ func New(version string) *schema.Provider { "elasticstack_elasticsearch_snapshot_lifecycle": cluster.ResourceSlm(), "elasticstack_elasticsearch_snapshot_repository": cluster.ResourceSnapshotRepository(), "elasticstack_elasticsearch_script": cluster.ResourceScript(), - "elasticstack_elasticsearch_enrich_policy": enrich.ResourceEnrichPolicy(), "elasticstack_elasticsearch_transform": transform.ResourceTransform(), "elasticstack_elasticsearch_watch": watcher.ResourceWatch(), From 91ee184a00ad47b3a76adfdbbf9afc04ba2c335a Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Tue, 2 Sep 2025 13:28:30 +1000 Subject: [PATCH 36/66] Migrate to terraform-plugin-testing (#1256) --- go.mod | 1 + go.sum | 2 ++ internal/acctest/checks/resource_list.go | 2 +- internal/acctest/checks/user_auth.go | 2 +- internal/apm/agent_configuration/resource_test.go | 4 ++-- .../cluster/cluster_info_data_source_test.go | 2 +- internal/elasticsearch/cluster/script_test.go | 6 +++--- internal/elasticsearch/cluster/settings_test.go | 4 ++-- internal/elasticsearch/cluster/slm_test.go | 6 +++--- .../cluster/snapshot_repository_data_source_test.go | 4 ++-- .../elasticsearch/cluster/snapshot_repository_test.go | 6 +++--- .../elasticsearch/index/component_template_test.go | 6 +++--- .../index/data_stream_lifecycle/acc_test.go | 6 +++--- 
internal/elasticsearch/index/data_stream_test.go | 6 +++--- internal/elasticsearch/index/ilm_test.go | 6 +++--- internal/elasticsearch/index/index/acc_test.go | 6 +++--- .../elasticsearch/index/indices/data_source_test.go | 2 +- .../elasticsearch/index/template_data_source_test.go | 4 ++-- internal/elasticsearch/index/template_test.go | 6 +++--- internal/elasticsearch/ingest/commons_test.go | 4 ++-- internal/elasticsearch/ingest/pipeline_test.go | 6 +++--- .../ingest/processor_append_data_source_test.go | 2 +- .../ingest/processor_bytes_data_source_test.go | 2 +- .../ingest/processor_circle_data_source_test.go | 2 +- .../ingest/processor_community_id_data_source_test.go | 2 +- .../ingest/processor_convert_data_source_test.go | 2 +- .../ingest/processor_csv_data_source_test.go | 2 +- .../ingest/processor_date_data_source_test.go | 2 +- .../processor_date_index_name_data_source_test.go | 2 +- .../ingest/processor_dissect_data_source_test.go | 2 +- .../ingest/processor_dot_expander_data_source_test.go | 2 +- .../ingest/processor_drop_data_source_test.go | 2 +- .../ingest/processor_fail_data_source_test.go | 2 +- .../ingest/processor_fingerprint_data_source_test.go | 2 +- .../ingest/processor_foreach_data_source_test.go | 2 +- .../ingest/processor_geoip_data_source_test.go | 2 +- .../ingest/processor_grok_data_source_test.go | 2 +- .../ingest/processor_gsub_data_source_test.go | 2 +- .../ingest/processor_html_strip_data_source_test.go | 2 +- .../ingest/processor_join_data_source_test.go | 2 +- .../ingest/processor_json_data_source_test.go | 2 +- .../ingest/processor_kv_data_source_test.go | 2 +- .../ingest/processor_lowercase_data_source_test.go | 2 +- .../processor_network_direction_data_source_test.go | 2 +- .../ingest/processor_pipeline_data_source_test.go | 2 +- .../processor_registered_domain_data_source_test.go | 2 +- .../ingest/processor_remove_data_source_test.go | 2 +- .../ingest/processor_rename_data_source_test.go | 2 +- .../ingest/processor_reroute_data_source_test.go | 2 +- .../ingest/processor_script_data_source_test.go | 2 +- .../ingest/processor_set_data_source_test.go | 2 +- .../processor_set_security_user_data_source_test.go | 2 +- .../ingest/processor_sort_data_source_test.go | 2 +- .../ingest/processor_split_data_source_test.go | 2 +- .../ingest/processor_trim_data_source_test.go | 2 +- .../ingest/processor_uppercase_data_source_test.go | 2 +- .../ingest/processor_uri_parts_data_source_test.go | 2 +- .../ingest/processor_urldecode_data_source_test.go | 2 +- .../ingest/processor_user_agent_data_source_test.go | 2 +- internal/elasticsearch/logstash/pipeline_test.go | 6 +++--- internal/elasticsearch/security/api_key/acc_test.go | 11 +++++------ .../elasticsearch/security/role_data_source_test.go | 2 +- .../security/role_mapping_data_source_test.go | 2 +- internal/elasticsearch/security/role_mapping_test.go | 6 +++--- internal/elasticsearch/security/role_test.go | 6 +++--- .../elasticsearch/security/system_user/acc_test.go | 2 +- .../elasticsearch/security/user_data_source_test.go | 2 +- internal/elasticsearch/security/user_test.go | 8 ++++---- internal/elasticsearch/transform/transform_test.go | 6 +++--- internal/elasticsearch/watcher/watch_test.go | 6 +++--- internal/fleet/agent_policy/resource_test.go | 6 +++--- internal/fleet/enrollment_tokens/data_source_test.go | 4 ++-- internal/fleet/integration/resource_test.go | 4 ++-- internal/fleet/integration_ds/data_source_test.go | 4 ++-- internal/fleet/integration_policy/resource_test.go | 6 +++--- 
internal/fleet/output/resource_test.go | 6 +++--- internal/fleet/server_host/resource_test.go | 6 +++--- internal/kibana/alerting_test.go | 6 +++--- internal/kibana/connector_data_source_test.go | 2 +- internal/kibana/connector_test.go | 6 +++--- internal/kibana/data_view/acc_test.go | 4 ++-- internal/kibana/import_saved_objects/acc_test.go | 2 +- internal/kibana/role_data_source_test.go | 2 +- internal/kibana/role_test.go | 6 +++--- internal/kibana/slo_test.go | 9 ++++----- internal/kibana/space_test.go | 6 +++--- internal/kibana/spaces/data_source_test.go | 2 +- internal/kibana/synthetics/acc_pl_test.go | 2 +- internal/kibana/synthetics/acc_test.go | 4 ++-- internal/kibana/synthetics/parameter/resource_test.go | 2 +- provider/factory_test.go | 2 +- provider/provider_test.go | 4 ++-- 92 files changed, 160 insertions(+), 159 deletions(-) diff --git a/go.mod b/go.mod index 235a6dfc7..fa7f690c4 100644 --- a/go.mod +++ b/go.mod @@ -19,6 +19,7 @@ require ( github.com/hashicorp/terraform-plugin-log v0.9.0 github.com/hashicorp/terraform-plugin-mux v0.20.0 github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 + github.com/hashicorp/terraform-plugin-testing v1.13.3 github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c github.com/oapi-codegen/runtime v1.1.2 github.com/stretchr/testify v1.11.0 diff --git a/go.sum b/go.sum index ccc107a1d..a75eb60ec 100644 --- a/go.sum +++ b/go.sum @@ -621,6 +621,8 @@ github.com/hashicorp/terraform-plugin-mux v0.20.0 h1:3QpBnI9uCuL0Yy2Rq/kR9cOdmOF github.com/hashicorp/terraform-plugin-mux v0.20.0/go.mod h1:wSIZwJjSYk86NOTX3fKUlThMT4EAV1XpBHz9SAvjQr4= github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0 h1:NFPMacTrY/IdcIcnUB+7hsore1ZaRWU9cnB6jFoBnIM= github.com/hashicorp/terraform-plugin-sdk/v2 v2.37.0/go.mod h1:QYmYnLfsosrxjCnGY1p9c7Zj6n9thnEE+7RObeYs3fA= +github.com/hashicorp/terraform-plugin-testing v1.13.3 h1:QLi/khB8Z0a5L54AfPrHukFpnwsGL8cwwswj4RZduCo= +github.com/hashicorp/terraform-plugin-testing v1.13.3/go.mod h1:WHQ9FDdiLoneey2/QHpGM/6SAYf4A7AZazVg7230pLE= github.com/hashicorp/terraform-registry-address v0.2.5 h1:2GTftHqmUhVOeuu9CW3kwDkRe4pcBDq0uuK5VJngU1M= github.com/hashicorp/terraform-registry-address v0.2.5/go.mod h1:PpzXWINwB5kuVS5CA7m1+eO2f1jKb5ZDIxrOPfpnGkg= github.com/hashicorp/terraform-svchost v0.1.1 h1:EZZimZ1GxdqFRinZ1tpJwVxxt49xc/S52uzrw4x0jKQ= diff --git a/internal/acctest/checks/resource_list.go b/internal/acctest/checks/resource_list.go index c488c1677..c6439fb4a 100644 --- a/internal/acctest/checks/resource_list.go +++ b/internal/acctest/checks/resource_list.go @@ -3,7 +3,7 @@ package checks import ( "fmt" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestCheckResourceListAttr(name, key string, values []string) resource.TestCheckFunc { diff --git a/internal/acctest/checks/user_auth.go b/internal/acctest/checks/user_auth.go index 5200ebf0a..adeae7c2f 100644 --- a/internal/acctest/checks/user_auth.go +++ b/internal/acctest/checks/user_auth.go @@ -6,7 +6,7 @@ import ( "io" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func CheckUserCanAuthenticate(username string, password string) func(*terraform.State) error { diff --git a/internal/apm/agent_configuration/resource_test.go b/internal/apm/agent_configuration/resource_test.go index 4f9e0a828..0f292a2dc 100644 --- 
a/internal/apm/agent_configuration/resource_test.go +++ b/internal/apm/agent_configuration/resource_test.go @@ -5,8 +5,8 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - tf_acctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + tf_acctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccResourceAgentConfiguration(t *testing.T) { diff --git a/internal/elasticsearch/cluster/cluster_info_data_source_test.go b/internal/elasticsearch/cluster/cluster_info_data_source_test.go index 80b256bab..f8a981917 100644 --- a/internal/elasticsearch/cluster/cluster_info_data_source_test.go +++ b/internal/elasticsearch/cluster/cluster_info_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceClusterInfo(t *testing.T) { diff --git a/internal/elasticsearch/cluster/script_test.go b/internal/elasticsearch/cluster/script_test.go index eb31cf320..f2d98cee5 100644 --- a/internal/elasticsearch/cluster/script_test.go +++ b/internal/elasticsearch/cluster/script_test.go @@ -6,9 +6,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/stretchr/testify/require" ) diff --git a/internal/elasticsearch/cluster/settings_test.go b/internal/elasticsearch/cluster/settings_test.go index 1c604e60c..1edade951 100644 --- a/internal/elasticsearch/cluster/settings_test.go +++ b/internal/elasticsearch/cluster/settings_test.go @@ -7,8 +7,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceClusterSettings(t *testing.T) { diff --git a/internal/elasticsearch/cluster/slm_test.go b/internal/elasticsearch/cluster/slm_test.go index ba1c46d38..49bb1bd11 100644 --- a/internal/elasticsearch/cluster/slm_test.go +++ b/internal/elasticsearch/cluster/slm_test.go @@ -6,9 +6,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func 
TestAccResourceSLM(t *testing.T) { diff --git a/internal/elasticsearch/cluster/snapshot_repository_data_source_test.go b/internal/elasticsearch/cluster/snapshot_repository_data_source_test.go index 5aaa64f1c..9db617264 100644 --- a/internal/elasticsearch/cluster/snapshot_repository_data_source_test.go +++ b/internal/elasticsearch/cluster/snapshot_repository_data_source_test.go @@ -5,8 +5,8 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceSnapRepoMissing(t *testing.T) { diff --git a/internal/elasticsearch/cluster/snapshot_repository_test.go b/internal/elasticsearch/cluster/snapshot_repository_test.go index 87352cf1a..a51262eef 100644 --- a/internal/elasticsearch/cluster/snapshot_repository_test.go +++ b/internal/elasticsearch/cluster/snapshot_repository_test.go @@ -6,9 +6,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceSnapRepoFs(t *testing.T) { diff --git a/internal/elasticsearch/index/component_template_test.go b/internal/elasticsearch/index/component_template_test.go index 70dfb3ed2..ca8b603cf 100644 --- a/internal/elasticsearch/index/component_template_test.go +++ b/internal/elasticsearch/index/component_template_test.go @@ -6,9 +6,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceComponentTemplate(t *testing.T) { diff --git a/internal/elasticsearch/index/data_stream_lifecycle/acc_test.go b/internal/elasticsearch/index/data_stream_lifecycle/acc_test.go index 708ad1600..1575ab95b 100644 --- a/internal/elasticsearch/index/data_stream_lifecycle/acc_test.go +++ b/internal/elasticsearch/index/data_stream_lifecycle/acc_test.go @@ -11,9 +11,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index/data_stream_lifecycle" "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceDataStreamLifecycle(t *testing.T) { diff --git a/internal/elasticsearch/index/data_stream_test.go b/internal/elasticsearch/index/data_stream_test.go index 27c5e0511..7b7b63e43 100644 --- a/internal/elasticsearch/index/data_stream_test.go +++ b/internal/elasticsearch/index/data_stream_test.go @@ -6,9 +6,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceDataStream(t *testing.T) { diff --git a/internal/elasticsearch/index/ilm_test.go b/internal/elasticsearch/index/ilm_test.go index e22d55047..2e9d8e84f 100644 --- a/internal/elasticsearch/index/ilm_test.go +++ b/internal/elasticsearch/index/ilm_test.go @@ -9,9 +9,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) var totalShardsPerNodeVersionLimit = version.Must(version.NewVersion("7.16.0")) diff --git a/internal/elasticsearch/index/index/acc_test.go b/internal/elasticsearch/index/index/acc_test.go index 1862ff9e5..f0bfacff8 100644 --- a/internal/elasticsearch/index/index/acc_test.go +++ b/internal/elasticsearch/index/index/acc_test.go @@ -7,9 +7,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceIndex(t *testing.T) { diff --git a/internal/elasticsearch/index/indices/data_source_test.go b/internal/elasticsearch/index/indices/data_source_test.go index 43614364e..3d12c5c50 100644 --- a/internal/elasticsearch/index/indices/data_source_test.go +++ b/internal/elasticsearch/index/indices/data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccIndicesDataSource(t *testing.T) { diff --git a/internal/elasticsearch/index/template_data_source_test.go b/internal/elasticsearch/index/template_data_source_test.go index 
6cc4aa0cf..63f4f3e72 100644 --- a/internal/elasticsearch/index/template_data_source_test.go +++ b/internal/elasticsearch/index/template_data_source_test.go @@ -7,8 +7,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccIndexTemplateDataSource(t *testing.T) { diff --git a/internal/elasticsearch/index/template_test.go b/internal/elasticsearch/index/template_test.go index 7e0ffd8f2..19951ab59 100644 --- a/internal/elasticsearch/index/template_test.go +++ b/internal/elasticsearch/index/template_test.go @@ -8,9 +8,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceIndexTemplate(t *testing.T) { diff --git a/internal/elasticsearch/ingest/commons_test.go b/internal/elasticsearch/ingest/commons_test.go index e75215885..6a47243c0 100644 --- a/internal/elasticsearch/ingest/commons_test.go +++ b/internal/elasticsearch/ingest/commons_test.go @@ -4,8 +4,8 @@ import ( "fmt" "github.com/elastic/terraform-provider-elasticstack/internal/utils" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) // check if the provided json string equal to the generated one diff --git a/internal/elasticsearch/ingest/pipeline_test.go b/internal/elasticsearch/ingest/pipeline_test.go index 6dac7fa5e..b5db8de67 100644 --- a/internal/elasticsearch/ingest/pipeline_test.go +++ b/internal/elasticsearch/ingest/pipeline_test.go @@ -6,9 +6,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceIngestPipeline(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_append_data_source_test.go b/internal/elasticsearch/ingest/processor_append_data_source_test.go index 4dbb95cda..2d31a881e 100644 --- a/internal/elasticsearch/ingest/processor_append_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_append_data_source_test.go @@ 
-4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorAppend(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_bytes_data_source_test.go b/internal/elasticsearch/ingest/processor_bytes_data_source_test.go index aacc9cb93..e996865e5 100644 --- a/internal/elasticsearch/ingest/processor_bytes_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_bytes_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorBytes(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_circle_data_source_test.go b/internal/elasticsearch/ingest/processor_circle_data_source_test.go index 7c3afdb02..bf80fde5d 100644 --- a/internal/elasticsearch/ingest/processor_circle_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_circle_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorCircle(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_community_id_data_source_test.go b/internal/elasticsearch/ingest/processor_community_id_data_source_test.go index 3593d04cc..e173a0e65 100644 --- a/internal/elasticsearch/ingest/processor_community_id_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_community_id_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorCommunityId(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_convert_data_source_test.go b/internal/elasticsearch/ingest/processor_convert_data_source_test.go index 821cb4ccc..498762b1d 100644 --- a/internal/elasticsearch/ingest/processor_convert_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_convert_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorConvert(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_csv_data_source_test.go b/internal/elasticsearch/ingest/processor_csv_data_source_test.go index b71862845..72b7c8b94 100644 --- a/internal/elasticsearch/ingest/processor_csv_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_csv_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorCSV(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_date_data_source_test.go 
b/internal/elasticsearch/ingest/processor_date_data_source_test.go index eeec82f95..b04e3bd80 100644 --- a/internal/elasticsearch/ingest/processor_date_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_date_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorDate(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_date_index_name_data_source_test.go b/internal/elasticsearch/ingest/processor_date_index_name_data_source_test.go index e1445cb27..13bf0f70e 100644 --- a/internal/elasticsearch/ingest/processor_date_index_name_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_date_index_name_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorDateIndexName(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_dissect_data_source_test.go b/internal/elasticsearch/ingest/processor_dissect_data_source_test.go index 34d94cc0e..237f093ad 100644 --- a/internal/elasticsearch/ingest/processor_dissect_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_dissect_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorDissect(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_dot_expander_data_source_test.go b/internal/elasticsearch/ingest/processor_dot_expander_data_source_test.go index 6062a90c0..68975f735 100644 --- a/internal/elasticsearch/ingest/processor_dot_expander_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_dot_expander_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorDotExpander(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_drop_data_source_test.go b/internal/elasticsearch/ingest/processor_drop_data_source_test.go index 52f388856..2806d67fe 100644 --- a/internal/elasticsearch/ingest/processor_drop_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_drop_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorDrop(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_fail_data_source_test.go b/internal/elasticsearch/ingest/processor_fail_data_source_test.go index 58c9d56c2..81d0723e8 100644 --- a/internal/elasticsearch/ingest/processor_fail_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_fail_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorFail(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_fingerprint_data_source_test.go b/internal/elasticsearch/ingest/processor_fingerprint_data_source_test.go index 9430db472..2ceceb6e9 100644 --- a/internal/elasticsearch/ingest/processor_fingerprint_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_fingerprint_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorFingerprint(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_foreach_data_source_test.go b/internal/elasticsearch/ingest/processor_foreach_data_source_test.go index 39e29ac28..3e0e99145 100644 --- a/internal/elasticsearch/ingest/processor_foreach_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_foreach_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorForeach(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_geoip_data_source_test.go b/internal/elasticsearch/ingest/processor_geoip_data_source_test.go index f1701116b..8e401c3ee 100644 --- a/internal/elasticsearch/ingest/processor_geoip_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_geoip_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorGeoip(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_grok_data_source_test.go b/internal/elasticsearch/ingest/processor_grok_data_source_test.go index a27f2dd10..288f5b297 100644 --- a/internal/elasticsearch/ingest/processor_grok_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_grok_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorGrok(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_gsub_data_source_test.go b/internal/elasticsearch/ingest/processor_gsub_data_source_test.go index 3f67c93c1..227e28720 100644 --- a/internal/elasticsearch/ingest/processor_gsub_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_gsub_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorGsub(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_html_strip_data_source_test.go b/internal/elasticsearch/ingest/processor_html_strip_data_source_test.go index 947d668e9..0c53e8bf7 100644 --- 
a/internal/elasticsearch/ingest/processor_html_strip_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_html_strip_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorHtmlStrip(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_join_data_source_test.go b/internal/elasticsearch/ingest/processor_join_data_source_test.go index eaf305786..2ce23b037 100644 --- a/internal/elasticsearch/ingest/processor_join_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_join_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorJoin(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_json_data_source_test.go b/internal/elasticsearch/ingest/processor_json_data_source_test.go index e2561bb2e..e598161d1 100644 --- a/internal/elasticsearch/ingest/processor_json_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_json_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorJson(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_kv_data_source_test.go b/internal/elasticsearch/ingest/processor_kv_data_source_test.go index e6a0b545c..7d683bba5 100644 --- a/internal/elasticsearch/ingest/processor_kv_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_kv_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorKV(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_lowercase_data_source_test.go b/internal/elasticsearch/ingest/processor_lowercase_data_source_test.go index d3f1bef0e..fc54cc139 100644 --- a/internal/elasticsearch/ingest/processor_lowercase_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_lowercase_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorLowercase(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_network_direction_data_source_test.go b/internal/elasticsearch/ingest/processor_network_direction_data_source_test.go index 9da8f33ba..feadb3eb1 100644 --- a/internal/elasticsearch/ingest/processor_network_direction_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_network_direction_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func 
TestAccDataSourceIngestProcessorNetworkDirection(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_pipeline_data_source_test.go b/internal/elasticsearch/ingest/processor_pipeline_data_source_test.go index 5a18c6568..2f5c0714d 100644 --- a/internal/elasticsearch/ingest/processor_pipeline_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_pipeline_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorPipeline(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_registered_domain_data_source_test.go b/internal/elasticsearch/ingest/processor_registered_domain_data_source_test.go index 89824bc5b..2209b3e35 100644 --- a/internal/elasticsearch/ingest/processor_registered_domain_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_registered_domain_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorRegisteredDomain(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_remove_data_source_test.go b/internal/elasticsearch/ingest/processor_remove_data_source_test.go index ee9383466..fd4417d37 100644 --- a/internal/elasticsearch/ingest/processor_remove_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_remove_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorRemove(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_rename_data_source_test.go b/internal/elasticsearch/ingest/processor_rename_data_source_test.go index 0f246c484..9c11b9307 100644 --- a/internal/elasticsearch/ingest/processor_rename_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_rename_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorRename(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_reroute_data_source_test.go b/internal/elasticsearch/ingest/processor_reroute_data_source_test.go index 135793195..49ccfb098 100644 --- a/internal/elasticsearch/ingest/processor_reroute_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_reroute_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorReroute(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_script_data_source_test.go b/internal/elasticsearch/ingest/processor_script_data_source_test.go index ca4973646..b01d08bb2 100644 --- a/internal/elasticsearch/ingest/processor_script_data_source_test.go +++ 
b/internal/elasticsearch/ingest/processor_script_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorScript(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_set_data_source_test.go b/internal/elasticsearch/ingest/processor_set_data_source_test.go index 221596685..ddf81dcb9 100644 --- a/internal/elasticsearch/ingest/processor_set_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_set_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorSet(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_set_security_user_data_source_test.go b/internal/elasticsearch/ingest/processor_set_security_user_data_source_test.go index a71da658a..f5dcbc593 100644 --- a/internal/elasticsearch/ingest/processor_set_security_user_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_set_security_user_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorSetSecurityUser(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_sort_data_source_test.go b/internal/elasticsearch/ingest/processor_sort_data_source_test.go index 59cc21db4..0a7776ca1 100644 --- a/internal/elasticsearch/ingest/processor_sort_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_sort_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorSort(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_split_data_source_test.go b/internal/elasticsearch/ingest/processor_split_data_source_test.go index 5de811a5c..210fedd7d 100644 --- a/internal/elasticsearch/ingest/processor_split_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_split_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorSplit(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_trim_data_source_test.go b/internal/elasticsearch/ingest/processor_trim_data_source_test.go index 5d8232846..ebcab360b 100644 --- a/internal/elasticsearch/ingest/processor_trim_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_trim_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorTrim(t *testing.T) { diff --git 
a/internal/elasticsearch/ingest/processor_uppercase_data_source_test.go b/internal/elasticsearch/ingest/processor_uppercase_data_source_test.go index afcfacaca..b6c0f1e9d 100644 --- a/internal/elasticsearch/ingest/processor_uppercase_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_uppercase_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorUppercase(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_uri_parts_data_source_test.go b/internal/elasticsearch/ingest/processor_uri_parts_data_source_test.go index 63076128d..56e4a01a3 100644 --- a/internal/elasticsearch/ingest/processor_uri_parts_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_uri_parts_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorUriParts(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_urldecode_data_source_test.go b/internal/elasticsearch/ingest/processor_urldecode_data_source_test.go index e40687a19..0d2266db3 100644 --- a/internal/elasticsearch/ingest/processor_urldecode_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_urldecode_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorUrldecode(t *testing.T) { diff --git a/internal/elasticsearch/ingest/processor_user_agent_data_source_test.go b/internal/elasticsearch/ingest/processor_user_agent_data_source_test.go index b9dde3701..d6f02376d 100644 --- a/internal/elasticsearch/ingest/processor_user_agent_data_source_test.go +++ b/internal/elasticsearch/ingest/processor_user_agent_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceIngestProcessorUserAgent(t *testing.T) { diff --git a/internal/elasticsearch/logstash/pipeline_test.go b/internal/elasticsearch/logstash/pipeline_test.go index 8cf0346f4..bcbbd4eab 100644 --- a/internal/elasticsearch/logstash/pipeline_test.go +++ b/internal/elasticsearch/logstash/pipeline_test.go @@ -7,9 +7,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestResourceLogstashPipeline(t *testing.T) { diff --git a/internal/elasticsearch/security/api_key/acc_test.go b/internal/elasticsearch/security/api_key/acc_test.go 
index be998ea3f..e1ed158bd 100644 --- a/internal/elasticsearch/security/api_key/acc_test.go +++ b/internal/elasticsearch/security/api_key/acc_test.go @@ -18,9 +18,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceSecurityApiKey(t *testing.T) { @@ -258,9 +258,8 @@ func TestAccResourceSecurityApiKeyFromSDK(t *testing.T) { VersionConstraint: "0.11.9", }, }, - ProtoV6ProviderFactories: acctest.Providers, - SkipFunc: versionutils.CheckIfVersionIsUnsupported(api_key.MinVersion), - Config: testAccResourceSecurityApiKeyWithoutExpiration(apiKeyName), + SkipFunc: versionutils.CheckIfVersionIsUnsupported(api_key.MinVersion), + Config: testAccResourceSecurityApiKeyWithoutExpiration(apiKeyName), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "name", apiKeyName), resource.TestCheckResourceAttrSet("elasticstack_elasticsearch_security_api_key.test", "role_descriptors"), diff --git a/internal/elasticsearch/security/role_data_source_test.go b/internal/elasticsearch/security/role_data_source_test.go index 0af0d4841..fb8fbab16 100644 --- a/internal/elasticsearch/security/role_data_source_test.go +++ b/internal/elasticsearch/security/role_data_source_test.go @@ -8,7 +8,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/security" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceSecurityRole(t *testing.T) { diff --git a/internal/elasticsearch/security/role_mapping_data_source_test.go b/internal/elasticsearch/security/role_mapping_data_source_test.go index 154aec605..026257a15 100644 --- a/internal/elasticsearch/security/role_mapping_data_source_test.go +++ b/internal/elasticsearch/security/role_mapping_data_source_test.go @@ -5,7 +5,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/acctest/checks" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceSecurityRoleMapping(t *testing.T) { diff --git a/internal/elasticsearch/security/role_mapping_test.go b/internal/elasticsearch/security/role_mapping_test.go index dcde6ebff..83c06e162 100644 --- a/internal/elasticsearch/security/role_mapping_test.go +++ b/internal/elasticsearch/security/role_mapping_test.go @@ -8,9 +8,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/acctest/checks" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestResourceRoleMapping(t *testing.T) { diff --git a/internal/elasticsearch/security/role_test.go b/internal/elasticsearch/security/role_test.go index 788764582..6824dd960 100644 --- a/internal/elasticsearch/security/role_test.go +++ b/internal/elasticsearch/security/role_test.go @@ -8,9 +8,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/security" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceSecurityRole(t *testing.T) { diff --git a/internal/elasticsearch/security/system_user/acc_test.go b/internal/elasticsearch/security/system_user/acc_test.go index 60c33b09e..4eba4b17b 100644 --- a/internal/elasticsearch/security/system_user/acc_test.go +++ b/internal/elasticsearch/security/system_user/acc_test.go @@ -6,7 +6,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/acctest/checks" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccResourceSecuritySystemUser(t *testing.T) { diff --git a/internal/elasticsearch/security/user_data_source_test.go b/internal/elasticsearch/security/user_data_source_test.go index 58d16c1d4..0a94cb61b 100644 --- a/internal/elasticsearch/security/user_data_source_test.go +++ b/internal/elasticsearch/security/user_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceSecurityUser(t *testing.T) { diff --git a/internal/elasticsearch/security/user_test.go b/internal/elasticsearch/security/user_test.go index 9311d3ca1..13cbd1c11 100644 --- a/internal/elasticsearch/security/user_test.go +++ b/internal/elasticsearch/security/user_test.go @@ -10,9 +10,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/acctest/checks" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceSecurityUser(t *testing.T) { @@ -45,7 +45,7 @@ func TestAccImportedUserDoesNotResetPassword(t 
*testing.T) { initialPassword := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) userUpdatedPassword := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) - resource.ParallelTest(t, resource.TestCase{ + resource.Test(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(t) }, CheckDestroy: checkResourceSecurityUserDestroy, ProtoV6ProviderFactories: acctest.Providers, diff --git a/internal/elasticsearch/transform/transform_test.go b/internal/elasticsearch/transform/transform_test.go index 8983906d0..45d457205 100644 --- a/internal/elasticsearch/transform/transform_test.go +++ b/internal/elasticsearch/transform/transform_test.go @@ -8,9 +8,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) var minSupportedDestAliasesVersion = version.Must(version.NewSemver("8.8.0")) diff --git a/internal/elasticsearch/watcher/watch_test.go b/internal/elasticsearch/watcher/watch_test.go index 8736d44ae..992431582 100644 --- a/internal/elasticsearch/watcher/watch_test.go +++ b/internal/elasticsearch/watcher/watch_test.go @@ -7,9 +7,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestResourceWatch(t *testing.T) { diff --git a/internal/fleet/agent_policy/resource_test.go b/internal/fleet/agent_policy/resource_test.go index e45ab4812..8f90217f4 100644 --- a/internal/fleet/agent_policy/resource_test.go +++ b/internal/fleet/agent_policy/resource_test.go @@ -14,9 +14,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) var minVersionAgentPolicy = version.Must(version.NewVersion("8.6.0")) diff --git a/internal/fleet/enrollment_tokens/data_source_test.go b/internal/fleet/enrollment_tokens/data_source_test.go index e4ad91346..c4c5793a9 100644 --- a/internal/fleet/enrollment_tokens/data_source_test.go +++ b/internal/fleet/enrollment_tokens/data_source_test.go @@ -11,8 +11,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/utils" 
"github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) var minVersionEnrollmentTokens = version.Must(version.NewVersion("8.6.0")) diff --git a/internal/fleet/integration/resource_test.go b/internal/fleet/integration/resource_test.go index 18a505d5a..01c9424f6 100644 --- a/internal/fleet/integration/resource_test.go +++ b/internal/fleet/integration/resource_test.go @@ -11,8 +11,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients/fleet" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/stretchr/testify/require" ) diff --git a/internal/fleet/integration_ds/data_source_test.go b/internal/fleet/integration_ds/data_source_test.go index ab5d50734..d554627cd 100644 --- a/internal/fleet/integration_ds/data_source_test.go +++ b/internal/fleet/integration_ds/data_source_test.go @@ -7,8 +7,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) var minVersionIntegrationDataSource = version.Must(version.NewVersion("8.6.0")) diff --git a/internal/fleet/integration_policy/resource_test.go b/internal/fleet/integration_policy/resource_test.go index f2c4aeaa2..70c9375d3 100644 --- a/internal/fleet/integration_policy/resource_test.go +++ b/internal/fleet/integration_policy/resource_test.go @@ -14,9 +14,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/stretchr/testify/require" ) diff --git a/internal/fleet/output/resource_test.go b/internal/fleet/output/resource_test.go index cfdb5e66d..47d324567 100644 --- a/internal/fleet/output/resource_test.go +++ b/internal/fleet/output/resource_test.go @@ -11,9 +11,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest 
"github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) var minVersionOutput = version.Must(version.NewVersion("8.6.0")) diff --git a/internal/fleet/server_host/resource_test.go b/internal/fleet/server_host/resource_test.go index 904aad671..6c372e7dd 100644 --- a/internal/fleet/server_host/resource_test.go +++ b/internal/fleet/server_host/resource_test.go @@ -11,9 +11,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) var minVersionFleetServerHost = version.Must(version.NewVersion("8.6.0")) diff --git a/internal/kibana/alerting_test.go b/internal/kibana/alerting_test.go index 3ba648f49..06e9de4bb 100644 --- a/internal/kibana/alerting_test.go +++ b/internal/kibana/alerting_test.go @@ -10,9 +10,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceAlertingRule(t *testing.T) { diff --git a/internal/kibana/connector_data_source_test.go b/internal/kibana/connector_data_source_test.go index 4b24e866f..157911ddc 100644 --- a/internal/kibana/connector_data_source_test.go +++ b/internal/kibana/connector_data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceKibanaConnector(t *testing.T) { diff --git a/internal/kibana/connector_test.go b/internal/kibana/connector_test.go index 172b75b78..9005a125d 100644 --- a/internal/kibana/connector_test.go +++ b/internal/kibana/connector_test.go @@ -13,9 +13,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/google/uuid" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceKibanaConnectorCasesWebhook(t *testing.T) { diff --git a/internal/kibana/data_view/acc_test.go b/internal/kibana/data_view/acc_test.go index c6c77612e..3d2c1fed2 100644 --- 
a/internal/kibana/data_view/acc_test.go +++ b/internal/kibana/data_view/acc_test.go @@ -7,8 +7,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) var minDataViewAPISupport = version.Must(version.NewVersion("8.1.0")) diff --git a/internal/kibana/import_saved_objects/acc_test.go b/internal/kibana/import_saved_objects/acc_test.go index 3ee48f346..594b39b99 100644 --- a/internal/kibana/import_saved_objects/acc_test.go +++ b/internal/kibana/import_saved_objects/acc_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccResourceImportSavedObjects(t *testing.T) { diff --git a/internal/kibana/role_data_source_test.go b/internal/kibana/role_data_source_test.go index d54414186..cd8290a7e 100644 --- a/internal/kibana/role_data_source_test.go +++ b/internal/kibana/role_data_source_test.go @@ -7,7 +7,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest/checks" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccDataSourceKibanaSecurityRole(t *testing.T) { diff --git a/internal/kibana/role_test.go b/internal/kibana/role_test.go index 4b48087d8..f91d484e2 100644 --- a/internal/kibana/role_test.go +++ b/internal/kibana/role_test.go @@ -9,9 +9,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceKibanaSecurityRole(t *testing.T) { diff --git a/internal/kibana/slo_test.go b/internal/kibana/slo_test.go index 94ab9b762..23a287c1a 100644 --- a/internal/kibana/slo_test.go +++ b/internal/kibana/slo_test.go @@ -16,9 +16,9 @@ import ( "github.com/hashicorp/go-version" "github.com/stretchr/testify/require" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) var sloTimesliceMetricsMinVersion = version.Must(version.NewVersion("8.12.0")) @@ -190,8 +190,7 @@ func TestAccResourceSloGroupBy(t *testing.T) { VersionConstraint: "0.11.11", }, }, - ProtoV6ProviderFactories: 
acctest.Providers, - SkipFunc: versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsMultipleGroupByMinVersion), + SkipFunc: versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsMultipleGroupByMinVersion), Config: getSLOConfig(sloVars{ name: sloName, indicatorType: "metric_custom_indicator", diff --git a/internal/kibana/space_test.go b/internal/kibana/space_test.go index 2f3a2cb51..e0a93a9f7 100644 --- a/internal/kibana/space_test.go +++ b/internal/kibana/space_test.go @@ -8,9 +8,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/kibana" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceSpace(t *testing.T) { diff --git a/internal/kibana/spaces/data_source_test.go b/internal/kibana/spaces/data_source_test.go index efc1cc246..e67d9a20c 100644 --- a/internal/kibana/spaces/data_source_test.go +++ b/internal/kibana/spaces/data_source_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestAccSpacesDataSource(t *testing.T) { diff --git a/internal/kibana/synthetics/acc_pl_test.go b/internal/kibana/synthetics/acc_pl_test.go index 012b04cfd..bdc607841 100644 --- a/internal/kibana/synthetics/acc_pl_test.go +++ b/internal/kibana/synthetics/acc_pl_test.go @@ -10,7 +10,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) const ( diff --git a/internal/kibana/synthetics/acc_test.go b/internal/kibana/synthetics/acc_test.go index 346fafbcd..ddde5a150 100644 --- a/internal/kibana/synthetics/acc_test.go +++ b/internal/kibana/synthetics/acc_test.go @@ -7,8 +7,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) var ( diff --git a/internal/kibana/synthetics/parameter/resource_test.go b/internal/kibana/synthetics/parameter/resource_test.go index 3a4ff0920..516fe3fdd 100644 --- a/internal/kibana/synthetics/parameter/resource_test.go +++ b/internal/kibana/synthetics/parameter/resource_test.go @@ -6,7 +6,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/hashicorp/go-version" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" ) const ( diff --git a/provider/factory_test.go b/provider/factory_test.go index 3b2e89fbc..9a6372810 100644 --- a/provider/factory_test.go +++ b/provider/factory_test.go @@ -5,7 +5,7 @@ import ( "testing" "github.com/hashicorp/terraform-plugin-go/tfprotov6" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) func TestMuxServer(t *testing.T) { diff --git a/provider/provider_test.go b/provider/provider_test.go index 112e5e1ca..d66aac1ed 100644 --- a/provider/provider_test.go +++ b/provider/provider_test.go @@ -12,8 +12,8 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" "github.com/elastic/terraform-provider-elasticstack/provider" "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" ) var minVersionForFleet = version.Must(version.NewVersion("8.6.0")) From 4432fc61918fe26f5f04456866b5b7cf1902d2d5 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Tue, 2 Sep 2025 04:30:36 +0000 Subject: [PATCH 37/66] fix(deps): update module github.com/stretchr/testify to v1.11.1 (#1272) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- libs/go-kibana-rest/go.mod | 2 +- libs/go-kibana-rest/go.sum | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index fa7f690c4..0a69fdece 100644 --- a/go.mod +++ b/go.mod @@ -22,7 +22,7 @@ require ( github.com/hashicorp/terraform-plugin-testing v1.13.3 github.com/mitchellh/mapstructure v1.5.1-0.20231216201459-8508981c8b6c github.com/oapi-codegen/runtime v1.1.2 - github.com/stretchr/testify v1.11.0 + github.com/stretchr/testify v1.11.1 go.uber.org/mock v0.6.0 ) diff --git a/go.sum b/go.sum index a75eb60ec..101f85864 100644 --- a/go.sum +++ b/go.sum @@ -988,8 +988,8 @@ github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/stretchr/testify v1.11.0 h1:ib4sjIrwZKxE5u/Japgo/7SJV3PvgjGiRNAvTVGqQl8= -github.com/stretchr/testify v1.11.0/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/theupdateframework/go-tuf v0.7.0 h1:CqbQFrWo1ae3/I0UCblSbczevCCbS31Qvs5LdxRWqRI= diff --git a/libs/go-kibana-rest/go.mod b/libs/go-kibana-rest/go.mod index fcadc9441..09736688e 100644 --- a/libs/go-kibana-rest/go.mod +++ b/libs/go-kibana-rest/go.mod @@ -8,7 +8,7 @@ require ( github.com/go-resty/resty/v2 v2.16.5 github.com/google/uuid v1.6.0 github.com/sirupsen/logrus v1.9.3 - github.com/stretchr/testify 
v1.10.0 + github.com/stretchr/testify v1.11.1 github.com/x-cray/logrus-prefixed-formatter v0.5.2 ) diff --git a/libs/go-kibana-rest/go.sum b/libs/go-kibana-rest/go.sum index ec80a3a4b..a43635892 100644 --- a/libs/go-kibana-rest/go.sum +++ b/libs/go-kibana-rest/go.sum @@ -46,8 +46,8 @@ github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVs github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= +github.com/stretchr/testify v1.11.1/go.mod h1:wZwfW3scLgRK+23gO65QZefKpKQRnfz6sD981Nm4B6U= github.com/x-cray/logrus-prefixed-formatter v0.5.2 h1:00txxvfBM9muc0jiLIEAkAcIMJzfthRT6usrui8uGmg= github.com/x-cray/logrus-prefixed-formatter v0.5.2/go.mod h1:2duySbKsL6M18s5GU7VPsoEPHyzalCE06qoARUCeBBE= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= From 5f2154e5b45208b66c7c4880398414b4cf27e49d Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Tue, 2 Sep 2025 17:20:41 +1000 Subject: [PATCH 38/66] Somehow I didn't merge this properly... (#1273) --- internal/elasticsearch/enrich/acc_test.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/internal/elasticsearch/enrich/acc_test.go b/internal/elasticsearch/enrich/acc_test.go index cfbaf346e..0fcb9c5c4 100644 --- a/internal/elasticsearch/enrich/acc_test.go +++ b/internal/elasticsearch/enrich/acc_test.go @@ -8,9 +8,9 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/acctest" "github.com/elastic/terraform-provider-elasticstack/internal/clients" - sdkacctest "github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" - "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" ) func TestAccResourceEnrichPolicyFW(t *testing.T) { From 61a23dc1d1076f188c4ec407ee2fdee0f8802354 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Fri, 5 Sep 2025 13:54:49 +1000 Subject: [PATCH 39/66] Support cross cluster API keys with the existing API key resource. 
(#1252) * Add support for cross cluster api keys to the api key resource * Add docs and example * CHANGELOG.md * Set role_descriptors to unknown for cross cluster keys when the access description changes * make lint --- CHANGELOG.md | 1 + .../elasticsearch_security_api_key.md | 65 ++++ .../resource.tf | 32 ++ internal/clients/elasticsearch/security.go | 55 ++++ .../security/api_key/acc_test.go | 101 ++++++ .../elasticsearch/security/api_key/create.go | 98 +++++- .../elasticsearch/security/api_key/models.go | 138 +++++++- .../security/api_key/resource.go | 4 +- .../elasticsearch/security/api_key/schema.go | 77 ++++- .../set_unknown_if_access_has_changes.go | 57 ++++ .../set_unknown_if_access_has_changes_test.go | 294 ++++++++++++++++++ .../security/api_key/state_upgrade.go | 14 + .../elasticsearch/security/api_key/update.go | 34 +- .../security/api_key/validators.go | 85 +++++ .../security/api_key/validators_test.go | 97 ++++++ internal/models/models.go | 30 ++ 16 files changed, 1157 insertions(+), 25 deletions(-) create mode 100644 internal/elasticsearch/security/api_key/set_unknown_if_access_has_changes.go create mode 100644 internal/elasticsearch/security/api_key/set_unknown_if_access_has_changes_test.go create mode 100644 internal/elasticsearch/security/api_key/validators.go create mode 100644 internal/elasticsearch/security/api_key/validators_test.go diff --git a/CHANGELOG.md b/CHANGELOG.md index 94fbd16a6..ad0798e5f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,7 @@ - Add `ignore_missing_component_templates` to `elasticstack_elasticsearch_index_template` ([#1206](https://github.com/elastic/terraform-provider-elasticstack/pull/1206)) - Migrate `elasticstack_elasticsearch_enrich_policy` resource and data source to Terraform Plugin Framework ([#1220](https://github.com/elastic/terraform-provider-elasticstack/pull/1220)) - Prevent provider panic when a script exists in state, but not in Elasticsearch ([#1218](https://github.com/elastic/terraform-provider-elasticstack/pull/1218)) +- Add support for managing cross_cluster API keys in `elasticstack_elasticsearch_security_api_key` ([#1252](https://github.com/elastic/terraform-provider-elasticstack/pull/1252)) - Allow version changes without a destroy/create cycle with `elasticstack_fleet_integration` ([#1255](https://github.com/elastic/terraform-provider-elasticstack/pull/1255)). This fixes an issue where it was impossible to upgrade integrations which are used by an integration policy. 
- Add `namespace` attribute to `elasticstack_kibana_synthetics_monitor` resource to support setting data stream namespace independently from `space_id` ([#1247](https://github.com/elastic/terraform-provider-elasticstack/pull/1247)) diff --git a/docs/resources/elasticsearch_security_api_key.md b/docs/resources/elasticsearch_security_api_key.md index b014d4bd7..d7389c0b1 100644 --- a/docs/resources/elasticsearch_security_api_key.md +++ b/docs/resources/elasticsearch_security_api_key.md @@ -76,6 +76,38 @@ output "api_key" { value = elasticstack_elasticsearch_security_api_key.api_key sensitive = true } + +# Example: Cross-cluster API key +resource "elasticstack_elasticsearch_security_api_key" "cross_cluster_key" { + name = "My Cross-Cluster API Key" + type = "cross_cluster" + + # Define access permissions for cross-cluster operations + access = { + + # Grant replication access to specific indices + replication = [ + { + names = ["archive-*"] + } + ] + } + + # Set the expiration for the API key + expiration = "30d" + + # Set arbitrary metadata + metadata = jsonencode({ + description = "Cross-cluster key for production environment" + environment = "production" + team = "platform" + }) +} + +output "cross_cluster_api_key" { + value = elasticstack_elasticsearch_security_api_key.cross_cluster_key + sensitive = true +} ``` @@ -87,10 +119,12 @@ output "api_key" { ### Optional +- `access` (Attributes) Access configuration for cross-cluster API keys. Only applicable when type is 'cross_cluster'. (see [below for nested schema](#nestedatt--access)) - `elasticsearch_connection` (Block List, Deprecated) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch_connection)) - `expiration` (String) Expiration time for the API key. By default, API keys never expire. - `metadata` (String) Arbitrary metadata that you want to associate with the API key. - `role_descriptors` (String) Role descriptors for this API key. +- `type` (String) The type of API key. Valid values are 'rest' (default) and 'cross_cluster'. Cross-cluster API keys are used for cross-cluster search and replication. ### Read-Only @@ -100,6 +134,37 @@ output "api_key" { - `id` (String) Internal identifier of the resource. - `key_id` (String) Unique id for this API key. + +### Nested Schema for `access` + +Optional: + +- `replication` (Attributes List) A list of replication configurations for which the cross-cluster API key will have replication privileges. (see [below for nested schema](#nestedatt--access--replication)) +- `search` (Attributes List) A list of search configurations for which the cross-cluster API key will have search privileges. (see [below for nested schema](#nestedatt--access--search)) + + +### Nested Schema for `access.replication` + +Required: + +- `names` (List of String) A list of index patterns for replication. + + + +### Nested Schema for `access.search` + +Required: + +- `names` (List of String) A list of index patterns for search. + +Optional: + +- `allow_restricted_indices` (Boolean) Whether to allow access to restricted indices. +- `field_security` (String) Field-level security configuration in JSON format. +- `query` (String) Query to filter documents for search operations in JSON format. 
+ + + ### Nested Schema for `elasticsearch_connection` diff --git a/examples/resources/elasticstack_elasticsearch_security_api_key/resource.tf b/examples/resources/elasticstack_elasticsearch_security_api_key/resource.tf index f974e130b..53c13099b 100644 --- a/examples/resources/elasticstack_elasticsearch_security_api_key/resource.tf +++ b/examples/resources/elasticstack_elasticsearch_security_api_key/resource.tf @@ -61,3 +61,35 @@ output "api_key" { value = elasticstack_elasticsearch_security_api_key.api_key sensitive = true } + +# Example: Cross-cluster API key +resource "elasticstack_elasticsearch_security_api_key" "cross_cluster_key" { + name = "My Cross-Cluster API Key" + type = "cross_cluster" + + # Define access permissions for cross-cluster operations + access = { + + # Grant replication access to specific indices + replication = [ + { + names = ["archive-*"] + } + ] + } + + # Set the expiration for the API key + expiration = "30d" + + # Set arbitrary metadata + metadata = jsonencode({ + description = "Cross-cluster key for production environment" + environment = "production" + team = "platform" + }) +} + +output "cross_cluster_api_key" { + value = elasticstack_elasticsearch_security_api_key.cross_cluster_key + sensitive = true +} diff --git a/internal/clients/elasticsearch/security.go b/internal/clients/elasticsearch/security.go index 6ce83aa09..bfeb5e3e5 100644 --- a/internal/clients/elasticsearch/security.go +++ b/internal/clients/elasticsearch/security.go @@ -440,3 +440,58 @@ func DeleteApiKey(apiClient *clients.ApiClient, id string) fwdiag.Diagnostics { } return nil } + +func CreateCrossClusterApiKey(apiClient *clients.ApiClient, apikey *models.CrossClusterApiKey) (*models.CrossClusterApiKeyCreateResponse, fwdiag.Diagnostics) { + apikeyBytes, err := json.Marshal(apikey) + if err != nil { + return nil, utils.FrameworkDiagFromError(err) + } + + esClient, err := apiClient.GetESClient() + if err != nil { + return nil, utils.FrameworkDiagFromError(err) + } + res, err := esClient.Security.CreateCrossClusterAPIKey(bytes.NewReader(apikeyBytes)) + if err != nil { + return nil, utils.FrameworkDiagFromError(err) + } + defer res.Body.Close() + if diags := utils.CheckError(res, "Unable to create cross cluster apikey"); diags.HasError() { + return nil, utils.FrameworkDiagsFromSDK(diags) + } + + var apiKey models.CrossClusterApiKeyCreateResponse + + if err := json.NewDecoder(res.Body).Decode(&apiKey); err != nil { + return nil, utils.FrameworkDiagFromError(err) + } + + return &apiKey, nil +} + +func UpdateCrossClusterApiKey(apiClient *clients.ApiClient, apikey models.CrossClusterApiKey) fwdiag.Diagnostics { + id := apikey.ID + + apikey.Expiration = "" + apikey.Name = "" + apikey.ID = "" + apikeyBytes, err := json.Marshal(apikey) + if err != nil { + return utils.FrameworkDiagFromError(err) + } + + esClient, err := apiClient.GetESClient() + if err != nil { + return utils.FrameworkDiagFromError(err) + } + res, err := esClient.Security.UpdateCrossClusterAPIKey(id, bytes.NewReader(apikeyBytes)) + if err != nil { + return utils.FrameworkDiagFromError(err) + } + defer res.Body.Close() + if diags := utils.CheckError(res, "Unable to update cross cluster apikey"); diags.HasError() { + return utils.FrameworkDiagsFromSDK(diags) + } + + return nil +} diff --git a/internal/elasticsearch/security/api_key/acc_test.go b/internal/elasticsearch/security/api_key/acc_test.go index e1ed158bd..be1d13152 100644 --- a/internal/elasticsearch/security/api_key/acc_test.go +++ 
b/internal/elasticsearch/security/api_key/acc_test.go @@ -449,3 +449,104 @@ func checkResourceSecurityApiKeyDestroy(s *terraform.State) error { } return nil } + +func TestAccResourceSecurityApiKeyCrossCluster(t *testing.T) { + // generate a random name + apiKeyName := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + CheckDestroy: checkResourceSecurityApiKeyDestroy, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(api_key.MinVersionWithCrossCluster), + Config: testAccResourceSecurityApiKeyCrossClusterCreate(apiKeyName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "name", apiKeyName), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "type", "cross_cluster"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "access.search.0.names.0", "logs-*"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "access.search.0.names.1", "metrics-*"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "access.replication.0.names.0", "archive-*"), + ), + }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(api_key.MinVersionWithCrossCluster), + Config: testAccResourceSecurityApiKeyCrossClusterUpdate(apiKeyName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "name", apiKeyName), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "type", "cross_cluster"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "access.search.0.names.0", "log-*"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "access.search.0.names.1", "metrics-*"), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_api_key.test", "access.replication.0.names.0", "archives-*"), + ), + }, + }, + }) +} + +func testAccResourceSecurityApiKeyCrossClusterCreate(apiKeyName string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} +} + +resource "elasticstack_elasticsearch_security_api_key" "test" { + name = "%s" + type = "cross_cluster" + + access = { + search = [ + { + names = ["logs-*", "metrics-*"] + } + ] + replication = [ + { + names = ["archive-*"] + } + ] + } + + expiration = "30d" + + metadata = jsonencode({ + description = "Cross-cluster test key" + environment = "test" + }) +} + `, apiKeyName) +} + +func testAccResourceSecurityApiKeyCrossClusterUpdate(apiKeyName string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} +} + +resource "elasticstack_elasticsearch_security_api_key" "test" { + name = "%s" + type = "cross_cluster" + + access = { + search = [ + { + names = ["log-*", "metrics-*"] + } + ] + replication = [ + { + names = ["archives-*"] + } + ] + } + + expiration = "30d" + + metadata = jsonencode({ + description = "Cross-cluster test key" + environment = "test" + }) +} + `, apiKeyName) +} diff --git a/internal/elasticsearch/security/api_key/create.go b/internal/elasticsearch/security/api_key/create.go index da0b95580..18f762133 100644 --- a/internal/elasticsearch/security/api_key/create.go +++ b/internal/elasticsearch/security/api_key/create.go @@ -28,26 +28,18 @@ func (r 
Resource) Create(ctx context.Context, req resource.CreateRequest, resp * return } - apiModel, diags := r.buildApiModel(ctx, planModel, client) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - - putResponse, diags := elasticsearch.CreateApiKey(client, &apiModel) - resp.Diagnostics.Append(diags...) - if putResponse == nil || resp.Diagnostics.HasError() { - return + if planModel.Type.ValueString() == "cross_cluster" { + createDiags := r.createCrossClusterApiKey(ctx, client, &planModel) + resp.Diagnostics.Append(createDiags...) + } else { + createDiags := r.createApiKey(ctx, client, &planModel) + resp.Diagnostics.Append(createDiags...) } - id, sdkDiags := client.ID(ctx, putResponse.Id) - resp.Diagnostics.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) if resp.Diagnostics.HasError() { return } - planModel.ID = basetypes.NewStringValue(id.String()) - planModel.populateFromCreate(*putResponse) resp.Diagnostics.Append(resp.State.Set(ctx, planModel)...) if resp.Diagnostics.HasError() { return @@ -105,3 +97,81 @@ func doesCurrentVersionSupportRestrictionOnApiKey(ctx context.Context, client *c return currentVersion.GreaterThanOrEqual(MinVersionWithRestriction), nil } + +func doesCurrentVersionSupportCrossClusterApiKey(ctx context.Context, client *clients.ApiClient) (bool, diag.Diagnostics) { + currentVersion, diags := client.ServerVersion(ctx) + + if diags.HasError() { + return false, utils.FrameworkDiagsFromSDK(diags) + } + + return currentVersion.GreaterThanOrEqual(MinVersionWithCrossCluster), nil +} + +func (r *Resource) createCrossClusterApiKey(ctx context.Context, client *clients.ApiClient, planModel *tfModel) diag.Diagnostics { + // Check if the current version supports cross-cluster API keys + isSupported, diags := doesCurrentVersionSupportCrossClusterApiKey(ctx, client) + if diags.HasError() { + return diags + } + if !isSupported { + return diag.Diagnostics{ + diag.NewErrorDiagnostic( + "Cross-cluster API keys not supported", + fmt.Sprintf("Cross-cluster API keys are only supported in Elasticsearch version %s and above.", MinVersionWithCrossCluster.String()), + ), + } + } + + // Handle cross-cluster API key creation + crossClusterModel, diags := planModel.toCrossClusterAPIModel(ctx) + if diags.HasError() { + return diags + } + + putResponse, createDiags := elasticsearch.CreateCrossClusterApiKey(client, &crossClusterModel) + if createDiags.HasError() { + return diag.Diagnostics(createDiags) + } + if putResponse == nil { + return diag.Diagnostics{ + diag.NewErrorDiagnostic("API Key Creation Failed", "Cross-cluster API key creation returned nil response"), + } + } + + id, sdkDiags := client.ID(ctx, putResponse.Id) + if sdkDiags.HasError() { + return utils.FrameworkDiagsFromSDK(sdkDiags) + } + + planModel.ID = basetypes.NewStringValue(id.String()) + planModel.populateFromCrossClusterCreate(*putResponse) + return nil +} + +func (r *Resource) createApiKey(ctx context.Context, client *clients.ApiClient, planModel *tfModel) diag.Diagnostics { + // Handle regular API key creation + apiModel, diags := r.buildApiModel(ctx, *planModel, client) + if diags.HasError() { + return diags + } + + putResponse, createDiags := elasticsearch.CreateApiKey(client, &apiModel) + if createDiags.HasError() { + return diag.Diagnostics(createDiags) + } + if putResponse == nil { + return diag.Diagnostics{ + diag.NewErrorDiagnostic("API Key Creation Failed", "API key creation returned nil response"), + } + } + + id, sdkDiags := client.ID(ctx, putResponse.Id) + if sdkDiags.HasError() { + 
return utils.FrameworkDiagsFromSDK(sdkDiags) + } + + planModel.ID = basetypes.NewStringValue(id.String()) + planModel.populateFromCreate(*putResponse) + return nil +} diff --git a/internal/elasticsearch/security/api_key/models.go b/internal/elasticsearch/security/api_key/models.go index 7a9af1ec7..8847f68e9 100644 --- a/internal/elasticsearch/security/api_key/models.go +++ b/internal/elasticsearch/security/api_key/models.go @@ -1,6 +1,7 @@ package api_key import ( + "context" "encoding/json" "github.com/elastic/terraform-provider-elasticstack/internal/clients" @@ -13,15 +14,33 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types/basetypes" ) +type searchModel struct { + Names types.List `tfsdk:"names"` + FieldSecurity jsontypes.Normalized `tfsdk:"field_security"` + Query jsontypes.Normalized `tfsdk:"query"` + AllowRestrictedIndices types.Bool `tfsdk:"allow_restricted_indices"` +} + +type replicationModel struct { + Names types.List `tfsdk:"names"` +} + +type accessModel struct { + Search types.List `tfsdk:"search"` + Replication types.List `tfsdk:"replication"` +} + type tfModel struct { ID types.String `tfsdk:"id"` ElasticsearchConnection types.List `tfsdk:"elasticsearch_connection"` KeyID types.String `tfsdk:"key_id"` Name types.String `tfsdk:"name"` + Type types.String `tfsdk:"type"` RoleDescriptors jsontypes.Normalized `tfsdk:"role_descriptors"` Expiration types.String `tfsdk:"expiration"` ExpirationTimestamp types.Int64 `tfsdk:"expiration_timestamp"` Metadata jsontypes.Normalized `tfsdk:"metadata"` + Access types.Object `tfsdk:"access"` APIKey types.String `tfsdk:"api_key"` Encoded types.String `tfsdk:"encoded"` } @@ -49,10 +68,113 @@ func (model tfModel) toAPIModel() (models.ApiKey, diag.Diagnostics) { } } - if utils.IsKnown(model.RoleDescriptors) { - diags := model.RoleDescriptors.Unmarshal(&apiModel.RolesDescriptors) + diags := model.RoleDescriptors.Unmarshal(&apiModel.RolesDescriptors) + if diags.HasError() { + return models.ApiKey{}, diags + } + + return apiModel, nil +} + +func (model tfModel) toCrossClusterAPIModel(ctx context.Context) (models.CrossClusterApiKey, diag.Diagnostics) { + apiModel := models.CrossClusterApiKey{ + ID: model.KeyID.ValueString(), + Name: model.Name.ValueString(), + Expiration: model.Expiration.ValueString(), + } + + if utils.IsKnown(model.Metadata) { + diags := model.Metadata.Unmarshal(&apiModel.Metadata) if diags.HasError() { - return models.ApiKey{}, diags + return models.CrossClusterApiKey{}, diags + } + } + + // Build the access configuration + access := &models.CrossClusterApiKeyAccess{} + + if utils.IsKnown(model.Access) { + var accessData accessModel + diags := model.Access.As(ctx, &accessData, basetypes.ObjectAsOptions{}) + if diags.HasError() { + return models.CrossClusterApiKey{}, diags + } + + if utils.IsKnown(accessData.Search) { + var searchObjects []searchModel + diags := accessData.Search.ElementsAs(ctx, &searchObjects, false) + if diags.HasError() { + return models.CrossClusterApiKey{}, diags + } + + var searchEntries []models.CrossClusterApiKeyAccessEntry + for _, searchObj := range searchObjects { + entry := models.CrossClusterApiKeyAccessEntry{} + + if utils.IsKnown(searchObj.Names) { + var names []string + diags := searchObj.Names.ElementsAs(ctx, &names, false) + if diags.HasError() { + return models.CrossClusterApiKey{}, diags + } + entry.Names = names + } + + if utils.IsKnown(searchObj.FieldSecurity) && !searchObj.FieldSecurity.IsNull() { + var fieldSecurity models.FieldSecurity + diags := 
json.Unmarshal([]byte(searchObj.FieldSecurity.ValueString()), &fieldSecurity) + if diags != nil { + return models.CrossClusterApiKey{}, diag.Diagnostics{diag.NewErrorDiagnostic("Failed to unmarshal field_security", diags.Error())} + } + entry.FieldSecurity = &fieldSecurity + } + + if utils.IsKnown(searchObj.Query) && !searchObj.Query.IsNull() { + query := searchObj.Query.ValueString() + entry.Query = &query + } + + if utils.IsKnown(searchObj.AllowRestrictedIndices) { + allowRestricted := searchObj.AllowRestrictedIndices.ValueBool() + entry.AllowRestrictedIndices = &allowRestricted + } + + searchEntries = append(searchEntries, entry) + } + if len(searchEntries) > 0 { + access.Search = searchEntries + } + } + + if utils.IsKnown(accessData.Replication) { + var replicationObjects []replicationModel + diags := accessData.Replication.ElementsAs(ctx, &replicationObjects, false) + if diags.HasError() { + return models.CrossClusterApiKey{}, diags + } + + var replicationEntries []models.CrossClusterApiKeyAccessEntry + for _, replicationObj := range replicationObjects { + if utils.IsKnown(replicationObj.Names) { + var names []string + diags := replicationObj.Names.ElementsAs(ctx, &names, false) + if diags.HasError() { + return models.CrossClusterApiKey{}, diags + } + if len(names) > 0 { + replicationEntries = append(replicationEntries, models.CrossClusterApiKeyAccessEntry{ + Names: names, + }) + } + } + } + if len(replicationEntries) > 0 { + access.Replication = replicationEntries + } + } + + if access.Search != nil || access.Replication != nil { + apiModel.Access = access } } @@ -66,6 +188,16 @@ func (model *tfModel) populateFromCreate(apiKey models.ApiKeyCreateResponse) { model.Encoded = basetypes.NewStringValue(apiKey.EncodedKey) } +func (model *tfModel) populateFromCrossClusterCreate(apiKey models.CrossClusterApiKeyCreateResponse) { + model.KeyID = basetypes.NewStringValue(apiKey.Id) + model.Name = basetypes.NewStringValue(apiKey.Name) + model.APIKey = basetypes.NewStringValue(apiKey.Key) + model.Encoded = basetypes.NewStringValue(apiKey.EncodedKey) + if apiKey.Expiration > 0 { + model.ExpirationTimestamp = basetypes.NewInt64Value(apiKey.Expiration) + } +} + func (model *tfModel) populateFromAPI(apiKey models.ApiKeyResponse, serverVersion *version.Version) diag.Diagnostics { model.KeyID = basetypes.NewStringValue(apiKey.Id) model.Name = basetypes.NewStringValue(apiKey.Name) diff --git a/internal/elasticsearch/security/api_key/resource.go b/internal/elasticsearch/security/api_key/resource.go index 5cc9027ef..3a3e0c27d 100644 --- a/internal/elasticsearch/security/api_key/resource.go +++ b/internal/elasticsearch/security/api_key/resource.go @@ -15,11 +15,13 @@ import ( var _ resource.Resource = &Resource{} var _ resource.ResourceWithConfigure = &Resource{} var _ resource.ResourceWithUpgradeState = &Resource{} + var ( MinVersion = version.Must(version.NewVersion("8.0.0")) // Enabled in 8.0 MinVersionWithUpdate = version.Must(version.NewVersion("8.4.0")) MinVersionReturningRoleDescriptors = version.Must(version.NewVersion("8.5.0")) - MinVersionWithRestriction = version.Must(version.NewVersion("8.9.0")) // Enabled in 8.0 + MinVersionWithRestriction = version.Must(version.NewVersion("8.9.0")) // Enabled in 8.0 + MinVersionWithCrossCluster = version.Must(version.NewVersion("8.10.0")) // Cross-cluster API keys enabled in 8.10 ) type Resource struct { diff --git a/internal/elasticsearch/security/api_key/schema.go b/internal/elasticsearch/security/api_key/schema.go index 74f688785..aeb62f0d3 100644 --- 
a/internal/elasticsearch/security/api_key/schema.go +++ b/internal/elasticsearch/security/api_key/schema.go @@ -12,11 +12,18 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" providerschema "github.com/elastic/terraform-provider-elasticstack/internal/schema" + "github.com/elastic/terraform-provider-elasticstack/internal/utils/planmodifiers" ) -const currentSchemaVersion int64 = 1 +const ( + currentSchemaVersion int64 = 2 + restAPIKeyType = "rest" + crossClusterAPIKeyType = "cross_cluster" + defaultAPIKeyType = restAPIKeyType +) func (r *Resource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = r.getSchema(currentSchemaVersion) @@ -25,7 +32,7 @@ func (r *Resource) Schema(_ context.Context, _ resource.SchemaRequest, resp *res func (r *Resource) getSchema(version int64) schema.Schema { return schema.Schema{ Version: version, - Description: "Creates an API key for access without requiring basic authentication. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html", + Description: "Creates an API key for access without requiring basic authentication. Supports both regular API keys and cross-cluster API keys. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-cross-cluster-api-key.html", Blocks: map[string]schema.Block{ "elasticsearch_connection": providerschema.GetEsFWConnectionBlock("elasticsearch_connection", false), }, @@ -55,14 +62,31 @@ func (r *Resource) getSchema(version int64) schema.Schema { stringplanmodifier.RequiresReplace(), }, }, + "type": schema.StringAttribute{ + Description: "The type of API key. Valid values are 'rest' (default) and 'cross_cluster'. Cross-cluster API keys are used for cross-cluster search and replication.", + Optional: true, + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf(defaultAPIKeyType, crossClusterAPIKeyType), + }, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + planmodifiers.StringUseDefaultIfUnknown(defaultAPIKeyType), + }, + }, "role_descriptors": schema.StringAttribute{ Description: "Role descriptors for this API key.", CustomType: jsontypes.NormalizedType{}, Optional: true, Computed: true, + Validators: []validator.String{ + RequiresType(defaultAPIKeyType), + }, PlanModifiers: []planmodifier.String{ stringplanmodifier.UseStateForUnknown(), r.requiresReplaceIfUpdateNotSupported(), + SetUnknownIfAccessHasChanges(), }, }, "expiration": schema.StringAttribute{ @@ -89,6 +113,55 @@ func (r *Resource) getSchema(version int64) schema.Schema { r.requiresReplaceIfUpdateNotSupported(), }, }, + "access": schema.SingleNestedAttribute{ + Description: "Access configuration for cross-cluster API keys. 
Only applicable when type is 'cross_cluster'.", + Optional: true, + Validators: []validator.Object{ + RequiresType(crossClusterAPIKeyType), + }, + Attributes: map[string]schema.Attribute{ + "search": schema.ListNestedAttribute{ + Description: "A list of search configurations for which the cross-cluster API key will have search privileges.", + Optional: true, + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "names": schema.ListAttribute{ + Description: "A list of index patterns for search.", + Required: true, + ElementType: types.StringType, + }, + "field_security": schema.StringAttribute{ + Description: "Field-level security configuration in JSON format.", + Optional: true, + CustomType: jsontypes.NormalizedType{}, + }, + "query": schema.StringAttribute{ + Description: "Query to filter documents for search operations in JSON format.", + Optional: true, + CustomType: jsontypes.NormalizedType{}, + }, + "allow_restricted_indices": schema.BoolAttribute{ + Description: "Whether to allow access to restricted indices.", + Optional: true, + }, + }, + }, + }, + "replication": schema.ListNestedAttribute{ + Description: "A list of replication configurations for which the cross-cluster API key will have replication privileges.", + Optional: true, + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "names": schema.ListAttribute{ + Description: "A list of index patterns for replication.", + Required: true, + ElementType: types.StringType, + }, + }, + }, + }, + }, + }, "api_key": schema.StringAttribute{ Description: "Generated API Key.", Sensitive: true, diff --git a/internal/elasticsearch/security/api_key/set_unknown_if_access_has_changes.go b/internal/elasticsearch/security/api_key/set_unknown_if_access_has_changes.go new file mode 100644 index 000000000..f17e95f91 --- /dev/null +++ b/internal/elasticsearch/security/api_key/set_unknown_if_access_has_changes.go @@ -0,0 +1,57 @@ +package api_key + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +// SetUnknownIfAccessHasChanges returns a plan modifier that sets the current attribute to unknown +// if the access attribute has changed between state and config for cross-cluster API keys. +func SetUnknownIfAccessHasChanges() planmodifier.String { + return setUnknownIfAccessHasChanges{} +} + +type setUnknownIfAccessHasChanges struct{} + +func (s setUnknownIfAccessHasChanges) Description(ctx context.Context) string { + return "Sets the attribute value to unknown if the access attribute has changed for cross-cluster API keys" +} + +func (s setUnknownIfAccessHasChanges) MarkdownDescription(ctx context.Context) string { + return s.Description(ctx) +} + +func (s setUnknownIfAccessHasChanges) PlanModifyString(ctx context.Context, req planmodifier.StringRequest, resp *planmodifier.StringResponse) { + // Only apply this modifier if we have both state and config + if req.State.Raw.IsNull() || req.Config.Raw.IsNull() { + return + } + + // Get the type attribute to check if this is a cross-cluster API key + var keyType types.String + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, path.Root("type"), &keyType)...) 
+ if resp.Diagnostics.HasError() { + return + } + + // Only apply to cross-cluster API keys + if keyType.ValueString() != crossClusterAPIKeyType { + return + } + + // Get the access attribute from state and config to check if it has changed + var stateAccess, configAccess types.Object + resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("access"), &stateAccess)...) + resp.Diagnostics.Append(req.Config.GetAttribute(ctx, path.Root("access"), &configAccess)...) + if resp.Diagnostics.HasError() { + return + } + + // If the access attribute has changed between state and config, set the current attribute to Unknown + if !stateAccess.Equal(configAccess) { + resp.PlanValue = types.StringUnknown() + } +} diff --git a/internal/elasticsearch/security/api_key/set_unknown_if_access_has_changes_test.go b/internal/elasticsearch/security/api_key/set_unknown_if_access_has_changes_test.go new file mode 100644 index 000000000..6a2b7712e --- /dev/null +++ b/internal/elasticsearch/security/api_key/set_unknown_if_access_has_changes_test.go @@ -0,0 +1,294 @@ +package api_key + +import ( + "context" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestSetUnknownIfAccessHasChanges(t *testing.T) { + t.Parallel() + + // Define the schema for testing + testSchema := schema.Schema{ + Attributes: map[string]schema.Attribute{ + "type": schema.StringAttribute{}, + "role_descriptors": schema.StringAttribute{}, + "access": schema.SingleNestedAttribute{ + Attributes: map[string]schema.Attribute{ + "search": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "names": schema.ListAttribute{ElementType: types.StringType}, + }, + }, + }, + "replication": schema.ListNestedAttribute{ + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "names": schema.ListAttribute{ElementType: types.StringType}, + }, + }, + }, + }, + }, + }, + } + + // Define object type for tftypes + objectType := tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "type": tftypes.String, + "role_descriptors": tftypes.String, + "access": tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "search": tftypes.List{ + ElementType: tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "names": tftypes.List{ElementType: tftypes.String}, + }, + }, + }, + "replication": tftypes.List{ + ElementType: tftypes.Object{ + AttributeTypes: map[string]tftypes.Type{ + "names": tftypes.List{ElementType: tftypes.String}, + }, + }, + }, + }, + }, + }, + } + + ctx := context.Background() + modifier := SetUnknownIfAccessHasChanges() + + t.Run("rest API key should not be affected", func(t *testing.T) { + // Create state and config values for rest API key + stateValues := map[string]tftypes.Value{ + "type": tftypes.NewValue(tftypes.String, "rest"), + "role_descriptors": tftypes.NewValue(tftypes.String, `{"test": "value"}`), + "access": tftypes.NewValue(objectType.AttributeTypes["access"], nil), + } + + configValues := map[string]tftypes.Value{ + "type": tftypes.NewValue(tftypes.String, "rest"), + "role_descriptors": 
tftypes.NewValue(tftypes.String, `{"test": "value"}`), + "access": tftypes.NewValue(objectType.AttributeTypes["access"], map[string]tftypes.Value{ + "search": tftypes.NewValue(tftypes.List{ElementType: tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": tftypes.List{ElementType: tftypes.String}}}}, nil), + "replication": tftypes.NewValue(tftypes.List{ElementType: tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": tftypes.List{ElementType: tftypes.String}}}}, nil), + }), + } + + stateRaw := tftypes.NewValue(objectType, stateValues) + configRaw := tftypes.NewValue(objectType, configValues) + + state := tfsdk.State{Raw: stateRaw, Schema: testSchema} + config := tfsdk.Config{Raw: configRaw, Schema: testSchema} + + req := planmodifier.StringRequest{ + Path: path.Root("role_descriptors"), + PlanValue: types.StringValue(`{"test": "value"}`), + ConfigValue: types.StringValue(`{"test": "value"}`), + StateValue: types.StringValue(`{"test": "value"}`), + Config: config, + State: state, + } + + resp := &planmodifier.StringResponse{} + + // Call the plan modifier + modifier.PlanModifyString(ctx, req, resp) + + // Check for errors + require.False(t, resp.Diagnostics.HasError(), "Plan modifier should not have errors: %v", resp.Diagnostics) + + // For rest type, role_descriptors should not be set to unknown + assert.False(t, resp.PlanValue.IsUnknown(), "Plan value should not be unknown for rest API key") + }) + + t.Run("cross_cluster with unchanged access should not set unknown", func(t *testing.T) { + // Create identical access for state and config (no change) + accessValue := tftypes.NewValue(objectType.AttributeTypes["access"], nil) + + stateValues := map[string]tftypes.Value{ + "type": tftypes.NewValue(tftypes.String, "cross_cluster"), + "role_descriptors": tftypes.NewValue(tftypes.String, `{"test": "value"}`), + "access": accessValue, + } + + configValues := map[string]tftypes.Value{ + "type": tftypes.NewValue(tftypes.String, "cross_cluster"), + "role_descriptors": tftypes.NewValue(tftypes.String, `{"test": "value"}`), + "access": accessValue, // Same as state + } + + stateRaw := tftypes.NewValue(objectType, stateValues) + configRaw := tftypes.NewValue(objectType, configValues) + + state := tfsdk.State{Raw: stateRaw, Schema: testSchema} + config := tfsdk.Config{Raw: configRaw, Schema: testSchema} + + req := planmodifier.StringRequest{ + Path: path.Root("role_descriptors"), + PlanValue: types.StringValue(`{"test": "value"}`), + ConfigValue: types.StringValue(`{"test": "value"}`), + StateValue: types.StringValue(`{"test": "value"}`), + Config: config, + State: state, + } + + resp := &planmodifier.StringResponse{} + + // Call the plan modifier + modifier.PlanModifyString(ctx, req, resp) + + // Check for errors + require.False(t, resp.Diagnostics.HasError(), "Plan modifier should not have errors: %v", resp.Diagnostics) + + // For unchanged access, role_descriptors should not be set to unknown + assert.False(t, resp.PlanValue.IsUnknown(), "Plan value should not be unknown when access doesn't change") + }) + + t.Run("cross_cluster with changed access should set unknown", func(t *testing.T) { + // State has null access + stateAccessValue := tftypes.NewValue(objectType.AttributeTypes["access"], nil) + + // Config has non-null access with search configuration + configAccessValue := tftypes.NewValue(objectType.AttributeTypes["access"], map[string]tftypes.Value{ + "search": tftypes.NewValue(tftypes.List{ElementType: tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": 
tftypes.List{ElementType: tftypes.String}}}}, []tftypes.Value{ + tftypes.NewValue(tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": tftypes.List{ElementType: tftypes.String}}}, map[string]tftypes.Value{ + "names": tftypes.NewValue(tftypes.List{ElementType: tftypes.String}, []tftypes.Value{ + tftypes.NewValue(tftypes.String, "index-*"), + }), + }), + }), + "replication": tftypes.NewValue(tftypes.List{ElementType: tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": tftypes.List{ElementType: tftypes.String}}}}, nil), + }) + + stateValues := map[string]tftypes.Value{ + "type": tftypes.NewValue(tftypes.String, "cross_cluster"), + "role_descriptors": tftypes.NewValue(tftypes.String, `{"test": "value"}`), + "access": stateAccessValue, + } + + configValues := map[string]tftypes.Value{ + "type": tftypes.NewValue(tftypes.String, "cross_cluster"), + "role_descriptors": tftypes.NewValue(tftypes.String, `{"test": "value"}`), + "access": configAccessValue, // Different from state + } + + stateRaw := tftypes.NewValue(objectType, stateValues) + configRaw := tftypes.NewValue(objectType, configValues) + + state := tfsdk.State{Raw: stateRaw, Schema: testSchema} + config := tfsdk.Config{Raw: configRaw, Schema: testSchema} + + req := planmodifier.StringRequest{ + Path: path.Root("role_descriptors"), + PlanValue: types.StringValue(`{"test": "value"}`), + ConfigValue: types.StringValue(`{"test": "value"}`), + StateValue: types.StringValue(`{"test": "value"}`), + Config: config, + State: state, + } + + resp := &planmodifier.StringResponse{} + + // Call the plan modifier + modifier.PlanModifyString(ctx, req, resp) + + // Check for errors + require.False(t, resp.Diagnostics.HasError(), "Plan modifier should not have errors: %v", resp.Diagnostics) + + // For changed access, role_descriptors should be set to unknown + assert.True(t, resp.PlanValue.IsUnknown(), "Plan value should be unknown when access changes for cross_cluster type") + }) + + t.Run("cross_cluster with different access configurations should set unknown", func(t *testing.T) { + // State has search configuration + stateAccessValue := tftypes.NewValue(objectType.AttributeTypes["access"], map[string]tftypes.Value{ + "search": tftypes.NewValue(tftypes.List{ElementType: tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": tftypes.List{ElementType: tftypes.String}}}}, []tftypes.Value{ + tftypes.NewValue(tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": tftypes.List{ElementType: tftypes.String}}}, map[string]tftypes.Value{ + "names": tftypes.NewValue(tftypes.List{ElementType: tftypes.String}, []tftypes.Value{ + tftypes.NewValue(tftypes.String, "old-index-*"), + }), + }), + }), + "replication": tftypes.NewValue(tftypes.List{ElementType: tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": tftypes.List{ElementType: tftypes.String}}}}, nil), + }) + + // Config has different search configuration + configAccessValue := tftypes.NewValue(objectType.AttributeTypes["access"], map[string]tftypes.Value{ + "search": tftypes.NewValue(tftypes.List{ElementType: tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": tftypes.List{ElementType: tftypes.String}}}}, []tftypes.Value{ + tftypes.NewValue(tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": tftypes.List{ElementType: tftypes.String}}}, map[string]tftypes.Value{ + "names": tftypes.NewValue(tftypes.List{ElementType: tftypes.String}, []tftypes.Value{ + tftypes.NewValue(tftypes.String, "new-index-*"), + }), + }), + }), + "replication": 
tftypes.NewValue(tftypes.List{ElementType: tftypes.Object{AttributeTypes: map[string]tftypes.Type{"names": tftypes.List{ElementType: tftypes.String}}}}, nil), + }) + + stateValues := map[string]tftypes.Value{ + "type": tftypes.NewValue(tftypes.String, "cross_cluster"), + "role_descriptors": tftypes.NewValue(tftypes.String, `{"test": "value"}`), + "access": stateAccessValue, + } + + configValues := map[string]tftypes.Value{ + "type": tftypes.NewValue(tftypes.String, "cross_cluster"), + "role_descriptors": tftypes.NewValue(tftypes.String, `{"test": "value"}`), + "access": configAccessValue, // Different from state + } + + stateRaw := tftypes.NewValue(objectType, stateValues) + configRaw := tftypes.NewValue(objectType, configValues) + + state := tfsdk.State{Raw: stateRaw, Schema: testSchema} + config := tfsdk.Config{Raw: configRaw, Schema: testSchema} + + req := planmodifier.StringRequest{ + Path: path.Root("role_descriptors"), + PlanValue: types.StringValue(`{"test": "value"}`), + ConfigValue: types.StringValue(`{"test": "value"}`), + StateValue: types.StringValue(`{"test": "value"}`), + Config: config, + State: state, + } + + resp := &planmodifier.StringResponse{} + + // Call the plan modifier + modifier.PlanModifyString(ctx, req, resp) + + // Check for errors + require.False(t, resp.Diagnostics.HasError(), "Plan modifier should not have errors: %v", resp.Diagnostics) + + // For changed access configuration, role_descriptors should be set to unknown + assert.True(t, resp.PlanValue.IsUnknown(), "Plan value should be unknown when access configuration changes") + }) + + t.Run("basic functionality tests", func(t *testing.T) { + // Test that the modifier can be created without errors + modifier := SetUnknownIfAccessHasChanges() + assert.NotNil(t, modifier, "Plan modifier should be created successfully") + + // Test the description method + desc := modifier.Description(ctx) + assert.NotEmpty(t, desc, "Description should not be empty") + + // Test the markdown description method + markdownDesc := modifier.MarkdownDescription(ctx) + assert.NotEmpty(t, markdownDesc, "Markdown description should not be empty") + }) +} diff --git a/internal/elasticsearch/security/api_key/state_upgrade.go b/internal/elasticsearch/security/api_key/state_upgrade.go index b9072fdde..9e31ec7a7 100644 --- a/internal/elasticsearch/security/api_key/state_upgrade.go +++ b/internal/elasticsearch/security/api_key/state_upgrade.go @@ -23,6 +23,20 @@ func (r *Resource) UpgradeState(context.Context) map[int64]resource.StateUpgrade model.Expiration = basetypes.NewStringNull() } + resp.State.Set(ctx, model) + }, + }, + 1: { + PriorSchema: utils.Pointer(r.getSchema(1)), + StateUpgrader: func(ctx context.Context, req resource.UpgradeStateRequest, resp *resource.UpgradeStateResponse) { + var model tfModel + resp.Diagnostics.Append(req.State.Get(ctx, &model)...) 
+ if resp.Diagnostics.HasError() { + return + } + + model.Type = basetypes.NewStringValue(defaultAPIKeyType) + resp.State.Set(ctx, model) }, }, diff --git a/internal/elasticsearch/security/api_key/update.go b/internal/elasticsearch/security/api_key/update.go index 6cd1d3cb1..ea0dd3c84 100644 --- a/internal/elasticsearch/security/api_key/update.go +++ b/internal/elasticsearch/security/api_key/update.go @@ -5,6 +5,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" + "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/resource" ) @@ -21,13 +22,14 @@ func (r *Resource) Update(ctx context.Context, req resource.UpdateRequest, resp return } - apiModel, diags := r.buildApiModel(ctx, planModel, client) - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return + if planModel.Type.ValueString() == "cross_cluster" { + updateDiags := r.updateCrossClusterApiKey(ctx, client, planModel) + resp.Diagnostics.Append(updateDiags...) + } else { + updateDiags := r.updateApiKey(ctx, client, planModel) + resp.Diagnostics.Append(updateDiags...) } - resp.Diagnostics.Append(elasticsearch.UpdateApiKey(client, apiModel)...) if resp.Diagnostics.HasError() { return } @@ -40,3 +42,25 @@ func (r *Resource) Update(ctx context.Context, req resource.UpdateRequest, resp resp.Diagnostics.Append(resp.State.Set(ctx, *finalModel)...) } + +func (r *Resource) updateCrossClusterApiKey(ctx context.Context, client *clients.ApiClient, planModel tfModel) diag.Diagnostics { + // Handle cross-cluster API key update + crossClusterModel, modelDiags := planModel.toCrossClusterAPIModel(ctx) + if modelDiags.HasError() { + return modelDiags + } + + updateDiags := elasticsearch.UpdateCrossClusterApiKey(client, crossClusterModel) + return diag.Diagnostics(updateDiags) +} + +func (r *Resource) updateApiKey(ctx context.Context, client *clients.ApiClient, planModel tfModel) diag.Diagnostics { + // Handle regular API key update + apiModel, modelDiags := r.buildApiModel(ctx, planModel, client) + if modelDiags.HasError() { + return modelDiags + } + + updateDiags := elasticsearch.UpdateApiKey(client, apiModel) + return diag.Diagnostics(updateDiags) +} diff --git a/internal/elasticsearch/security/api_key/validators.go b/internal/elasticsearch/security/api_key/validators.go new file mode 100644 index 000000000..d92792d14 --- /dev/null +++ b/internal/elasticsearch/security/api_key/validators.go @@ -0,0 +1,85 @@ +package api_key + +import ( + "context" + "fmt" + + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" +) + +var ( + _ validator.String = requiresTypeValidator{} + _ validator.Object = requiresTypeValidator{} +) + +// requiresTypeValidator validates that a string attribute is only provided +// when the resource has a specific value for the "type" attribute. +type requiresTypeValidator struct { + expectedType string +} + +// RequiresType returns a validator which ensures that the configured attribute +// is only provided when the "type" attribute matches the expected value. 
+func RequiresType(expectedType string) requiresTypeValidator { + return requiresTypeValidator{ + expectedType: expectedType, + } +} + +func (validator requiresTypeValidator) Description(_ context.Context) string { + return fmt.Sprintf("Ensures that the attribute is only provided when type=%s", validator.expectedType) +} + +func (validator requiresTypeValidator) MarkdownDescription(ctx context.Context) string { + return validator.Description(ctx) +} + +// validateType contains the common validation logic for both string and object validators +func (validator requiresTypeValidator) validateType(ctx context.Context, config tfsdk.Config, attrPath path.Path, diagnostics *diag.Diagnostics) bool { + // Get the type attribute value from the same configuration object + var typeAttr *string + diags := config.GetAttribute(ctx, path.Root("type"), &typeAttr) + diagnostics.Append(diags...) + if diagnostics.HasError() { + return false + } + + // If type is unknown or empty, we can't validate + if typeAttr == nil { + return true + } + + // Check if the current type matches the expected type + if *typeAttr != validator.expectedType { + diagnostics.AddAttributeError( + attrPath, + fmt.Sprintf("Attribute not valid for API key type '%s'", *typeAttr), + fmt.Sprintf("The %s attribute can only be used when type='%s', but type='%s' was specified.", + attrPath.String(), validator.expectedType, *typeAttr), + ) + return false + } + + return true +} + +func (validator requiresTypeValidator) ValidateObject(ctx context.Context, req validator.ObjectRequest, resp *validator.ObjectResponse) { + // If the attribute is null or unknown, there's nothing to validate + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + validator.validateType(ctx, req.Config, req.Path, &resp.Diagnostics) +} + +func (validator requiresTypeValidator) ValidateString(ctx context.Context, req validator.StringRequest, resp *validator.StringResponse) { + // If the attribute is null or unknown, there's nothing to validate + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + validator.validateType(ctx, req.Config, req.Path, &resp.Diagnostics) +} diff --git a/internal/elasticsearch/security/api_key/validators_test.go b/internal/elasticsearch/security/api_key/validators_test.go new file mode 100644 index 000000000..b52965177 --- /dev/null +++ b/internal/elasticsearch/security/api_key/validators_test.go @@ -0,0 +1,97 @@ +package api_key + +import ( + "context" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +func TestRequiresTypeValidator(t *testing.T) { + t.Parallel() + + type testCase struct { + name string + typeValue string + attrValue string + expectError bool + } + + testCases := []testCase{ + { + name: "role_descriptors with type=rest should be valid", + typeValue: "rest", + attrValue: `{"role": {"cluster": ["all"]}}`, + expectError: false, + }, + { + name: "role_descriptors with type=cross_cluster should be invalid", + typeValue: "cross_cluster", + attrValue: `{"role": {"cluster": ["all"]}}`, + expectError: true, + }, + { + name: "null role_descriptors with type=cross_cluster should be valid", + typeValue: "cross_cluster", + attrValue: "", + expectError: false, + }, + 
} + + for _, testCase := range testCases { + t.Run(testCase.name, func(t *testing.T) { + // Create test config values + configValues := map[string]tftypes.Value{ + "type": tftypes.NewValue(tftypes.String, testCase.typeValue), + } + + if testCase.attrValue != "" { + configValues["role_descriptors"] = tftypes.NewValue(tftypes.String, testCase.attrValue) + } else { + configValues["role_descriptors"] = tftypes.NewValue(tftypes.String, nil) + } + + config := tfsdk.Config{ + Raw: tftypes.NewValue(tftypes.Object{AttributeTypes: map[string]tftypes.Type{ + "type": tftypes.String, + "role_descriptors": tftypes.String, + }}, configValues), + Schema: schema.Schema{ + Attributes: map[string]schema.Attribute{ + "type": schema.StringAttribute{}, + "role_descriptors": schema.StringAttribute{}, + }, + }, + } + + var configValue types.String + if testCase.attrValue != "" { + configValue = types.StringValue(testCase.attrValue) + } else { + configValue = types.StringNull() + } + + request := validator.StringRequest{ + Path: path.Root("role_descriptors"), + ConfigValue: configValue, + Config: config, + } + + response := &validator.StringResponse{} + RequiresType("rest").ValidateString(context.Background(), request, response) + + if testCase.expectError && !response.Diagnostics.HasError() { + t.Errorf("Expected error but got none") + } + + if !testCase.expectError && response.Diagnostics.HasError() { + t.Errorf("Expected no error but got: %v", response.Diagnostics) + } + }) + } +} diff --git a/internal/models/models.go b/internal/models/models.go index 5608df749..b95efbc64 100644 --- a/internal/models/models.go +++ b/internal/models/models.go @@ -126,12 +126,42 @@ type ApiKeyCreateResponse struct { type ApiKeyResponse struct { ApiKey + Type string `json:"type,omitempty"` RolesDescriptors map[string]ApiKeyRoleDescriptor `json:"role_descriptors,omitempty"` Expiration int64 `json:"expiration,omitempty"` Id string `json:"id,omitempty"` Key string `json:"api_key,omitempty"` EncodedKey string `json:"encoded,omitempty"` Invalidated bool `json:"invalidated,omitempty"` + Access *CrossClusterApiKeyAccess `json:"access,omitempty"` +} + +type CrossClusterApiKeyAccess struct { + Search []CrossClusterApiKeyAccessEntry `json:"search,omitempty"` + Replication []CrossClusterApiKeyAccessEntry `json:"replication,omitempty"` +} + +type CrossClusterApiKeyAccessEntry struct { + Names []string `json:"names"` + FieldSecurity *FieldSecurity `json:"field_security,omitempty"` + Query *string `json:"query,omitempty"` + AllowRestrictedIndices *bool `json:"allow_restricted_indices,omitempty"` +} + +type CrossClusterApiKey struct { + ID string `json:"-"` + Name string `json:"name,omitempty"` + Expiration string `json:"expiration,omitempty"` + Access *CrossClusterApiKeyAccess `json:"access,omitempty"` + Metadata map[string]interface{} `json:"metadata,omitempty"` +} + +type CrossClusterApiKeyCreateResponse struct { + Id string `json:"id,omitempty"` + Name string `json:"name"` + Key string `json:"api_key,omitempty"` + EncodedKey string `json:"encoded,omitempty"` + Expiration int64 `json:"expiration,omitempty"` } type IndexPerms struct { From b33ee5020a5fadcff8ffd1204bdb332730d12c12 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sun, 7 Sep 2025 19:25:09 +0000 Subject: [PATCH 40/66] chore(deps): update docker.elastic.co/elasticsearch/elasticsearch docker tag to v9.1.3 (#1271) Co-authored-by: elastic-renovate-prod[bot] 
<174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- libs/go-kibana-rest/docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/go-kibana-rest/docker-compose.yml b/libs/go-kibana-rest/docker-compose.yml index 5ac0162e0..9374bdd1c 100644 --- a/libs/go-kibana-rest/docker-compose.yml +++ b/libs/go-kibana-rest/docker-compose.yml @@ -1,6 +1,6 @@ services: elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:9.1.2@sha256:d1a8016cf55be8ffec635ed69f5a9acb0c459db35b46a4549ec5b2847a2f170a + image: docker.elastic.co/elasticsearch/elasticsearch:9.1.3@sha256:d620e80b7222e32eff42ac0fc614a0a5753f6ce9859df33b9ddea49c3bf5fb01 environment: cluster.name: test discovery.type: single-node From 0baefee5696b49dcd2718523d48378bf900b6ec0 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Mon, 8 Sep 2025 07:48:31 +1000 Subject: [PATCH 41/66] Migrate Elasticsearch role mapping resource and data source to Plugin Framework (#1280) * Initial plan * Implement Plugin Framework role mapping resource and data source Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Apply code formatting * Remove old SDKv2 registrations and generate docs * Address review comments: extract read logic, use normalized JSON types, use framework diagnostics, and cleanup Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Address review comments: simplify diagnostics handling and add normalized JSON types to data source Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Address review feedback: use utils.SetValueFrom and utils.SetTypeAs for role handling Co-authored-by: tobio <444668+tobio@users.noreply.github.com> --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: tobio <444668+tobio@users.noreply.github.com> --- CHANGELOG.md | 1 + .../elasticsearch_security_role_mapping.md | 2 +- .../elasticsearch_security_role_mapping.md | 2 +- internal/clients/elasticsearch/security.go | 53 +++-- .../elasticsearch/security/role_mapping.go | 198 ------------------ .../acc_test.go} | 118 +++++++---- .../security/role_mapping/create.go | 11 + .../security/role_mapping/data_source.go | 116 ++++++++++ .../security/role_mapping/data_source_test.go | 54 +++++ .../security/role_mapping/delete.go | 31 +++ .../security/role_mapping/models.go | 17 ++ .../security/role_mapping/read.go | 116 ++++++++++ .../security/role_mapping/resource.go | 26 +++ .../security/role_mapping/schema.go | 84 ++++++++ .../security/role_mapping/update.go | 92 ++++++++ .../security/role_mapping_data_source.go | 77 ------- provider/plugin_framework.go | 3 + provider/provider.go | 30 ++- 18 files changed, 676 insertions(+), 355 deletions(-) delete mode 100644 internal/elasticsearch/security/role_mapping.go rename internal/elasticsearch/security/{role_mapping_test.go => role_mapping/acc_test.go} (72%) create mode 100644 internal/elasticsearch/security/role_mapping/create.go create mode 100644 internal/elasticsearch/security/role_mapping/data_source.go create mode 100644 internal/elasticsearch/security/role_mapping/data_source_test.go create mode 100644 internal/elasticsearch/security/role_mapping/delete.go create mode 100644 internal/elasticsearch/security/role_mapping/models.go create mode 100644 internal/elasticsearch/security/role_mapping/read.go create mode 100644 internal/elasticsearch/security/role_mapping/resource.go create mode 100644 internal/elasticsearch/security/role_mapping/schema.go 
create mode 100644 internal/elasticsearch/security/role_mapping/update.go delete mode 100644 internal/elasticsearch/security/role_mapping_data_source.go diff --git a/CHANGELOG.md b/CHANGELOG.md index ad0798e5f..3ffbadeef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,6 +8,7 @@ - Add support for managing cross_cluster API keys in `elasticstack_elasticsearch_security_api_key` ([#1252](https://github.com/elastic/terraform-provider-elasticstack/pull/1252)) - Allow version changes without a destroy/create cycle with `elasticstack_fleet_integration` ([#1255](https://github.com/elastic/terraform-provider-elasticstack/pull/1255)). This fixes an issue where it was impossible to upgrade integrations which are used by an integration policy. - Add `namespace` attribute to `elasticstack_kibana_synthetics_monitor` resource to support setting data stream namespace independently from `space_id` ([#1247](https://github.com/elastic/terraform-provider-elasticstack/pull/1247)) +- Migrate `elasticstack_elasticsearch_security_role_mapping` resource and data source to Terraform Plugin Framework ([#1279](https://github.com/elastic/terraform-provider-elasticstack/pull/1279)) ## [0.11.17] - 2025-07-21 diff --git a/docs/data-sources/elasticsearch_security_role_mapping.md b/docs/data-sources/elasticsearch_security_role_mapping.md index ae93d8aaf..01c6ca5ab 100644 --- a/docs/data-sources/elasticsearch_security_role_mapping.md +++ b/docs/data-sources/elasticsearch_security_role_mapping.md @@ -35,7 +35,7 @@ output "user" { ### Optional -- `elasticsearch_connection` (Block List, Max: 1, Deprecated) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. (see [below for nested schema](#nestedblock--elasticsearch_connection)) +- `elasticsearch_connection` (Block List, Deprecated) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch_connection)) ### Read-Only diff --git a/docs/resources/elasticsearch_security_role_mapping.md b/docs/resources/elasticsearch_security_role_mapping.md index c3248fe24..12758977d 100644 --- a/docs/resources/elasticsearch_security_role_mapping.md +++ b/docs/resources/elasticsearch_security_role_mapping.md @@ -46,7 +46,7 @@ output "role" { ### Optional -- `elasticsearch_connection` (Block List, Max: 1, Deprecated) Elasticsearch connection configuration block. This property will be removed in a future provider version. Configure the Elasticsearch connection via the provider configuration instead. (see [below for nested schema](#nestedblock--elasticsearch_connection)) +- `elasticsearch_connection` (Block List, Deprecated) Elasticsearch connection configuration block. (see [below for nested schema](#nestedblock--elasticsearch_connection)) - `enabled` (Boolean) Mappings that have `enabled` set to `false` are ignored when role mapping is performed. - `metadata` (String) Additional metadata that helps define which roles are assigned to each user. Keys beginning with `_` are reserved for system usage. - `role_templates` (String) A list of mustache templates that will be evaluated to determine the roles names that should granted to the users that match the role mapping rules. 
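For context on the resource being migrated here, a minimal configuration sketch follows. The attribute names (`name`, `enabled`, `roles`, `rules`, `metadata`) come from the schema shown above; the realm name and role values are illustrative assumptions only and are not part of this change:

```terraform
resource "elasticstack_elasticsearch_security_role_mapping" "example" {
  name    = "example-mapping"
  enabled = true

  # Grant the listed roles to users authenticated through the "ldap1" realm
  # (the realm name and role are placeholder values for illustration).
  roles = ["kibana_user"]
  rules = jsonencode({
    any = [
      { field = { "realm.name" = "ldap1" } }
    ]
  })

  metadata = jsonencode({ version = 1 })
}
```

The `rules` body uses the role-mapping rule DSL, so arbitrary `any`/`all`/`field` conditions can be expressed there; `roles` and `role_templates` remain mutually exclusive, exactly as in the SDKv2 schema being removed below.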
diff --git a/internal/clients/elasticsearch/security.go b/internal/clients/elasticsearch/security.go index bfeb5e3e5..d35cd7bc2 100644 --- a/internal/clients/elasticsearch/security.go +++ b/internal/clients/elasticsearch/security.go @@ -252,73 +252,88 @@ func DeleteRole(ctx context.Context, apiClient *clients.ApiClient, rolename stri return diags } -func PutRoleMapping(ctx context.Context, apiClient *clients.ApiClient, roleMapping *models.RoleMapping) diag.Diagnostics { +func PutRoleMapping(ctx context.Context, apiClient *clients.ApiClient, roleMapping *models.RoleMapping) fwdiag.Diagnostics { + var diags fwdiag.Diagnostics roleMappingBytes, err := json.Marshal(roleMapping) if err != nil { - return diag.FromErr(err) + diags.AddError("Unable to marshal role mapping", err.Error()) + return diags } esClient, err := apiClient.GetESClient() if err != nil { - return diag.FromErr(err) + diags.AddError("Unable to get Elasticsearch client", err.Error()) + return diags } res, err := esClient.Security.PutRoleMapping(roleMapping.Name, bytes.NewReader(roleMappingBytes), esClient.Security.PutRoleMapping.WithContext(ctx)) if err != nil { - return diag.FromErr(err) + diags.AddError("Unable to put role mapping", err.Error()) + return diags } defer res.Body.Close() - if diags := utils.CheckError(res, "Unable to put role mapping"); diags.HasError() { + if sdkDiags := utils.CheckError(res, "Unable to put role mapping"); sdkDiags.HasError() { + diags.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) return diags } - return nil + return diags } -func GetRoleMapping(ctx context.Context, apiClient *clients.ApiClient, roleMappingName string) (*models.RoleMapping, diag.Diagnostics) { +func GetRoleMapping(ctx context.Context, apiClient *clients.ApiClient, roleMappingName string) (*models.RoleMapping, fwdiag.Diagnostics) { + var diags fwdiag.Diagnostics esClient, err := apiClient.GetESClient() if err != nil { - return nil, diag.FromErr(err) + diags.AddError("Unable to get Elasticsearch client", err.Error()) + return nil, diags } req := esClient.Security.GetRoleMapping.WithName(roleMappingName) res, err := esClient.Security.GetRoleMapping(req, esClient.Security.GetRoleMapping.WithContext(ctx)) if err != nil { - return nil, diag.FromErr(err) + diags.AddError("Unable to get role mapping", err.Error()) + return nil, diags } defer res.Body.Close() if res.StatusCode == http.StatusNotFound { - return nil, nil + return nil, diags } - if diags := utils.CheckError(res, "Unable to get a role mapping."); diags.HasError() { + if sdkDiags := utils.CheckError(res, "Unable to get a role mapping."); sdkDiags.HasError() { + diags.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) 
return nil, diags } roleMappings := make(map[string]models.RoleMapping) if err := json.NewDecoder(res.Body).Decode(&roleMappings); err != nil { - return nil, diag.FromErr(err) + diags.AddError("Unable to decode role mapping response", err.Error()) + return nil, diags } if roleMapping, ok := roleMappings[roleMappingName]; ok { roleMapping.Name = roleMappingName - return &roleMapping, nil + return &roleMapping, diags } - return nil, diag.Errorf("unable to find role mapping '%s' in the cluster", roleMappingName) + diags.AddError("Role mapping not found", fmt.Sprintf("unable to find role mapping '%s' in the cluster", roleMappingName)) + return nil, diags } -func DeleteRoleMapping(ctx context.Context, apiClient *clients.ApiClient, roleMappingName string) diag.Diagnostics { +func DeleteRoleMapping(ctx context.Context, apiClient *clients.ApiClient, roleMappingName string) fwdiag.Diagnostics { + var diags fwdiag.Diagnostics esClient, err := apiClient.GetESClient() if err != nil { - return diag.FromErr(err) + diags.AddError("Unable to get Elasticsearch client", err.Error()) + return diags } res, err := esClient.Security.DeleteRoleMapping(roleMappingName, esClient.Security.DeleteRoleMapping.WithContext(ctx)) if err != nil { - return diag.FromErr(err) + diags.AddError("Unable to delete role mapping", err.Error()) + return diags } defer res.Body.Close() - if diags := utils.CheckError(res, "Unable to delete role mapping"); diags.HasError() { + if sdkDiags := utils.CheckError(res, "Unable to delete role mapping"); sdkDiags.HasError() { + diags.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) return diags } - return nil + return diags } func CreateApiKey(apiClient *clients.ApiClient, apikey *models.ApiKey) (*models.ApiKeyCreateResponse, fwdiag.Diagnostics) { diff --git a/internal/elasticsearch/security/role_mapping.go b/internal/elasticsearch/security/role_mapping.go deleted file mode 100644 index fa1f7c780..000000000 --- a/internal/elasticsearch/security/role_mapping.go +++ /dev/null @@ -1,198 +0,0 @@ -package security - -import ( - "context" - "encoding/json" - "fmt" - - "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" - "github.com/elastic/terraform-provider-elasticstack/internal/models" - "github.com/elastic/terraform-provider-elasticstack/internal/utils" - "github.com/hashicorp/terraform-plugin-log/tflog" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" -) - -func ResourceRoleMapping() *schema.Resource { - roleMappingSchema := map[string]*schema.Schema{ - "id": { - Description: "Internal identifier of the resource", - Type: schema.TypeString, - Computed: true, - }, - "name": { - Type: schema.TypeString, - Required: true, - Description: "The distinct name that identifies the role mapping, used solely as an identifier.", - ForceNew: true, - }, - "enabled": { - Type: schema.TypeBool, - Optional: true, - Default: true, - Description: "Mappings that have `enabled` set to `false` are ignored when role mapping is performed.", - }, - "rules": { - Type: schema.TypeString, - Required: true, - DiffSuppressFunc: utils.DiffJsonSuppress, - Description: "The rules that determine which users should be matched by the mapping. 
A rule is a logical condition that is expressed by using a JSON DSL.", - }, - "roles": { - Type: schema.TypeSet, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Description: "A list of role names that are granted to the users that match the role mapping rules.", - Optional: true, - ConflictsWith: []string{"role_templates"}, - ExactlyOneOf: []string{"roles", "role_templates"}, - }, - "role_templates": { - Type: schema.TypeString, - DiffSuppressFunc: utils.DiffJsonSuppress, - Description: "A list of mustache templates that will be evaluated to determine the roles names that should granted to the users that match the role mapping rules.", - Optional: true, - ConflictsWith: []string{"roles"}, - ExactlyOneOf: []string{"roles", "role_templates"}, - }, - "metadata": { - Type: schema.TypeString, - Optional: true, - Default: "{}", - DiffSuppressFunc: utils.DiffJsonSuppress, - Description: "Additional metadata that helps define which roles are assigned to each user. Keys beginning with `_` are reserved for system usage.", - }, - } - - utils.AddConnectionSchema(roleMappingSchema) - - return &schema.Resource{ - Description: "Manage role mappings. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role-mapping.html", - - CreateContext: resourceSecurityRoleMappingPut, - UpdateContext: resourceSecurityRoleMappingPut, - ReadContext: resourceSecurityRoleMappingRead, - DeleteContext: resourceSecurityRoleMappingDelete, - - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - - Schema: roleMappingSchema, - } -} - -func resourceSecurityRoleMappingPut(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - roleMappingName := d.Get("name").(string) - id, diags := client.ID(ctx, roleMappingName) - if diags.HasError() { - return diags - } - - var rules map[string]interface{} - if err := json.Unmarshal([]byte(d.Get("rules").(string)), &rules); err != nil { - return diag.FromErr(err) - } - - var roleTemplates []map[string]interface{} - if t, ok := d.GetOk("role_templates"); ok && t.(string) != "" { - if err := json.Unmarshal([]byte(t.(string)), &roleTemplates); err != nil { - return diag.FromErr(err) - } - } - - roleMapping := models.RoleMapping{ - Name: roleMappingName, - Enabled: d.Get("enabled").(bool), - Roles: utils.ExpandStringSet(d.Get("roles").(*schema.Set)), - RoleTemplates: roleTemplates, - Rules: rules, - Metadata: json.RawMessage(d.Get("metadata").(string)), - } - if diags := elasticsearch.PutRoleMapping(ctx, client, &roleMapping); diags.HasError() { - return diags - } - d.SetId(id.String()) - - return resourceSecurityRoleMappingRead(ctx, d, meta) -} - -func resourceSecurityRoleMappingRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - resourceID, diags := clients.ResourceIDFromStr(d.Id()) - if diags.HasError() { - return diags - } - roleMapping, diags := elasticsearch.GetRoleMapping(ctx, client, resourceID) - if roleMapping == nil && diags == nil { - tflog.Warn(ctx, fmt.Sprintf(`Role mapping "%s" not found, removing from state`, resourceID)) - d.SetId("") - return diags - } - if diags.HasError() { - return diags - } - - rules, err := json.Marshal(roleMapping.Rules) - if err != nil { - return diag.FromErr(err) - } - - metadata, err := 
json.Marshal(roleMapping.Metadata) - if err != nil { - return diag.FromErr(err) - } - - if err := d.Set("name", roleMapping.Name); err != nil { - return diag.FromErr(err) - } - if len(roleMapping.Roles) > 0 { - if err := d.Set("roles", roleMapping.Roles); err != nil { - return diag.FromErr(err) - } - } - if len(roleMapping.RoleTemplates) > 0 { - roleTemplates, err := json.Marshal(roleMapping.RoleTemplates) - if err != nil { - return diag.FromErr(err) - } - - if err := d.Set("role_templates", string(roleTemplates)); err != nil { - return diag.FromErr(err) - } - } - if err := d.Set("enabled", roleMapping.Enabled); err != nil { - return diag.FromErr(err) - } - if err := d.Set("rules", string(rules)); err != nil { - return diag.FromErr(err) - } - if err := d.Set("metadata", string(metadata)); err != nil { - return diag.FromErr(err) - } - return nil -} - -func resourceSecurityRoleMappingDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - resourceID, diags := clients.ResourceIDFromStr(d.Id()) - if diags.HasError() { - return diags - } - if diags := elasticsearch.DeleteRoleMapping(ctx, client, resourceID); diags.HasError() { - return diags - } - return nil -} diff --git a/internal/elasticsearch/security/role_mapping_test.go b/internal/elasticsearch/security/role_mapping/acc_test.go similarity index 72% rename from internal/elasticsearch/security/role_mapping_test.go rename to internal/elasticsearch/security/role_mapping/acc_test.go index 83c06e162..24dc5d49e 100644 --- a/internal/elasticsearch/security/role_mapping_test.go +++ b/internal/elasticsearch/security/role_mapping/acc_test.go @@ -1,4 +1,4 @@ -package security_test +package role_mapping_test import ( "fmt" @@ -13,7 +13,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/terraform" ) -func TestResourceRoleMapping(t *testing.T) { +func TestAccResourceSecurityRoleMapping(t *testing.T) { roleMappingName := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) resource.Test(t, resource.TestCase{ PreCheck: func() { acctest.PreCheck(t) }, @@ -54,6 +54,68 @@ func TestResourceRoleMapping(t *testing.T) { }) } +func TestAccResourceSecurityRoleMappingFromSDK(t *testing.T) { + roleMappingName := sdkacctest.RandStringFromCharSet(10, sdkacctest.CharSetAlphaNum) + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + Steps: []resource.TestStep{ + { + // Create the role mapping with the last provider version where the role mapping resource was built on the SDK + ExternalProviders: map[string]resource.ExternalProvider{ + "elasticstack": { + Source: "elastic/elasticstack", + VersionConstraint: "0.11.17", + }, + }, + Config: testAccResourceSecurityRoleMappingCreate(roleMappingName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_role_mapping.test", "name", roleMappingName), + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_role_mapping.test", "enabled", "true"), + checks.TestCheckResourceListAttr("elasticstack_elasticsearch_security_role_mapping.test", "roles", []string{"admin"}), + ), + }, + { + ProtoV6ProviderFactories: acctest.Providers, + Config: testAccResourceSecurityRoleMappingCreate(roleMappingName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_role_mapping.test", "name", roleMappingName), + 
resource.TestCheckResourceAttr("elasticstack_elasticsearch_security_role_mapping.test", "enabled", "true"), + checks.TestCheckResourceListAttr("elasticstack_elasticsearch_security_role_mapping.test", "roles", []string{"admin"}), + ), + }, + }, + }) +} + +func checkResourceSecurityRoleMappingDestroy(s *terraform.State) error { + client, err := clients.NewAcceptanceTestingClient() + if err != nil { + return err + } + + for _, rs := range s.RootModule().Resources { + if rs.Type != "elasticstack_elasticsearch_security_role_mapping" { + continue + } + compId, _ := clients.CompositeIdFromStr(rs.Primary.ID) + + esClient, err := client.GetESClient() + if err != nil { + return err + } + req := esClient.Security.GetRoleMapping.WithName(compId.ResourceId) + res, err := esClient.Security.GetRoleMapping(req) + if err != nil { + return err + } + + if res.StatusCode != http.StatusNotFound { + return fmt.Errorf("Role mapping (%s) still exists", compId.ResourceId) + } + } + return nil +} + func testAccResourceSecurityRoleMappingCreate(roleMappingName string) string { return fmt.Sprintf(` provider "elasticstack" { @@ -61,11 +123,9 @@ provider "elasticstack" { } resource "elasticstack_elasticsearch_security_role_mapping" "test" { - name = "%s" + name = "%s" enabled = true - roles = [ - "admin" - ] + roles = ["admin"] rules = jsonencode({ any = [ { field = { username = "esadmin" } }, @@ -85,18 +145,17 @@ provider "elasticstack" { } resource "elasticstack_elasticsearch_security_role_mapping" "test" { - name = "%s" + name = "%s" enabled = false - roles = [ - "admin", - "user" - ] + roles = ["admin", "user"] rules = jsonencode({ any = [ { field = { username = "esadmin" } }, { field = { groups = "cn=admins,dc=example,dc=com" } }, ] }) + + metadata = jsonencode({}) } `, roleMappingName) } @@ -108,12 +167,12 @@ provider "elasticstack" { } resource "elasticstack_elasticsearch_security_role_mapping" "test" { - name = "%s" + name = "%s" enabled = false role_templates = jsonencode([ { - template = jsonencode({ source = "{{#tojson}}groups{{/tojson}}" }), - format = "json" + format = "json" + template = "{\"source\":\"{{#tojson}}groups{{/tojson}}\"}" } ]) rules = jsonencode({ @@ -122,35 +181,8 @@ resource "elasticstack_elasticsearch_security_role_mapping" "test" { { field = { groups = "cn=admins,dc=example,dc=com" } }, ] }) + + metadata = jsonencode({}) } `, roleMappingName) } - -func checkResourceSecurityRoleMappingDestroy(s *terraform.State) error { - client, err := clients.NewAcceptanceTestingClient() - if err != nil { - return err - } - - for _, rs := range s.RootModule().Resources { - if rs.Type != "elasticstack_elasticsearch_security_role_mapping" { - continue - } - compId, _ := clients.CompositeIdFromStr(rs.Primary.ID) - - esClient, err := client.GetESClient() - if err != nil { - return err - } - req := esClient.Security.GetRoleMapping.WithName(compId.ResourceId) - res, err := esClient.Security.GetRoleMapping(req) - if err != nil { - return err - } - - if res.StatusCode != http.StatusNotFound { - return fmt.Errorf("role mapping (%s) still exists", compId.ResourceId) - } - } - return nil -} diff --git a/internal/elasticsearch/security/role_mapping/create.go b/internal/elasticsearch/security/role_mapping/create.go new file mode 100644 index 000000000..84e3d131e --- /dev/null +++ b/internal/elasticsearch/security/role_mapping/create.go @@ -0,0 +1,11 @@ +package role_mapping + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +func (r *roleMappingResource) Create(ctx 
context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + resp.Diagnostics.Append(r.update(ctx, req.Plan, &resp.State)...) +} diff --git a/internal/elasticsearch/security/role_mapping/data_source.go b/internal/elasticsearch/security/role_mapping/data_source.go new file mode 100644 index 000000000..13dcd9641 --- /dev/null +++ b/internal/elasticsearch/security/role_mapping/data_source.go @@ -0,0 +1,116 @@ +package role_mapping + +import ( + "context" + "fmt" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + providerschema "github.com/elastic/terraform-provider-elasticstack/internal/schema" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func NewRoleMappingDataSource() datasource.DataSource { + return &roleMappingDataSource{} +} + +type roleMappingDataSource struct { + client *clients.ApiClient +} + +func (d *roleMappingDataSource) Metadata(_ context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_elasticsearch_security_role_mapping" +} + +func (d *roleMappingDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "Retrieves role mappings. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html", + Blocks: map[string]schema.Block{ + "elasticsearch_connection": providerschema.GetEsFWConnectionBlock("elasticsearch_connection", false), + }, + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + MarkdownDescription: "Internal identifier of the resource", + Computed: true, + }, + "name": schema.StringAttribute{ + MarkdownDescription: "The distinct name that identifies the role mapping, used solely as an identifier.", + Required: true, + }, + "enabled": schema.BoolAttribute{ + MarkdownDescription: "Mappings that have `enabled` set to `false` are ignored when role mapping is performed.", + Computed: true, + }, + "rules": schema.StringAttribute{ + MarkdownDescription: "The rules that determine which users should be matched by the mapping. A rule is a logical condition that is expressed by using a JSON DSL.", + Computed: true, + CustomType: jsontypes.NormalizedType{}, + }, + "roles": schema.SetAttribute{ + MarkdownDescription: "A list of role names that are granted to the users that match the role mapping rules.", + ElementType: types.StringType, + Computed: true, + }, + "role_templates": schema.StringAttribute{ + MarkdownDescription: "A list of mustache templates that will be evaluated to determine the roles names that should granted to the users that match the role mapping rules.", + Computed: true, + CustomType: jsontypes.NormalizedType{}, + }, + "metadata": schema.StringAttribute{ + MarkdownDescription: "Additional metadata that helps define which roles are assigned to each user. 
Keys beginning with `_` are reserved for system usage.", + Computed: true, + CustomType: jsontypes.NormalizedType{}, + }, + }, + } +} + +func (d *roleMappingDataSource) Configure(_ context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + client, diags := clients.ConvertProviderData(req.ProviderData) + resp.Diagnostics.Append(diags...) + d.client = client +} + +func (d *roleMappingDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data RoleMappingData + resp.Diagnostics.Append(req.Config.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + roleMappingName := data.Name.ValueString() + + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, data.ElasticsearchConnection, d.client) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + id, sdkDiags := client.ID(ctx, roleMappingName) + resp.Diagnostics.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) + if resp.Diagnostics.HasError() { + return + } + + data.Id = types.StringValue(id.String()) + + // Use the extracted read function + readData, readDiags := readRoleMapping(ctx, client, roleMappingName, data.ElasticsearchConnection) + resp.Diagnostics.Append(readDiags...) + if resp.Diagnostics.HasError() { + return + } + + if readData == nil { + resp.Diagnostics.AddError( + "Role mapping not found", + fmt.Sprintf("Role mapping '%s' not found", roleMappingName), + ) + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, readData)...) +} diff --git a/internal/elasticsearch/security/role_mapping/data_source_test.go b/internal/elasticsearch/security/role_mapping/data_source_test.go new file mode 100644 index 000000000..2e4c80dc3 --- /dev/null +++ b/internal/elasticsearch/security/role_mapping/data_source_test.go @@ -0,0 +1,54 @@ +package role_mapping_test + +import ( + "testing" + + "github.com/elastic/terraform-provider-elasticstack/internal/acctest" + "github.com/elastic/terraform-provider-elasticstack/internal/acctest/checks" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" +) + +func TestAccDataSourceSecurityRoleMapping(t *testing.T) { + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + Config: testAccDataSourceSecurityRoleMapping, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_security_role_mapping.test", "name", "data_source_test"), + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_security_role_mapping.test", "enabled", "true"), + checks.TestCheckResourceListAttr("data.elasticstack_elasticsearch_security_role_mapping.test", "roles", []string{"admin"}), + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_security_role_mapping.test", "rules", `{"any":[{"field":{"username":"esadmin"}},{"field":{"groups":"cn=admins,dc=example,dc=com"}}]}`), + resource.TestCheckResourceAttr("data.elasticstack_elasticsearch_security_role_mapping.test", "metadata", `{"version":1}`), + ), + }, + }, + }) +} + +const testAccDataSourceSecurityRoleMapping = ` +provider "elasticstack" { + elasticsearch {} +} + +resource "elasticstack_elasticsearch_security_role_mapping" "test" { + name = "data_source_test" + enabled = true + roles = [ + "admin" + ] + rules = jsonencode({ + any = [ + { field = { username = "esadmin" } }, + { field = { groups = "cn=admins,dc=example,dc=com" } }, + ] + }) + + metadata = 
jsonencode({ version = 1 }) +} + +data "elasticstack_elasticsearch_security_role_mapping" "test" { + name = elasticstack_elasticsearch_security_role_mapping.test.name +} +` diff --git a/internal/elasticsearch/security/role_mapping/delete.go b/internal/elasticsearch/security/role_mapping/delete.go new file mode 100644 index 000000000..a790f28b8 --- /dev/null +++ b/internal/elasticsearch/security/role_mapping/delete.go @@ -0,0 +1,31 @@ +package role_mapping + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +func (r *roleMappingResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data RoleMappingData + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + compId, diags := clients.CompositeIdFromStrFw(data.Id.ValueString()) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, data.ElasticsearchConnection, r.client) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(elasticsearch.DeleteRoleMapping(ctx, client, compId.ResourceId)...) +} diff --git a/internal/elasticsearch/security/role_mapping/models.go b/internal/elasticsearch/security/role_mapping/models.go new file mode 100644 index 000000000..293efb611 --- /dev/null +++ b/internal/elasticsearch/security/role_mapping/models.go @@ -0,0 +1,17 @@ +package role_mapping + +import ( + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type RoleMappingData struct { + Id types.String `tfsdk:"id"` + ElasticsearchConnection types.List `tfsdk:"elasticsearch_connection"` + Name types.String `tfsdk:"name"` + Enabled types.Bool `tfsdk:"enabled"` + Rules jsontypes.Normalized `tfsdk:"rules"` + Roles types.Set `tfsdk:"roles"` + RoleTemplates jsontypes.Normalized `tfsdk:"role_templates"` + Metadata jsontypes.Normalized `tfsdk:"metadata"` +} diff --git a/internal/elasticsearch/security/role_mapping/read.go b/internal/elasticsearch/security/role_mapping/read.go new file mode 100644 index 000000000..7731db4c8 --- /dev/null +++ b/internal/elasticsearch/security/role_mapping/read.go @@ -0,0 +1,116 @@ +package role_mapping + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-log/tflog" +) + +// readRoleMapping reads role mapping data from Elasticsearch and returns RoleMappingData +func readRoleMapping(ctx context.Context, client *clients.ApiClient, roleMappingName string, elasticsearchConnection types.List) (*RoleMappingData, diag.Diagnostics) { + var diags diag.Diagnostics + + roleMapping, apiDiags := elasticsearch.GetRoleMapping(ctx, client, roleMappingName) + 
diags.Append(apiDiags...) + if diags.HasError() { + return nil, diags + } + + if roleMapping == nil { + return nil, diags + } + + data := &RoleMappingData{} + + // Set basic fields + compId, compDiags := client.ID(ctx, roleMappingName) + diags.Append(utils.FrameworkDiagsFromSDK(compDiags)...) + if diags.HasError() { + return nil, diags + } + data.Id = types.StringValue(compId.String()) + data.ElasticsearchConnection = elasticsearchConnection + data.Name = types.StringValue(roleMapping.Name) + data.Enabled = types.BoolValue(roleMapping.Enabled) + + // Handle rules + rulesJSON, err := json.Marshal(roleMapping.Rules) + if err != nil { + diags.AddError("Failed to marshal rules", err.Error()) + return nil, diags + } + data.Rules = jsontypes.NewNormalizedValue(string(rulesJSON)) + + // Handle roles + data.Roles = utils.SetValueFrom(ctx, roleMapping.Roles, types.StringType, path.Root("roles"), &diags) + if diags.HasError() { + return nil, diags + } + + // Handle role templates + if len(roleMapping.RoleTemplates) > 0 { + roleTemplatesJSON, err := json.Marshal(roleMapping.RoleTemplates) + if err != nil { + diags.AddError("Failed to marshal role templates", err.Error()) + return nil, diags + } + data.RoleTemplates = jsontypes.NewNormalizedValue(string(roleTemplatesJSON)) + } else { + data.RoleTemplates = jsontypes.NewNormalizedNull() + } + + // Handle metadata + metadataJSON, err := json.Marshal(roleMapping.Metadata) + if err != nil { + diags.AddError("Failed to marshal metadata", err.Error()) + return nil, diags + } + data.Metadata = jsontypes.NewNormalizedValue(string(metadataJSON)) + + return data, diags +} + +func (r *roleMappingResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data RoleMappingData + resp.Diagnostics.Append(req.State.Get(ctx, &data)...) + if resp.Diagnostics.HasError() { + return + } + + compId, diags := clients.CompositeIdFromStrFw(data.Id.ValueString()) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + roleMappingName := compId.ResourceId + + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, data.ElasticsearchConnection, r.client) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + readData, diags := readRoleMapping(ctx, client, roleMappingName, data.ElasticsearchConnection) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + if readData == nil { + tflog.Warn(ctx, fmt.Sprintf(`Role mapping "%s" not found, removing from state`, roleMappingName)) + resp.State.RemoveResource(ctx) + return + } + + resp.Diagnostics.Append(resp.State.Set(ctx, readData)...) 
+} diff --git a/internal/elasticsearch/security/role_mapping/resource.go b/internal/elasticsearch/security/role_mapping/resource.go new file mode 100644 index 000000000..a5fe9097c --- /dev/null +++ b/internal/elasticsearch/security/role_mapping/resource.go @@ -0,0 +1,26 @@ +package role_mapping + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +func NewRoleMappingResource() resource.Resource { + return &roleMappingResource{} +} + +type roleMappingResource struct { + client *clients.ApiClient +} + +func (r *roleMappingResource) Metadata(_ context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_elasticsearch_security_role_mapping" +} + +func (r *roleMappingResource) Configure(_ context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + client, diags := clients.ConvertProviderData(req.ProviderData) + resp.Diagnostics.Append(diags...) + r.client = client +} diff --git a/internal/elasticsearch/security/role_mapping/schema.go b/internal/elasticsearch/security/role_mapping/schema.go new file mode 100644 index 000000000..3685399aa --- /dev/null +++ b/internal/elasticsearch/security/role_mapping/schema.go @@ -0,0 +1,84 @@ +package role_mapping + +import ( + "context" + "regexp" + + providerschema "github.com/elastic/terraform-provider-elasticstack/internal/schema" + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework-validators/setvalidator" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func (r *roleMappingResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = GetSchema() +} + +func GetSchema() schema.Schema { + return schema.Schema{ + MarkdownDescription: "Manage role mappings. 
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role-mapping.html", + Blocks: map[string]schema.Block{ + "elasticsearch_connection": providerschema.GetEsFWConnectionBlock("elasticsearch_connection", false), + }, + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + MarkdownDescription: "Internal identifier of the resource", + Computed: true, + }, + "name": schema.StringAttribute{ + MarkdownDescription: "The distinct name that identifies the role mapping, used solely as an identifier.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + Validators: []validator.String{ + stringvalidator.LengthBetween(1, 1024), + stringvalidator.RegexMatches(regexp.MustCompile(`^[[:graph:]]+$`), "must contain printable characters and no spaces"), + }, + }, + "enabled": schema.BoolAttribute{ + MarkdownDescription: "Mappings that have `enabled` set to `false` are ignored when role mapping is performed.", + Optional: true, + Computed: true, + Default: booldefault.StaticBool(true), + }, + "rules": schema.StringAttribute{ + MarkdownDescription: "The rules that determine which users should be matched by the mapping. A rule is a logical condition that is expressed by using a JSON DSL.", + Required: true, + CustomType: jsontypes.NormalizedType{}, + }, + "roles": schema.SetAttribute{ + MarkdownDescription: "A list of role names that are granted to the users that match the role mapping rules.", + ElementType: types.StringType, + Optional: true, + Validators: []validator.Set{ + setvalidator.ExactlyOneOf(path.MatchRoot("role_templates")), + }, + }, + "role_templates": schema.StringAttribute{ + MarkdownDescription: "A list of mustache templates that will be evaluated to determine the roles names that should granted to the users that match the role mapping rules.", + Optional: true, + CustomType: jsontypes.NormalizedType{}, + Validators: []validator.String{ + stringvalidator.ExactlyOneOf(path.MatchRoot("roles")), + }, + }, + "metadata": schema.StringAttribute{ + MarkdownDescription: "Additional metadata that helps define which roles are assigned to each user. Keys beginning with `_` are reserved for system usage.", + Optional: true, + Computed: true, + CustomType: jsontypes.NormalizedType{}, + Default: stringdefault.StaticString("{}"), + }, + }, + } +} diff --git a/internal/elasticsearch/security/role_mapping/update.go b/internal/elasticsearch/security/role_mapping/update.go new file mode 100644 index 000000000..a67755367 --- /dev/null +++ b/internal/elasticsearch/security/role_mapping/update.go @@ -0,0 +1,92 @@ +package role_mapping + +import ( + "context" + "encoding/json" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/elasticsearch" + "github.com/elastic/terraform-provider-elasticstack/internal/models" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/tfsdk" +) + +func (r *roleMappingResource) update(ctx context.Context, plan tfsdk.Plan, state *tfsdk.State) diag.Diagnostics { + var data RoleMappingData + var diags diag.Diagnostics + diags.Append(plan.Get(ctx, &data)...) 
+ if diags.HasError() { + return diags + } + + roleMappingName := data.Name.ValueString() + + client, frameworkDiags := clients.MaybeNewApiClientFromFrameworkResource(ctx, data.ElasticsearchConnection, r.client) + diags.Append(frameworkDiags...) + if diags.HasError() { + return diags + } + + // Parse rules JSON + var rules map[string]interface{} + if err := json.Unmarshal([]byte(data.Rules.ValueString()), &rules); err != nil { + diags.AddError("Failed to parse rules JSON", err.Error()) + return diags + } + + // Parse metadata JSON + metadata := json.RawMessage(data.Metadata.ValueString()) + + // Prepare role mapping + roleMapping := models.RoleMapping{ + Name: roleMappingName, + Enabled: data.Enabled.ValueBool(), + Rules: rules, + Metadata: metadata, + } + + // Handle roles or role templates + if utils.IsKnown(data.Roles) { + roleMapping.Roles = utils.SetTypeAs[string](ctx, data.Roles, path.Root("roles"), &diags) + if diags.HasError() { + return diags + } + } + + if utils.IsKnown(data.RoleTemplates) { + var roleTemplates []map[string]interface{} + if err := json.Unmarshal([]byte(data.RoleTemplates.ValueString()), &roleTemplates); err != nil { + diags.AddError("Failed to parse role templates JSON", err.Error()) + return diags + } + roleMapping.RoleTemplates = roleTemplates + } + + // Put role mapping + apiDiags := elasticsearch.PutRoleMapping(ctx, client, &roleMapping) + diags.Append(apiDiags...) + if diags.HasError() { + return diags + } + + // Read the updated role mapping to ensure consistent result + readData, readDiags := readRoleMapping(ctx, client, roleMappingName, data.ElasticsearchConnection) + diags.Append(readDiags...) + if diags.HasError() { + return diags + } + + if readData != nil { + diags.Append(state.Set(ctx, readData)...) + } + + return diags +} + +func (r *roleMappingResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + diags := r.update(ctx, req.Plan, &resp.State) + resp.Diagnostics.Append(diags...) +} diff --git a/internal/elasticsearch/security/role_mapping_data_source.go b/internal/elasticsearch/security/role_mapping_data_source.go deleted file mode 100644 index 9484ca3fc..000000000 --- a/internal/elasticsearch/security/role_mapping_data_source.go +++ /dev/null @@ -1,77 +0,0 @@ -package security - -import ( - "context" - - "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/elastic/terraform-provider-elasticstack/internal/utils" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" -) - -func DataSourceRoleMapping() *schema.Resource { - roleMappingSchema := map[string]*schema.Schema{ - "id": { - Description: "Internal identifier of the resource", - Type: schema.TypeString, - Computed: true, - }, - "name": { - Type: schema.TypeString, - Required: true, - Description: "The distinct name that identifies the role mapping, used solely as an identifier.", - }, - "enabled": { - Type: schema.TypeBool, - Computed: true, - Description: "Mappings that have `enabled` set to `false` are ignored when role mapping is performed.", - }, - "rules": { - Type: schema.TypeString, - Computed: true, - Description: "The rules that determine which users should be matched by the mapping. 
A rule is a logical condition that is expressed by using a JSON DSL.", - }, - "roles": { - Type: schema.TypeSet, - Elem: &schema.Schema{ - Type: schema.TypeString, - }, - Computed: true, - Description: "A list of role names that are granted to the users that match the role mapping rules.", - }, - "role_templates": { - Type: schema.TypeString, - Computed: true, - Description: "A list of mustache templates that will be evaluated to determine the roles names that should granted to the users that match the role mapping rules.", - }, - "metadata": { - Type: schema.TypeString, - Computed: true, - Description: "Additional metadata that helps define which roles are assigned to each user. Keys beginning with `_` are reserved for system usage.", - }, - } - - utils.AddConnectionSchema(roleMappingSchema) - - return &schema.Resource{ - Description: "Retrieves role mappings. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html", - ReadContext: dataSourceSecurityRoleMappingRead, - Schema: roleMappingSchema, - } -} - -func dataSourceSecurityRoleMappingRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - - roleId := d.Get("name").(string) - id, diags := client.ID(ctx, roleId) - if diags.HasError() { - return diags - } - d.SetId(id.String()) - - return resourceSecurityRoleMappingRead(ctx, d, meta) -} diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go index d7031c9ce..4cb4408fe 100644 --- a/provider/plugin_framework.go +++ b/provider/plugin_framework.go @@ -11,6 +11,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index/index" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/index/indices" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/security/api_key" + "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/security/role_mapping" "github.com/elastic/terraform-provider-elasticstack/internal/elasticsearch/security/system_user" "github.com/elastic/terraform-provider-elasticstack/internal/fleet/agent_policy" "github.com/elastic/terraform-provider-elasticstack/internal/fleet/enrollment_tokens" @@ -88,6 +89,7 @@ func (p *Provider) DataSources(ctx context.Context) []func() datasource.DataSour enrollment_tokens.NewDataSource, integration_ds.NewDataSource, enrich.NewEnrichPolicyDataSource, + role_mapping.NewRoleMappingDataSource, } } @@ -109,5 +111,6 @@ func (p *Provider) Resources(ctx context.Context) []func() resource.Resource { server_host.NewResource, system_user.NewSystemUserResource, enrich.NewEnrichPolicyResource, + role_mapping.NewRoleMappingResource, } } diff --git a/provider/provider.go b/provider/provider.go index 2cd6441ca..10291a2c9 100644 --- a/provider/provider.go +++ b/provider/provider.go @@ -73,7 +73,6 @@ func New(version string) *schema.Provider { "elasticstack_elasticsearch_ingest_processor_uri_parts": ingest.DataSourceProcessorUriParts(), "elasticstack_elasticsearch_ingest_processor_user_agent": ingest.DataSourceProcessorUserAgent(), "elasticstack_elasticsearch_security_role": security.DataSourceRole(), - "elasticstack_elasticsearch_security_role_mapping": security.DataSourceRoleMapping(), "elasticstack_elasticsearch_security_user": security.DataSourceUser(), "elasticstack_elasticsearch_snapshot_repository": cluster.DataSourceSnapshotRespository(), 
"elasticstack_elasticsearch_info": cluster.DataSourceClusterInfo(), @@ -82,21 +81,20 @@ func New(version string) *schema.Provider { "elasticstack_kibana_security_role": kibana.DataSourceRole(), }, ResourcesMap: map[string]*schema.Resource{ - "elasticstack_elasticsearch_cluster_settings": cluster.ResourceSettings(), - "elasticstack_elasticsearch_component_template": index.ResourceComponentTemplate(), - "elasticstack_elasticsearch_data_stream": index.ResourceDataStream(), - "elasticstack_elasticsearch_index_lifecycle": index.ResourceIlm(), - "elasticstack_elasticsearch_index_template": index.ResourceTemplate(), - "elasticstack_elasticsearch_ingest_pipeline": ingest.ResourceIngestPipeline(), - "elasticstack_elasticsearch_logstash_pipeline": logstash.ResourceLogstashPipeline(), - "elasticstack_elasticsearch_security_role": security.ResourceRole(), - "elasticstack_elasticsearch_security_role_mapping": security.ResourceRoleMapping(), - "elasticstack_elasticsearch_security_user": security.ResourceUser(), - "elasticstack_elasticsearch_snapshot_lifecycle": cluster.ResourceSlm(), - "elasticstack_elasticsearch_snapshot_repository": cluster.ResourceSnapshotRepository(), - "elasticstack_elasticsearch_script": cluster.ResourceScript(), - "elasticstack_elasticsearch_transform": transform.ResourceTransform(), - "elasticstack_elasticsearch_watch": watcher.ResourceWatch(), + "elasticstack_elasticsearch_cluster_settings": cluster.ResourceSettings(), + "elasticstack_elasticsearch_component_template": index.ResourceComponentTemplate(), + "elasticstack_elasticsearch_data_stream": index.ResourceDataStream(), + "elasticstack_elasticsearch_index_lifecycle": index.ResourceIlm(), + "elasticstack_elasticsearch_index_template": index.ResourceTemplate(), + "elasticstack_elasticsearch_ingest_pipeline": ingest.ResourceIngestPipeline(), + "elasticstack_elasticsearch_logstash_pipeline": logstash.ResourceLogstashPipeline(), + "elasticstack_elasticsearch_security_role": security.ResourceRole(), + "elasticstack_elasticsearch_security_user": security.ResourceUser(), + "elasticstack_elasticsearch_snapshot_lifecycle": cluster.ResourceSlm(), + "elasticstack_elasticsearch_snapshot_repository": cluster.ResourceSnapshotRepository(), + "elasticstack_elasticsearch_script": cluster.ResourceScript(), + "elasticstack_elasticsearch_transform": transform.ResourceTransform(), + "elasticstack_elasticsearch_watch": watcher.ResourceWatch(), "elasticstack_kibana_alerting_rule": kibana.ResourceAlertingRule(), "elasticstack_kibana_space": kibana.ResourceSpace(), From 6d7a8fcb844f13ad69b8bb1c5ce673f0a4797ff9 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Mon, 8 Sep 2025 07:50:01 +1000 Subject: [PATCH 42/66] chore(deps): update codecov/codecov-action digest to 5a10915 (#1283) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- libs/go-kibana-rest/.github/workflows/workflow.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/go-kibana-rest/.github/workflows/workflow.yml b/libs/go-kibana-rest/.github/workflows/workflow.yml index a399bda23..92deb0a42 100644 --- a/libs/go-kibana-rest/.github/workflows/workflow.yml +++ b/libs/go-kibana-rest/.github/workflows/workflow.yml @@ -44,7 +44,7 @@ jobs: run: go build - name: Run test run: make test - - uses: codecov/codecov-action@fdcc8476540edceab3de004e990f80d881c6cc00 # v5 + - uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # 
v5 with: files: coverage.out flags: unittests From 764c8cd4b6923f7d929c77017033fe0f0b9587bf Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Sun, 7 Sep 2025 21:59:24 +0000 Subject: [PATCH 43/66] chore(deps): update dependency go to v1.25.1 (#1284) * chore(deps): update dependency go to v1.25.1 * Bump go version to 1.25.1 --------- Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Co-authored-by: Toby Brain --- go.mod | 4 +--- libs/go-kibana-rest/go.mod | 4 +--- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/go.mod b/go.mod index 0a69fdece..8321067e0 100644 --- a/go.mod +++ b/go.mod @@ -1,8 +1,6 @@ module github.com/elastic/terraform-provider-elasticstack -go 1.24.0 - -toolchain go1.25.0 +go 1.25.1 require ( github.com/disaster37/go-kibana-rest/v8 v8.5.0 diff --git a/libs/go-kibana-rest/go.mod b/libs/go-kibana-rest/go.mod index 09736688e..8871875dc 100644 --- a/libs/go-kibana-rest/go.mod +++ b/libs/go-kibana-rest/go.mod @@ -1,8 +1,6 @@ module github.com/disaster37/go-kibana-rest/v8 -go 1.23.0 - -toolchain go1.25.0 +go 1.25.1 require ( github.com/go-resty/resty/v2 v2.16.5 From 9e9810ceecb70cda894d6f2fdc3f648e5edf8876 Mon Sep 17 00:00:00 2001 From: Antonio Date: Mon, 8 Sep 2025 10:36:08 +0200 Subject: [PATCH 44/66] [ResponseOps] Maintenance window resource (#1224) * Maintenance window resource: - uses the plugin framework - client generating the openApi specification * make docs-generate * Change float32s to int32s * Simplify model code. * Resource tests * Update the changelog. * Fix diags and resource tests. * Missing tests. * error handling * PR fixes * improved documentation * version and serverless checks * Addressing PR comments 1 * Use EnforceMinVersion in maintenance_window/create.go Co-authored-by: Toby Brain * Generating docs * fix linter * Moving validation around. * Use terraform-plugin-testing * add iso8601 validation * restructure maintenance window model code * call read after update/create * fix empty recurring field * fix terraform import * Update internal/kibana/maintenance_window/resource.go Co-authored-by: Toby Brain * Addressing PR comments 2 * change schema description * Addressing PR comments. 
* fix tests --------- Co-authored-by: Toby Brain --- .gitignore | 1 + CHANGELOG.md | 1 + docs/resources/kibana_maintenance_window.md | 110 ++ .../import.sh | 1 + .../resource.tf | 27 + generated/kbapi/kibana.gen.go | 1030 ++++++++++++++++- generated/kbapi/transform_schema.go | 12 + .../clients/kibana_oapi/maintenance_window.go | 73 ++ internal/kibana/alerting.go | 16 +- .../kibana/maintenance_window/acc_test.go | 116 ++ internal/kibana/maintenance_window/create.go | 83 ++ internal/kibana/maintenance_window/delete.go | 28 + internal/kibana/maintenance_window/models.go | 315 +++++ .../kibana/maintenance_window/models_test.go | 505 ++++++++ internal/kibana/maintenance_window/read.go | 62 + .../kibana/maintenance_window/resource.go | 40 + .../maintenance_window/response_types.go | 49 + internal/kibana/maintenance_window/schema.go | 153 +++ internal/kibana/maintenance_window/update.go | 80 ++ .../maintenance_window/version_utils.go | 21 + .../kibana/validators/is_alerting_duration.go | 49 + .../kibana/validators/is_iso8601_string.go | 38 + .../is_maintenance_window_interval.go | 38 + .../is_maintenance_window_week_day.go | 38 + internal/kibana/validators/validators_test.go | 256 ++++ provider/plugin_framework.go | 2 + .../kibana_maintenance_window.md.tmpl | 23 + 27 files changed, 3142 insertions(+), 25 deletions(-) create mode 100644 docs/resources/kibana_maintenance_window.md create mode 100644 examples/resources/elasticstack_kibana_maintenance_window/import.sh create mode 100644 examples/resources/elasticstack_kibana_maintenance_window/resource.tf create mode 100644 internal/clients/kibana_oapi/maintenance_window.go create mode 100644 internal/kibana/maintenance_window/acc_test.go create mode 100644 internal/kibana/maintenance_window/create.go create mode 100644 internal/kibana/maintenance_window/delete.go create mode 100644 internal/kibana/maintenance_window/models.go create mode 100644 internal/kibana/maintenance_window/models_test.go create mode 100644 internal/kibana/maintenance_window/read.go create mode 100644 internal/kibana/maintenance_window/resource.go create mode 100644 internal/kibana/maintenance_window/response_types.go create mode 100644 internal/kibana/maintenance_window/schema.go create mode 100644 internal/kibana/maintenance_window/update.go create mode 100644 internal/kibana/maintenance_window/version_utils.go create mode 100644 internal/kibana/validators/is_alerting_duration.go create mode 100644 internal/kibana/validators/is_iso8601_string.go create mode 100644 internal/kibana/validators/is_maintenance_window_interval.go create mode 100644 internal/kibana/validators/is_maintenance_window_week_day.go create mode 100644 internal/kibana/validators/validators_test.go create mode 100644 templates/resources/kibana_maintenance_window.md.tmpl diff --git a/.gitignore b/.gitignore index 3c429c9dd..bcfa6a1c2 100644 --- a/.gitignore +++ b/.gitignore @@ -26,6 +26,7 @@ website/node_modules *.test *.iml *.vscode +__debug_* website/vendor diff --git a/CHANGELOG.md b/CHANGELOG.md index 3ffbadeef..bc8a1b402 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,6 @@ ## [Unreleased] +- Create `elasticstack_kibana_maintenance_window` resource. 
([#1224](https://github.com/elastic/terraform-provider-elasticstack/pull/1224)) - Add support for `solution` field in `elasticstack_kibana_space` resource and data source ([#1102](https://github.com/elastic/terraform-provider-elasticstack/issues/1102)) - Add `slo_id` validation to `elasticstack_kibana_slo` ([#1221](https://github.com/elastic/terraform-provider-elasticstack/pull/1221)) - Add `ignore_missing_component_templates` to `elasticstack_elasticsearch_index_template` ([#1206](https://github.com/elastic/terraform-provider-elasticstack/pull/1206)) diff --git a/docs/resources/kibana_maintenance_window.md b/docs/resources/kibana_maintenance_window.md new file mode 100644 index 000000000..adb6d9b66 --- /dev/null +++ b/docs/resources/kibana_maintenance_window.md @@ -0,0 +1,110 @@ +--- +subcategory: "Kibana" +layout: "" +page_title: "Elasticstack: elasticstack_kibana_maintenance_window Resource" +description: |- + Manages Kibana maintenance windows. +--- + +# Resource: elasticstack_kibana_maintenance_window + +Creates and manages Kibana [maintenance windows](https://www.elastic.co/docs/api/doc/kibana/group/endpoint-maintenance-window) + +## Example Usage + +```terraform +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +resource "elasticstack_kibana_maintenance_window" "my_maintenance_window" { + title = "UPDATE TEST" + enabled = true + + custom_schedule = { + start = "1993-01-01T05:00:00.200Z" + duration = "12d" + + recurring = { + every = "21d" + on_week_day = ["MO", "+3TU", "-2FR"] + on_month_day = [1, 2, 4, 6, 7] + on_month = [12] + } + } + + scope = { + alerting = { + kql = "_id: '1234'" + } + } +} +``` + + +## Schema + +### Required + +- `custom_schedule` (Attributes) A set schedule over which the maintenance window applies. (see [below for nested schema](#nestedatt--custom_schedule)) +- `title` (String) The name of the maintenance window. + +### Optional + +- `enabled` (Boolean) Whether the current maintenance window is enabled. +- `scope` (Attributes) An object that narrows the scope of what is affected by this maintenance window. (see [below for nested schema](#nestedatt--scope)) +- `space_id` (String) An identifier for the space. If space_id is not provided, the default space is used. + +### Read-Only + +- `id` (String) Generated ID for the maintenance window. + + +### Nested Schema for `custom_schedule` + +Required: + +- `duration` (String) The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for days, hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. +- `recurring` (Attributes) A set schedule over which the maintenance window applies. (see [below for nested schema](#nestedatt--custom_schedule--recurring)) +- `start` (String) The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + +Optional: + +- `timezone` (String) The timezone of the schedule. The default timezone is UTC. + + +### Nested Schema for `custom_schedule.recurring` + +Optional: + +- `end` (String) The end date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. +- `every` (String) The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for days, hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. +- `occurrences` (Number) The total number of recurrences of the schedule. 
+- `on_month` (List of Number) The specific months for a recurring schedule. Valid values are 1-12. +- `on_month_day` (List of Number) The specific days of the month for a recurring schedule. Valid values are 1-31. +- `on_week_day` (List of String) The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + + + + +### Nested Schema for `scope` + +Required: + +- `alerting` (Attributes) A set schedule over which the maintenance window applies. (see [below for nested schema](#nestedatt--scope--alerting)) + + +### Nested Schema for `scope.alerting` + +Required: + +- `kql` (String) A filter written in Kibana Query Language (KQL). + +## Import + +Import is supported using the following syntax: + +```shell +terraform import elasticstack_kibana_maintenance_window.my_maintenance_window / +``` diff --git a/examples/resources/elasticstack_kibana_maintenance_window/import.sh b/examples/resources/elasticstack_kibana_maintenance_window/import.sh new file mode 100644 index 000000000..42d786bab --- /dev/null +++ b/examples/resources/elasticstack_kibana_maintenance_window/import.sh @@ -0,0 +1 @@ +terraform import elasticstack_kibana_maintenance_window.my_maintenance_window / diff --git a/examples/resources/elasticstack_kibana_maintenance_window/resource.tf b/examples/resources/elasticstack_kibana_maintenance_window/resource.tf new file mode 100644 index 000000000..2c564de26 --- /dev/null +++ b/examples/resources/elasticstack_kibana_maintenance_window/resource.tf @@ -0,0 +1,27 @@ +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +resource "elasticstack_kibana_maintenance_window" "my_maintenance_window" { + title = "UPDATE TEST" + enabled = true + + custom_schedule = { + start = "1993-01-01T05:00:00.200Z" + duration = "12d" + + recurring = { + every = "21d" + on_week_day = ["MO", "+3TU", "-2FR"] + on_month_day = [1, 2, 4, 6, 7] + on_month = [12] + } + } + + scope = { + alerting = { + kql = "_id: '1234'" + } + } +} diff --git a/generated/kbapi/kibana.gen.go b/generated/kbapi/kibana.gen.go index 5eff5b6fb..0ac695f8f 100644 --- a/generated/kbapi/kibana.gen.go +++ b/generated/kbapi/kibana.gen.go @@ -4421,6 +4421,103 @@ type DeleteAgentConfigurationJSONRequestBody = APMUIDeleteServiceObject // CreateUpdateAgentConfigurationJSONRequestBody defines body for CreateUpdateAgentConfiguration for application/json ContentType. type CreateUpdateAgentConfigurationJSONRequestBody = APMUIAgentConfigurationIntakeObject +// PostMaintenanceWindowJSONBody defines parameters for PostMaintenanceWindow. + +type PostMaintenanceWindowJSONBody struct { + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled *bool `json:"enabled,omitempty"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. 
+ Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). Only alerts matching this query will be supressed by the maintenance window. + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Title The name of the maintenance window. While this name does not have to be unique, a distinctive name can help you identify a specific maintenance window. + Title string `json:"title"` +} + +// PatchMaintenanceWindowIdJSONBody defines parameters for PatchMaintenanceWindowId. +type PatchMaintenanceWindowIdJSONBody struct { + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled *bool `json:"enabled,omitempty"` + Schedule *struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. 
+ Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule,omitempty"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). Only alerts matching this query will be supressed by the maintenance window. + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Title The name of the maintenance window. While this name does not have to be unique, a distinctive name can help you identify a specific maintenance window. + Title *string `json:"title,omitempty"` +} + // PostFleetAgentPoliciesJSONRequestBody defines body for PostFleetAgentPolicies for application/json ContentType. type PostFleetAgentPoliciesJSONRequestBody PostFleetAgentPoliciesJSONBody @@ -4469,6 +4566,12 @@ type CreateDataViewDefaultwJSONRequestBody = DataViewsCreateDataViewRequestObjec // UpdateDataViewDefaultJSONRequestBody defines body for UpdateDataViewDefault for application/json ContentType. type UpdateDataViewDefaultJSONRequestBody = DataViewsUpdateDataViewRequestObject +// PostMaintenanceWindowJSONRequestBody defines body for PostMaintenanceWindow for application/json ContentType. +type PostMaintenanceWindowJSONRequestBody PostMaintenanceWindowJSONBody + +// PatchMaintenanceWindowIdJSONRequestBody defines body for PatchMaintenanceWindowId for application/json ContentType. +type PatchMaintenanceWindowIdJSONRequestBody PatchMaintenanceWindowIdJSONBody + // Getter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges. Returns the specified // element and whether it was found func (a AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) Get(fieldName string) (value interface{}, found bool) { @@ -18124,6 +18227,22 @@ type ClientInterface interface { UpdateDataViewDefaultWithBody(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) UpdateDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostMaintenanceWindowWithBody request with any body + PostMaintenanceWindowWithBody(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostMaintenanceWindow(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteMaintenanceWindowId request + DeleteMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetMaintenanceWindowId request + GetMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PatchMaintenanceWindowIdWithBody request with any body + PatchMaintenanceWindowIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PatchMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) } func (c *Client) DeleteAgentConfigurationWithBody(ctx context.Context, params *DeleteAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { @@ -19026,6 +19145,78 @@ func 
NewCreateUpdateAgentConfigurationRequestWithBody(server string, params *Cre return req, nil } +func (c *Client) PostMaintenanceWindowWithBody(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostMaintenanceWindowRequestWithBody(c.Server, spaceId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostMaintenanceWindow(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostMaintenanceWindowRequest(c.Server, spaceId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteMaintenanceWindowIdRequest(c.Server, spaceId, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetMaintenanceWindowIdRequest(c.Server, spaceId, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchMaintenanceWindowIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchMaintenanceWindowIdRequestWithBody(c.Server, spaceId, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchMaintenanceWindowIdRequest(c.Server, spaceId, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + // NewGetFleetAgentPoliciesRequest generates requests for GetFleetAgentPolicies func NewGetFleetAgentPoliciesRequest(server string, params *GetFleetAgentPoliciesParams) (*http.Request, error) { var err error @@ -21337,6 +21528,189 @@ func NewUpdateDataViewDefaultRequestWithBody(server string, spaceId SpaceId, vie return req, nil } +// NewPostMaintenanceWindowRequest calls the generic PostMaintenanceWindow builder with application/json body +func NewPostMaintenanceWindowRequest(server string, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostMaintenanceWindowRequestWithBody(server, spaceId, "application/json", bodyReader) +} + +// 
NewPostMaintenanceWindowRequestWithBody generates requests for PostMaintenanceWindow with any type of body +func NewPostMaintenanceWindowRequestWithBody(server string, spaceId SpaceId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/maintenance_window", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteMaintenanceWindowIdRequest generates requests for DeleteMaintenanceWindowId +func NewDeleteMaintenanceWindowIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/maintenance_window/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetMaintenanceWindowIdRequest generates requests for GetMaintenanceWindowId +func NewGetMaintenanceWindowIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/maintenance_window/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPatchMaintenanceWindowIdRequest calls the generic PatchMaintenanceWindowId builder with application/json body +func NewPatchMaintenanceWindowIdRequest(server string, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchMaintenanceWindowIdRequestWithBody(server, spaceId, id, "application/json", bodyReader) +} + +// NewPatchMaintenanceWindowIdRequestWithBody generates requests for PatchMaintenanceWindowId with any type of body +func NewPatchMaintenanceWindowIdRequestWithBody(server string, spaceId SpaceId, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/maintenance_window/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + func (c *Client) applyEditors(ctx context.Context, req *http.Request, additionalEditors []RequestEditorFn) error { for _, r := range c.RequestEditors { if err := r(ctx, req); err != nil { @@ -21544,6 +21918,22 @@ type ClientWithResponsesInterface interface { UpdateDataViewDefaultWithBodyWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateDataViewDefaultResponse, error) UpdateDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateDataViewDefaultResponse, error) + + // PostMaintenanceWindowWithBodyWithResponse request with any body + PostMaintenanceWindowWithBodyWithResponse(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) + + PostMaintenanceWindowWithResponse(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) + + // DeleteMaintenanceWindowIdWithResponse request + DeleteMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteMaintenanceWindowIdResponse, error) + + // GetMaintenanceWindowIdWithResponse request + GetMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetMaintenanceWindowIdResponse, error) + + // PatchMaintenanceWindowIdWithBodyWithResponse request with any body + 
PatchMaintenanceWindowIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) + + PatchMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) } type DeleteAgentConfigurationResponse struct { @@ -22927,20 +23317,299 @@ func (c *ClientWithResponses) CreateUpdateAgentConfigurationWithResponse(ctx con return ParseCreateUpdateAgentConfigurationResponse(rsp) } -// GetFleetAgentPoliciesWithResponse request returning *GetFleetAgentPoliciesResponse -func (c *ClientWithResponses) GetFleetAgentPoliciesWithResponse(ctx context.Context, params *GetFleetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesResponse, error) { - rsp, err := c.GetFleetAgentPolicies(ctx, params, reqEditors...) - if err != nil { - return nil, err - } - return ParseGetFleetAgentPoliciesResponse(rsp) -} +type PostMaintenanceWindowResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` -// PostFleetAgentPoliciesWithBodyWithResponse request with arbitrary body returning *PostFleetAgentPoliciesResponse -func (c *ClientWithResponses) PostFleetAgentPoliciesWithBodyWithResponse(ctx context.Context, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesResponse, error) { - rsp, err := c.PostFleetAgentPoliciesWithBody(ctx, params, contentType, body, reqEditors...) - if err != nil { - return nil, err + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. 
+ OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status PostMaintenanceWindow200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` + + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` + } +} +type PostMaintenanceWindow200Status string + +// Status returns HTTPResponse.Status +func (r PostMaintenanceWindowResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostMaintenanceWindowResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteMaintenanceWindowIdResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r DeleteMaintenanceWindowIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteMaintenanceWindowIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetMaintenanceWindowIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. 
+ OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status GetMaintenanceWindowId200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` + + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` + } +} +type GetMaintenanceWindowId200Status string + +// Status returns HTTPResponse.Status +func (r GetMaintenanceWindowIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetMaintenanceWindowIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchMaintenanceWindowIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. 
+ OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status PatchMaintenanceWindowId200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` + + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` + } +} +type PatchMaintenanceWindowId200Status string + +// Status returns HTTPResponse.Status +func (r PatchMaintenanceWindowIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchMaintenanceWindowIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +// GetFleetAgentPoliciesWithResponse request returning *GetFleetAgentPoliciesResponse +func (c *ClientWithResponses) GetFleetAgentPoliciesWithResponse(ctx context.Context, params *GetFleetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesResponse, error) { + rsp, err := c.GetFleetAgentPolicies(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetFleetAgentPoliciesResponse(rsp) +} + +// PostFleetAgentPoliciesWithBodyWithResponse request with arbitrary body returning *PostFleetAgentPoliciesResponse +func (c *ClientWithResponses) PostFleetAgentPoliciesWithBodyWithResponse(ctx context.Context, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesResponse, error) { + rsp, err := c.PostFleetAgentPoliciesWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } return ParsePostFleetAgentPoliciesResponse(rsp) } @@ -23570,6 +24239,58 @@ func ParseCreateUpdateAgentConfigurationResponse(rsp *http.Response) (*CreateUpd return response, nil } +// PostMaintenanceWindowWithBodyWithResponse request with arbitrary body returning *PostMaintenanceWindowResponse +func (c *ClientWithResponses) PostMaintenanceWindowWithBodyWithResponse(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) { + rsp, err := c.PostMaintenanceWindowWithBody(ctx, spaceId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParsePostMaintenanceWindowResponse(rsp) +} + +func (c *ClientWithResponses) PostMaintenanceWindowWithResponse(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) { + rsp, err := c.PostMaintenanceWindow(ctx, spaceId, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostMaintenanceWindowResponse(rsp) +} + +// DeleteMaintenanceWindowIdWithResponse request returning *DeleteMaintenanceWindowIdResponse +func (c *ClientWithResponses) DeleteMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteMaintenanceWindowIdResponse, error) { + rsp, err := c.DeleteMaintenanceWindowId(ctx, spaceId, id, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteMaintenanceWindowIdResponse(rsp) +} + +// GetMaintenanceWindowIdWithResponse request returning *GetMaintenanceWindowIdResponse +func (c *ClientWithResponses) GetMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetMaintenanceWindowIdResponse, error) { + rsp, err := c.GetMaintenanceWindowId(ctx, spaceId, id, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetMaintenanceWindowIdResponse(rsp) +} + +// PatchMaintenanceWindowIdWithBodyWithResponse request with arbitrary body returning *PatchMaintenanceWindowIdResponse +func (c *ClientWithResponses) PatchMaintenanceWindowIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) { + rsp, err := c.PatchMaintenanceWindowIdWithBody(ctx, spaceId, id, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePatchMaintenanceWindowIdResponse(rsp) +} + +func (c *ClientWithResponses) PatchMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) { + rsp, err := c.PatchMaintenanceWindowId(ctx, spaceId, id, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePatchMaintenanceWindowIdResponse(rsp) +} + // ParseGetFleetAgentPoliciesResponse parses an HTTP response from a GetFleetAgentPoliciesWithResponse call func ParseGetFleetAgentPoliciesResponse(rsp *http.Response) (*GetFleetAgentPoliciesResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) @@ -25121,3 +25842,286 @@ func ParseUpdateDataViewDefaultResponse(rsp *http.Response) (*UpdateDataViewDefa return response, nil } + +// ParsePostMaintenanceWindowResponse parses an HTTP response from a PostMaintenanceWindowWithResponse call +func ParsePostMaintenanceWindowResponse(rsp *http.Response) (*PostMaintenanceWindowResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &PostMaintenanceWindowResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the maintenance window. 
+ CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status PostMaintenanceWindow200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` + + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. 
+ UpdatedBy *string `json:"updated_by"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseDeleteMaintenanceWindowIdResponse parses an HTTP response from a DeleteMaintenanceWindowIdWithResponse call +func ParseDeleteMaintenanceWindowIdResponse(rsp *http.Response) (*DeleteMaintenanceWindowIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &DeleteMaintenanceWindowIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil +} + +// ParseGetMaintenanceWindowIdResponse parses an HTTP response from a GetMaintenanceWindowIdWithResponse call +func ParseGetMaintenanceWindowIdResponse(rsp *http.Response) (*GetMaintenanceWindowIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &GetMaintenanceWindowIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). 
+ Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status GetMaintenanceWindowId200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` + + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParsePatchMaintenanceWindowIdResponse parses an HTTP response from a PatchMaintenanceWindowIdWithResponse call +func ParsePatchMaintenanceWindowIdResponse(rsp *http.Response) (*PatchMaintenanceWindowIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &PatchMaintenanceWindowIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). 
+ Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status PatchMaintenanceWindowId200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` + + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} diff --git a/generated/kbapi/transform_schema.go b/generated/kbapi/transform_schema.go index 17d86fb24..1e6af5f1d 100644 --- a/generated/kbapi/transform_schema.go +++ b/generated/kbapi/transform_schema.go @@ -108,6 +108,7 @@ func (s Schema) MustGetPath(path string) *Path { type Path struct { Parameters []Map `yaml:"parameters,omitempty"` Get Map `yaml:"get,omitempty"` + Patch Map `yaml:"patch,omitempty"` Post Map `yaml:"post,omitempty"` Put Map `yaml:"put,omitempty"` Delete Map `yaml:"delete,omitempty"` @@ -123,6 +124,9 @@ func (p Path) Endpoints(yield func(key string, endpoint Map) bool) { if p.Put != nil { yield("put", p.Put) } + if p.Patch != nil { + yield("patch", p.Patch) + } if p.Delete != nil { yield("delete", p.Delete) } @@ -136,6 +140,8 @@ func (p Path) GetEndpoint(method string) Map { return p.Post case "put": return p.Put + case "patch": + return p.Patch case "delete": return p.Delete default: @@ -160,6 +166,8 @@ func (p *Path) SetEndpoint(method string, endpoint Map) { p.Post = endpoint case "put": p.Put = endpoint + case "patch": + p.Patch = endpoint case "delete": p.Delete = endpoint default: @@ -572,6 +580,8 @@ func transformFilterPaths(schema *Schema) { "/api/synthetics/params": {"post"}, "/api/synthetics/params/{id}": {"get", "put", "delete"}, "/api/apm/settings/agent-configuration": {"get", "put", "delete"}, + "/api/maintenance_window": {"post"}, + "/api/maintenance_window/{id}": {"delete", "get", "patch"}, "/api/actions/connector/{id}": {"get", "put", "post", "delete"}, "/api/actions/connectors": {"get"}, } @@ -720,6 +730,8 @@ func transformKibanaPaths(schema *Schema) { "/api/data_views", "/api/data_views/data_view", "/api/data_views/data_view/{viewId}", + "/api/maintenance_window", + "/api/maintenance_window/{id}", "/api/actions/connector/{id}", "/api/actions/connectors", } diff --git a/internal/clients/kibana_oapi/maintenance_window.go b/internal/clients/kibana_oapi/maintenance_window.go new file mode 100644 index 000000000..8babf2c92 --- /dev/null +++ b/internal/clients/kibana_oapi/maintenance_window.go @@ -0,0 +1,73 @@ +package kibana_oapi + +import ( + "context" + "net/http" + + "github.com/elastic/terraform-provider-elasticstack/generated/kbapi" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework/diag" +) + +// GetMaintenanceWindow reads a maintenance window from the API by ID +func GetMaintenanceWindow(ctx context.Context, client *Client, spaceID string, maintenanceWindowID string) (*kbapi.GetMaintenanceWindowIdResponse, diag.Diagnostics) { + resp, err := client.API.GetMaintenanceWindowIdWithResponse(ctx, spaceID, maintenanceWindowID) + + if err != nil { + return nil, utils.FrameworkDiagFromError(err) + } + + switch resp.StatusCode() { + case http.StatusOK: + return resp, nil + default: + return nil, 
reportUnknownError(resp.StatusCode(), resp.Body) + } +} + +// CreateMaintenanceWindow creates a new maintenance window. +func CreateMaintenanceWindow(ctx context.Context, client *Client, spaceID string, body kbapi.PostMaintenanceWindowJSONRequestBody) (*kbapi.PostMaintenanceWindowResponse, diag.Diagnostics) { + resp, err := client.API.PostMaintenanceWindowWithResponse(ctx, spaceID, body) + if err != nil { + return nil, utils.FrameworkDiagFromError(err) + } + + switch resp.StatusCode() { + case http.StatusOK: + return resp, nil + default: + return nil, reportUnknownError(resp.StatusCode(), resp.Body) + } +} + +// UpdateMaintenanceWindow updates an existing maintenance window. +func UpdateMaintenanceWindow(ctx context.Context, client *Client, spaceID string, maintenanceWindowID string, req kbapi.PatchMaintenanceWindowIdJSONRequestBody) diag.Diagnostics { + resp, err := client.API.PatchMaintenanceWindowIdWithResponse(ctx, spaceID, maintenanceWindowID, req) + if err != nil { + return utils.FrameworkDiagFromError(err) + } + + switch resp.StatusCode() { + case http.StatusOK: + return nil + default: + return reportUnknownError(resp.StatusCode(), resp.Body) + } +} + +// DeleteMaintenanceWindow deletes an existing maintenance window. +func DeleteMaintenanceWindow(ctx context.Context, client *Client, spaceID string, maintenanceWindowID string) diag.Diagnostics { + resp, err := client.API.DeleteMaintenanceWindowIdWithResponse(ctx, spaceID, maintenanceWindowID) + if err != nil { + return utils.FrameworkDiagFromError(err) + } + + switch resp.StatusCode() { + case http.StatusNoContent: + return nil + case http.StatusNotFound: + return nil + default: + return reportUnknownError(resp.StatusCode(), resp.Body) + } +} diff --git a/internal/kibana/alerting.go b/internal/kibana/alerting.go index ab1b83960..b6e5fac13 100644 --- a/internal/kibana/alerting.go +++ b/internal/kibana/alerting.go @@ -4,11 +4,11 @@ import ( "context" "encoding/json" "fmt" - "regexp" "strings" "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana" + "github.com/elastic/terraform-provider-elasticstack/internal/kibana/validators" "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/hashicorp/go-version" @@ -22,14 +22,6 @@ var frequencyMinSupportedVersion = version.Must(version.NewVersion("8.6.0")) var alertsFilterMinSupportedVersion = version.Must(version.NewVersion("8.9.0")) var alertDelayMinSupportedVersion = version.Must(version.NewVersion("8.13.0")) -// Avoid lint error on deprecated SchemaValidateFunc usage. -// -//nolint:staticcheck -func stringIsAlertingDuration() schema.SchemaValidateFunc { - r := regexp.MustCompile(`^[1-9][0-9]*(?:d|h|m|s)$`) - return validation.StringMatch(r, "string is not a valid Alerting duration in seconds (s), minutes (m), hours (h), or days (d)") -} - func ResourceAlertingRule() *schema.Resource { apikeySchema := map[string]*schema.Schema{ "rule_id": { @@ -80,7 +72,7 @@ func ResourceAlertingRule() *schema.Resource { Description: "The check interval, which specifies how frequently the rule conditions are checked. 
The interval must be specified in seconds, minutes, hours or days.", Type: schema.TypeString, Required: true, - ValidateFunc: stringIsAlertingDuration(), + ValidateFunc: validators.StringIsAlertingDurationSDKV2(), }, "actions": { Description: "An action that runs under defined conditions.", @@ -129,7 +121,7 @@ func ResourceAlertingRule() *schema.Resource { Description: "Defines how often an alert generates repeated actions. This custom action interval must be specified in seconds, minutes, hours, or days. For example, 10m or 1h. This property is applicable only if `notify_when` is `onThrottleInterval`. NOTE: This is a rule level property; if you update the rule in Kibana, it is automatically changed to use action-specific `throttle` values.", Type: schema.TypeString, Optional: true, - ValidateFunc: stringIsAlertingDuration(), + ValidateFunc: validators.StringIsAlertingDurationSDKV2(), }, }, }, @@ -207,7 +199,7 @@ func ResourceAlertingRule() *schema.Resource { Description: "Deprecated in 8.13.0. Defines how often an alert generates repeated actions. This custom action interval must be specified in seconds, minutes, hours, or days. For example, 10m or 1h. This property is applicable only if `notify_when` is `onThrottleInterval`. NOTE: This is a rule level property; if you update the rule in Kibana, it is automatically changed to use action-specific `throttle` values.", Type: schema.TypeString, Optional: true, - ValidateFunc: stringIsAlertingDuration(), + ValidateFunc: validators.StringIsAlertingDurationSDKV2(), }, "scheduled_task_id": { Description: "ID of the scheduled task that will execute the alert.", diff --git a/internal/kibana/maintenance_window/acc_test.go b/internal/kibana/maintenance_window/acc_test.go new file mode 100644 index 000000000..c68db0765 --- /dev/null +++ b/internal/kibana/maintenance_window/acc_test.go @@ -0,0 +1,116 @@ +package maintenance_window_test + +import ( + "testing" + + "github.com/elastic/terraform-provider-elasticstack/internal/acctest" + "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" + "github.com/hashicorp/go-version" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" +) + +var minMaintenanceWindowAPISupport = version.Must(version.NewVersion("9.1.0")) + +func TestAccResourceMaintenanceWindow(t *testing.T) { + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minMaintenanceWindowAPISupport), + Config: testAccResourceMaintenanceWindowCreate, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "title", "Terraform Maintenance Window"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "enabled", "true"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.start", "1992-01-01T05:00:00.200Z"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.duration", "10d"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.timezone", "UTC"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.recurring.every", "20d"), + 
resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.recurring.end", "2029-05-17T05:05:00.000Z"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.recurring.on_week_day.0", "MO"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.recurring.on_week_day.1", "TU"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "scope.alerting.kql", "_id: '1234'"), + ), + }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minMaintenanceWindowAPISupport), + Config: testAccResourceMaintenanceWindowUpdate, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "title", "Terraform Maintenance Window UPDATED"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "enabled", "false"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.start", "1999-02-02T05:00:00.200Z"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.duration", "12d"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.timezone", "Asia/Taipei"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.recurring.every", "21d"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.recurring.on_month_day.0", "1"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.recurring.on_month_day.1", "2"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.recurring.on_month_day.2", "3"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.recurring.on_month.0", "4"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "custom_schedule.recurring.on_month.1", "5"), + resource.TestCheckResourceAttr("elasticstack_kibana_maintenance_window.test_maintenance_window", "scope.alerting.kql", "_id: 'foobar'"), + ), + }, + }, + }) +} + +const testAccResourceMaintenanceWindowCreate = ` +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +resource "elasticstack_kibana_maintenance_window" "test_maintenance_window" { + title = "Terraform Maintenance Window" + enabled = true + + custom_schedule = { + start = "1992-01-01T05:00:00.200Z" + duration = "10d" + timezone = "UTC" + + recurring = { + every = "20d" + end = "2029-05-17T05:05:00.000Z" + on_week_day = ["MO", "TU"] + } + } + + scope = { + alerting = { + kql = "_id: '1234'" + } + } +} +` + +const testAccResourceMaintenanceWindowUpdate = ` +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +resource "elasticstack_kibana_maintenance_window" "test_maintenance_window" { + title = "Terraform Maintenance Window UPDATED" + enabled = false + + custom_schedule = { + start = "1999-02-02T05:00:00.200Z" + duration = "12d" + timezone = "Asia/Taipei" + + recurring = { + every = "21d" + on_month_day = [1, 2, 3] + on_month = [4, 5] + } + } + + scope = { + alerting = { + kql = "_id: 
'foobar'"
+    }
+  }
+}
+`
diff --git a/internal/kibana/maintenance_window/create.go b/internal/kibana/maintenance_window/create.go
new file mode 100644
index 000000000..a38a14a3c
--- /dev/null
+++ b/internal/kibana/maintenance_window/create.go
@@ -0,0 +1,83 @@
+package maintenance_window
+
+import (
+ "context"
+
+ "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi"
+ "github.com/elastic/terraform-provider-elasticstack/internal/utils"
+ "github.com/hashicorp/go-version"
+ "github.com/hashicorp/terraform-plugin-framework/resource"
+ "github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *MaintenanceWindowResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
+ // Retrieve values from plan
+ var planMaintenanceWindow MaintenanceWindowModel
+
+ diags := req.Plan.Get(ctx, &planMaintenanceWindow)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ // Generate API request body from plan
+ body, diags := planMaintenanceWindow.toAPICreateRequest(ctx)
+
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ isSupported, sdkDiags := r.client.EnforceMinVersion(ctx, version.Must(version.NewVersion("9.1.0")))
+ resp.Diagnostics.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ if !isSupported {
+ resp.Diagnostics.AddError("Unsupported server version", "Maintenance windows are not supported until Elastic Stack v9.1. Upgrade the target server to use this resource")
+ return
+ }
+
+ client, err := r.client.GetKibanaOapiClient()
+ if err != nil {
+ resp.Diagnostics.AddError(err.Error(), "")
+ return
+ }
+
+ spaceID := planMaintenanceWindow.SpaceID.ValueString()
+ createMaintenanceWindowResponse, diags := kibana_oapi.CreateMaintenanceWindow(ctx, client, spaceID, body)
+
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ /*
+ * In create/update paths we typically follow the write operation with a read, and then set the state from the read.
+ * We want to avoid a dirty plan immediately after an apply.
+ */
+ maintenanceWindowID := createMaintenanceWindowResponse.JSON200.Id
+ readMaintenanceWindowResponse, diags := kibana_oapi.GetMaintenanceWindow(ctx, client, spaceID, maintenanceWindowID)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ if readMaintenanceWindowResponse == nil {
+ resp.State.RemoveResource(ctx)
+ return
+ }
+
+ diags = planMaintenanceWindow.fromAPIReadResponse(ctx, readMaintenanceWindowResponse)
+ resp.Diagnostics.Append(diags...)
+ if resp.Diagnostics.HasError() {
+ return
+ }
+
+ planMaintenanceWindow.ID = types.StringValue(maintenanceWindowID)
+ planMaintenanceWindow.SpaceID = types.StringValue(spaceID)
+
+ diags = resp.State.Set(ctx, planMaintenanceWindow)
+ resp.Diagnostics.Append(diags...)
+} diff --git a/internal/kibana/maintenance_window/delete.go b/internal/kibana/maintenance_window/delete.go new file mode 100644 index 000000000..52228ee62 --- /dev/null +++ b/internal/kibana/maintenance_window/delete.go @@ -0,0 +1,28 @@ +package maintenance_window + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +func (r *MaintenanceWindowResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var stateModel MaintenanceWindowModel + + diags := req.State.Get(ctx, &stateModel) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + client, err := r.client.GetKibanaOapiClient() + if err != nil { + resp.Diagnostics.AddError(err.Error(), "") + return + } + + maintenanceWindowID, spaceID := stateModel.getMaintenanceWindowIDAndSpaceID() + diags = kibana_oapi.DeleteMaintenanceWindow(ctx, client, spaceID, maintenanceWindowID) + resp.Diagnostics.Append(diags...) +} diff --git a/internal/kibana/maintenance_window/models.go b/internal/kibana/maintenance_window/models.go new file mode 100644 index 000000000..22dff578f --- /dev/null +++ b/internal/kibana/maintenance_window/models.go @@ -0,0 +1,315 @@ +package maintenance_window + +import ( + "context" + "encoding/json" + + "github.com/elastic/terraform-provider-elasticstack/generated/kbapi" + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type MaintenanceWindowModel struct { + ID types.String `tfsdk:"id"` + SpaceID types.String `tfsdk:"space_id"` + Title types.String `tfsdk:"title"` + Enabled types.Bool `tfsdk:"enabled"` + CustomSchedule MaintenanceWindowSchedule `tfsdk:"custom_schedule"` + Scope *MaintenanceWindowScope `tfsdk:"scope"` +} + +type MaintenanceWindowScope struct { + Alerting MaintenanceWindowAlertingScope `tfsdk:"alerting"` +} + +type MaintenanceWindowAlertingScope struct { + Kql types.String `tfsdk:"kql"` +} + +type MaintenanceWindowSchedule struct { + Start types.String `tfsdk:"start"` + Duration types.String `tfsdk:"duration"` + Timezone types.String `tfsdk:"timezone"` + Recurring *MaintenanceWindowScheduleRecurring `tfsdk:"recurring"` +} + +type MaintenanceWindowScheduleRecurring struct { + End types.String `tfsdk:"end"` + Every types.String `tfsdk:"every"` + Occurrences types.Int32 `tfsdk:"occurrences"` + OnWeekDay types.List `tfsdk:"on_week_day"` + OnMonthDay types.List `tfsdk:"on_month_day"` + OnMonth types.List `tfsdk:"on_month"` +} + +/* CREATE */ + +func (model MaintenanceWindowModel) toAPICreateRequest(ctx context.Context) (kbapi.PostMaintenanceWindowJSONRequestBody, diag.Diagnostics) { + body := kbapi.PostMaintenanceWindowJSONRequestBody{ + Enabled: model.Enabled.ValueBoolPointer(), + Title: model.Title.ValueString(), + } + + body.Schedule.Custom.Duration = model.CustomSchedule.Duration.ValueString() + body.Schedule.Custom.Start = model.CustomSchedule.Start.ValueString() + + if !model.CustomSchedule.Timezone.IsNull() && !model.CustomSchedule.Timezone.IsUnknown() { + body.Schedule.Custom.Timezone = model.CustomSchedule.Timezone.ValueStringPointer() + } + + customRecurring, diags := model.CustomSchedule.Recurring.toAPIRequest(ctx) + body.Schedule.Custom.Recurring = customRecurring + body.Scope = 
model.Scope.toAPIRequest() + + return body, diags +} + +/* READ */ + +func (model *MaintenanceWindowModel) fromAPIReadResponse(ctx context.Context, data *kbapi.GetMaintenanceWindowIdResponse) diag.Diagnostics { + if data == nil { + return nil + } + + var diags = diag.Diagnostics{} + var response = &ResponseJson{} + + if err := json.Unmarshal(data.Body, response); err != nil { + diags.AddError(err.Error(), "cannot unmarshal GetMaintenanceWindowIdResponse") + return diags + } + + return model._fromAPIResponse(ctx, *response) +} + +/* UPDATE */ + +func (model MaintenanceWindowModel) toAPIUpdateRequest(ctx context.Context) (kbapi.PatchMaintenanceWindowIdJSONRequestBody, diag.Diagnostics) { + body := kbapi.PatchMaintenanceWindowIdJSONRequestBody{ + Enabled: model.Enabled.ValueBoolPointer(), + Title: model.Title.ValueStringPointer(), + } + + body.Schedule = &struct { + Custom struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + }{ + Custom: struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + }{ + Duration: model.CustomSchedule.Duration.ValueString(), + Start: model.CustomSchedule.Start.ValueString(), + }, + } + + if utils.IsKnown(model.CustomSchedule.Timezone) { + body.Schedule.Custom.Timezone = model.CustomSchedule.Timezone.ValueStringPointer() + } + + customRecurring, diags := model.CustomSchedule.Recurring.toAPIRequest(ctx) + body.Schedule.Custom.Recurring = customRecurring + body.Scope = model.Scope.toAPIRequest() + + return body, diags +} + +/* DELETE */ + +func (model MaintenanceWindowModel) getMaintenanceWindowIDAndSpaceID() (maintenanceWindowID string, spaceID string) { + maintenanceWindowID = model.ID.ValueString() + spaceID = model.SpaceID.ValueString() + + resourceID := model.ID.ValueString() + maybeCompositeID, _ := clients.CompositeIdFromStr(resourceID) + if maybeCompositeID != nil { + maintenanceWindowID = maybeCompositeID.ResourceId + spaceID = maybeCompositeID.ClusterId + } + + return +} + +/* RESPONSE HANDLER */ + +func (model *MaintenanceWindowModel) _fromAPIResponse(ctx context.Context, response ResponseJson) diag.Diagnostics { + var diags = diag.Diagnostics{} + + model.Title = types.StringValue(response.Title) + model.Enabled = types.BoolValue(response.Enabled) + + model.CustomSchedule = MaintenanceWindowSchedule{ + Start: types.StringValue(response.Schedule.Custom.Start), + Duration: types.StringValue(response.Schedule.Custom.Duration), + Timezone: types.StringPointerValue(response.Schedule.Custom.Timezone), + Recurring: &MaintenanceWindowScheduleRecurring{ + End: types.StringNull(), + Every: types.StringNull(), + OnWeekDay: types.ListNull(types.StringType), + OnMonth: types.ListNull(types.Int32Type), + OnMonthDay: types.ListNull(types.Int32Type), 
+ }, + } + + if response.Schedule.Custom.Recurring != nil { + model.CustomSchedule.Recurring.End = types.StringPointerValue(response.Schedule.Custom.Recurring.End) + model.CustomSchedule.Recurring.Every = types.StringPointerValue(response.Schedule.Custom.Recurring.Every) + + if response.Schedule.Custom.Recurring.Occurrences != nil { + occurrences := int32(*response.Schedule.Custom.Recurring.Occurrences) + model.CustomSchedule.Recurring.Occurrences = types.Int32PointerValue(&occurrences) + } + + if response.Schedule.Custom.Recurring.OnWeekDay != nil { + onWeekDay, d := types.ListValueFrom(ctx, types.StringType, response.Schedule.Custom.Recurring.OnWeekDay) + + if d.HasError() { + diags.Append(d...) + } else { + model.CustomSchedule.Recurring.OnWeekDay = onWeekDay + } + } + + if response.Schedule.Custom.Recurring.OnMonth != nil { + onMonth, d := types.ListValueFrom(ctx, types.Int32Type, response.Schedule.Custom.Recurring.OnMonth) + + if d.HasError() { + diags.Append(d...) + } else { + model.CustomSchedule.Recurring.OnMonth = onMonth + } + } + + if response.Schedule.Custom.Recurring.OnMonthDay != nil { + onMonthDay, d := types.ListValueFrom(ctx, types.Int32Type, response.Schedule.Custom.Recurring.OnMonthDay) + + if d.HasError() { + diags.Append(d...) + } else { + model.CustomSchedule.Recurring.OnMonthDay = onMonthDay + } + } + } + + if response.Scope != nil { + model.Scope = &MaintenanceWindowScope{ + Alerting: MaintenanceWindowAlertingScope{ + Kql: types.StringValue(response.Scope.Alerting.Query.Kql), + }, + } + } + + return diags +} + +/* HELPERS */ + +func (model *MaintenanceWindowScope) toAPIRequest() *struct { + Alerting struct { + Query struct { + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` +} { + if model == nil { + return nil + } + + return &struct { + Alerting struct { + Query struct { + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + }{ + Alerting: struct { + Query struct { + Kql string `json:"kql"` + } `json:"query"` + }{ + Query: struct { + Kql string `json:"kql"` + }{ + Kql: model.Alerting.Kql.ValueString(), + }, + }, + } +} + +func (model *MaintenanceWindowScheduleRecurring) toAPIRequest(ctx context.Context) (*struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` +}, diag.Diagnostics) { + if model == nil { + return nil, nil + } + + var diags diag.Diagnostics + result := &struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + }{} + + if utils.IsKnown(model.End) { + result.End = model.End.ValueStringPointer() + } + + if utils.IsKnown(model.Every) { + result.Every = model.Every.ValueStringPointer() + } + + if utils.IsKnown(model.Occurrences) { + occurrences := float32(model.Occurrences.ValueInt32()) + result.Occurrences = &occurrences + } + + if utils.IsKnown(model.OnWeekDay) { + var onWeekDay []string + diags.Append(model.OnWeekDay.ElementsAs(ctx, &onWeekDay, true)...) + result.OnWeekDay = &onWeekDay + } + + if utils.IsKnown(model.OnMonth) { + var onMonth []float32 + diags.Append(model.OnMonth.ElementsAs(ctx, &onMonth, true)...) 
+ result.OnMonth = &onMonth + } + + if utils.IsKnown(model.OnMonthDay) { + var onMonthDay []float32 + diags.Append(model.OnMonthDay.ElementsAs(ctx, &onMonthDay, true)...) + result.OnMonthDay = &onMonthDay + } + + return result, diags +} diff --git a/internal/kibana/maintenance_window/models_test.go b/internal/kibana/maintenance_window/models_test.go new file mode 100644 index 000000000..84b641ce2 --- /dev/null +++ b/internal/kibana/maintenance_window/models_test.go @@ -0,0 +1,505 @@ +package maintenance_window + +import ( + "context" + "testing" + + "github.com/elastic/terraform-provider-elasticstack/generated/kbapi" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stretchr/testify/require" +) + +var modelWithAllFields = MaintenanceWindowModel{ + Title: types.StringValue("test response"), + Enabled: types.BoolValue(true), + + CustomSchedule: MaintenanceWindowSchedule{ + Start: types.StringValue("1993-01-01T05:00:00.200Z"), + Duration: types.StringValue("13d"), + Timezone: types.StringValue("America/Martinique"), + + Recurring: &MaintenanceWindowScheduleRecurring{ + Every: types.StringValue("21d"), + End: types.StringValue("2029-05-17T05:05:00.000Z"), + Occurrences: types.Int32Null(), + OnWeekDay: types.ListValueMust(types.StringType, []attr.Value{types.StringValue("MO"), types.StringValue("-2FR"), types.StringValue("+4SA")}), + OnMonth: types.ListValueMust(types.Int32Type, []attr.Value{types.Int32Value(6)}), + OnMonthDay: types.ListValueMust(types.Int32Type, []attr.Value{types.Int32Value(1), types.Int32Value(2), types.Int32Value(3)}), + }, + }, + + Scope: &MaintenanceWindowScope{ + Alerting: MaintenanceWindowAlertingScope{ + Kql: types.StringValue("_id: '1234'"), + }, + }, +} + +var modelOccurrencesNoScope = MaintenanceWindowModel{ + Title: types.StringValue("test response"), + Enabled: types.BoolValue(true), + + CustomSchedule: MaintenanceWindowSchedule{ + Start: types.StringValue("1993-01-01T05:00:00.200Z"), + Duration: types.StringValue("13d"), + Timezone: types.StringNull(), + + Recurring: &MaintenanceWindowScheduleRecurring{ + Every: types.StringValue("21d"), + End: types.StringNull(), + Occurrences: types.Int32Value(42), + OnWeekDay: types.ListNull(types.StringType), + OnMonth: types.ListNull(types.Int32Type), + OnMonthDay: types.ListNull(types.Int32Type), + }, + }, + + Scope: nil, +} + +func TestMaintenanceWindowFromAPI(t *testing.T) { + ctx := context.Background() + var diags diag.Diagnostics + + tests := []struct { + name string + response ResponseJson + existingModel MaintenanceWindowModel + expectedModel MaintenanceWindowModel + }{ + { + name: "all fields", + existingModel: MaintenanceWindowModel{}, + response: ResponseJson{ + Id: "existing-space-id/id", + CreatedAt: "created_at", + Enabled: true, + Title: "test response", + Schedule: ResponseJsonSchedule{ + Custom: ResponseJsonCustomSchedule{ + Start: "1993-01-01T05:00:00.200Z", + Duration: "13d", + Timezone: utils.Pointer("America/Martinique"), + Recurring: &ResponseJsonRecurring{ + Every: utils.Pointer("21d"), + End: utils.Pointer("2029-05-17T05:05:00.000Z"), + OnWeekDay: utils.Pointer([]string{"MO", "-2FR", "+4SA"}), + OnMonth: utils.Pointer([]float32{6}), + OnMonthDay: utils.Pointer([]float32{1, 2, 3}), + }, + }, + }, + Scope: &ResponseJsonScope{ + Alerting: ResponseJsonAlerting{ + Query: ResponseJsonAlertingQuery{ + 
Kql: "_id: '1234'", + }, + }, + }, + }, + expectedModel: modelWithAllFields, + }, + { + name: "occurrences and no scope", + existingModel: MaintenanceWindowModel{}, + response: ResponseJson{ + Id: "existing-space-id/id", + CreatedAt: "created_at", + Enabled: true, + Title: "test response", + Schedule: ResponseJsonSchedule{ + Custom: ResponseJsonCustomSchedule{ + Start: "1993-01-01T05:00:00.200Z", + Duration: "13d", + Recurring: &ResponseJsonRecurring{ + Every: utils.Pointer("21d"), + Occurrences: utils.Pointer(float32(42)), + }, + }, + }, + Scope: nil, + }, + expectedModel: modelOccurrencesNoScope, + }, + } + + require.Empty(t, diags) + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + diags := tt.existingModel._fromAPIResponse(ctx, tt.response) + + require.Equal(t, tt.expectedModel, tt.existingModel) + require.Empty(t, diags) + }) + } +} + +func TestMaintenanceWindowToAPICreateRequest(t *testing.T) { + ctx := context.Background() + var diags diag.Diagnostics + + tests := []struct { + name string + model MaintenanceWindowModel + expectedRequest kbapi.PostMaintenanceWindowJSONRequestBody + }{ + { + name: "all fields", + model: modelWithAllFields, + expectedRequest: kbapi.PostMaintenanceWindowJSONRequestBody{ + Enabled: utils.Pointer(true), + Title: "test response", + Schedule: struct { + Custom struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + }{ + Custom: struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + }{ + Start: "1993-01-01T05:00:00.200Z", + Duration: "13d", + Timezone: utils.Pointer("America/Martinique"), + Recurring: utils.Pointer(struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + }{ + Every: utils.Pointer("21d"), + End: utils.Pointer("2029-05-17T05:05:00.000Z"), + OnWeekDay: utils.Pointer([]string{"MO", "-2FR", "+4SA"}), + OnMonth: utils.Pointer([]float32{6}), + OnMonthDay: utils.Pointer([]float32{1, 2, 3}), + }), + }, + }, + Scope: utils.Pointer(struct { + Alerting struct { + Query struct { + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + }{ + Alerting: struct { + Query struct { + Kql string `json:"kql"` + } `json:"query"` + }{ + Query: struct { + Kql string `json:"kql"` + }{ + Kql: "_id: '1234'", + }, + }, + }, + ), + }, + }, + { + name: "occurrences and no scope", + model: modelOccurrencesNoScope, + expectedRequest: kbapi.PostMaintenanceWindowJSONRequestBody{ + Enabled: utils.Pointer(true), + Title: "test response", + Schedule: struct { + Custom struct { + Duration 
string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + }{ + Custom: struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + }{ + Start: "1993-01-01T05:00:00.200Z", + Duration: "13d", + + Recurring: utils.Pointer(struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + }{ + Every: utils.Pointer("21d"), + Occurrences: utils.Pointer(float32(42)), + }), + }, + }, + }, + }, + } + + require.Empty(t, diags) + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + request, diags := tt.model.toAPICreateRequest(ctx) + require.Equal(t, request, tt.expectedRequest) + require.Empty(t, diags) + }) + } +} + +func TestMaintenanceWindowToAPIUpdateRequest(t *testing.T) { + ctx := context.Background() + var diags diag.Diagnostics + + tests := []struct { + name string + model MaintenanceWindowModel + expectedRequest kbapi.PatchMaintenanceWindowIdJSONRequestBody + }{ + { + name: "all fields", + model: modelWithAllFields, + expectedRequest: kbapi.PatchMaintenanceWindowIdJSONRequestBody{ + Enabled: utils.Pointer(true), + Title: utils.Pointer("test response"), + Schedule: utils.Pointer(struct { + Custom struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + }{ + Custom: struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + }{ + Start: "1993-01-01T05:00:00.200Z", + Duration: "13d", + Timezone: utils.Pointer("America/Martinique"), + Recurring: utils.Pointer(struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string 
`json:"onWeekDay,omitempty"` + }{ + Every: utils.Pointer("21d"), + End: utils.Pointer("2029-05-17T05:05:00.000Z"), + OnWeekDay: utils.Pointer([]string{"MO", "-2FR", "+4SA"}), + OnMonth: utils.Pointer([]float32{6}), + OnMonthDay: utils.Pointer([]float32{1, 2, 3}), + }), + }, + }), + Scope: utils.Pointer(struct { + Alerting struct { + Query struct { + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + }{ + Alerting: struct { + Query struct { + Kql string `json:"kql"` + } `json:"query"` + }{ + Query: struct { + Kql string `json:"kql"` + }{ + Kql: "_id: '1234'", + }, + }, + }, + ), + }, + }, + { + name: "just title, enabled and schedule", + model: MaintenanceWindowModel{ + ID: types.StringValue("/existing-space-id/id"), + Title: types.StringValue("test response"), + Enabled: types.BoolValue(true), + CustomSchedule: MaintenanceWindowSchedule{ + Start: types.StringValue("1993-01-01T05:00:00.200Z"), + Duration: types.StringValue("13d"), + }, + }, + expectedRequest: kbapi.PatchMaintenanceWindowIdJSONRequestBody{ + Enabled: utils.Pointer(true), + Title: utils.Pointer("test response"), + Schedule: utils.Pointer(struct { + Custom struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + }{ + Custom: struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + }{ + Start: "1993-01-01T05:00:00.200Z", + Duration: "13d", + }, + }), + }, + }, + { + name: "just the scope and schedule", + model: MaintenanceWindowModel{ + ID: types.StringValue("/existing-space-id/id"), + + CustomSchedule: MaintenanceWindowSchedule{ + Start: types.StringValue("1993-01-01T05:00:00.200Z"), + Duration: types.StringValue("13d"), + }, + + Scope: &MaintenanceWindowScope{ + Alerting: MaintenanceWindowAlertingScope{ + Kql: types.StringValue("_id: '1234'"), + }, + }, + }, + expectedRequest: kbapi.PatchMaintenanceWindowIdJSONRequestBody{ + Schedule: utils.Pointer(struct { + Custom struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + }{ + Custom: struct { + Duration string `json:"duration"` + Recurring *struct { + End *string `json:"end,omitempty"` + Every *string `json:"every,omitempty"` + Occurrences *float32 `json:"occurrences,omitempty"` + OnMonth *[]float32 `json:"onMonth,omitempty"` + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + OnWeekDay *[]string 
`json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + Start string `json:"start"` + Timezone *string `json:"timezone,omitempty"` + }{ + Start: "1993-01-01T05:00:00.200Z", + Duration: "13d", + }, + }), + + Scope: utils.Pointer(struct { + Alerting struct { + Query struct { + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + }{ + Alerting: struct { + Query struct { + Kql string `json:"kql"` + } `json:"query"` + }{ + Query: struct { + Kql string `json:"kql"` + }{ + Kql: "_id: '1234'", + }, + }, + }, + ), + }, + }, + } + + require.Empty(t, diags) + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + request, diags := tt.model.toAPIUpdateRequest(ctx) + require.Equal(t, request, tt.expectedRequest) + require.Empty(t, diags) + }) + } +} diff --git a/internal/kibana/maintenance_window/read.go b/internal/kibana/maintenance_window/read.go new file mode 100644 index 000000000..aafc2a315 --- /dev/null +++ b/internal/kibana/maintenance_window/read.go @@ -0,0 +1,62 @@ +package maintenance_window + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func (r *MaintenanceWindowResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var stateModel MaintenanceWindowModel + + req.State.GetAttribute(ctx, path.Root("id"), &stateModel.ID) + req.State.GetAttribute(ctx, path.Root("space_id"), &stateModel.SpaceID) + + serverVersion, sdkDiags := r.client.ServerVersion(ctx) + if sdkDiags.HasError() { + return + } + + serverFlavor, sdkDiags := r.client.ServerFlavor(ctx) + if sdkDiags.HasError() { + return + } + + diags := validateMaintenanceWindowServer(serverVersion, serverFlavor) + if diags.HasError() { + return + } + + client, err := r.client.GetKibanaOapiClient() + if err != nil { + resp.Diagnostics.AddError(err.Error(), "") + return + } + + maintenanceWindowID, spaceID := stateModel.getMaintenanceWindowIDAndSpaceID() + maintenanceWindow, diags := kibana_oapi.GetMaintenanceWindow(ctx, client, spaceID, maintenanceWindowID) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + if maintenanceWindow == nil { + resp.State.RemoveResource(ctx) + return + } + + diags = stateModel.fromAPIReadResponse(ctx, maintenanceWindow) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + stateModel.ID = types.StringValue(maintenanceWindowID) + stateModel.SpaceID = types.StringValue(spaceID) + + diags = resp.State.Set(ctx, stateModel) + resp.Diagnostics.Append(diags...) +} diff --git a/internal/kibana/maintenance_window/resource.go b/internal/kibana/maintenance_window/resource.go new file mode 100644 index 000000000..60ffb79a0 --- /dev/null +++ b/internal/kibana/maintenance_window/resource.go @@ -0,0 +1,40 @@ +package maintenance_window + +import ( + "context" + "fmt" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +var ( + _ resource.Resource = &MaintenanceWindowResource{} + _ resource.ResourceWithConfigure = &MaintenanceWindowResource{} + _ resource.ResourceWithImportState = &MaintenanceWindowResource{} +) + +// NewResource is a helper function to simplify the provider implementation. 
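+// The provider exposes it as the `elasticstack_kibana_maintenance_window` resource;
+// the registration in the Resources list of provider/plugin_framework.go appears later in this diff.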
+func NewResource() resource.Resource {
+	return &MaintenanceWindowResource{}
+}
+
+type MaintenanceWindowResource struct {
+	client *clients.ApiClient
+}
+
+func (r *MaintenanceWindowResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
+	client, diags := clients.ConvertProviderData(req.ProviderData)
+	resp.Diagnostics.Append(diags...)
+	r.client = client
+}
+
+// Metadata returns the provider type name.
+func (r *MaintenanceWindowResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
+	resp.TypeName = fmt.Sprintf("%s_%s", req.ProviderTypeName, "kibana_maintenance_window")
+}
+
+func (r *MaintenanceWindowResource) ImportState(ctx context.Context, request resource.ImportStateRequest, response *resource.ImportStateResponse) {
+	resource.ImportStatePassthroughID(ctx, path.Root("id"), request, response)
+}
diff --git a/internal/kibana/maintenance_window/response_types.go b/internal/kibana/maintenance_window/response_types.go
new file mode 100644
index 000000000..e8c9198b3
--- /dev/null
+++ b/internal/kibana/maintenance_window/response_types.go
@@ -0,0 +1,49 @@
+package maintenance_window
+
+/*
+* The types generated automatically for kibana_oapi are deeply nested and very hard to use.
+* This file defines convenience types that can be used to define these nested objects
+* when needed.
+ */
+
+type ResponseJson struct {
+	CreatedAt string `json:"created_at"`
+	CreatedBy *string `json:"created_by"`
+	Enabled bool `json:"enabled"`
+	Id string `json:"id"`
+	Schedule ResponseJsonSchedule `json:"schedule"`
+	Scope *ResponseJsonScope `json:"scope,omitempty"`
+	Title string `json:"title"`
+}
+
+type ResponseJsonSchedule struct {
+	Custom ResponseJsonCustomSchedule `json:"custom"`
+}
+
+type ResponseJsonCustomSchedule struct {
+	Duration string `json:"duration"`
+	Recurring *ResponseJsonRecurring `json:"recurring,omitempty"`
+	Start string `json:"start"`
+	Timezone *string `json:"timezone,omitempty"`
+}
+
+type ResponseJsonRecurring struct {
+	End *string `json:"end,omitempty"`
+	Every *string `json:"every,omitempty"`
+	Occurrences *float32 `json:"occurrences,omitempty"`
+	OnMonth *[]float32 `json:"onMonth,omitempty"`
+	OnMonthDay *[]float32 `json:"onMonthDay,omitempty"`
+	OnWeekDay *[]string `json:"onWeekDay,omitempty"`
+}
+
+type ResponseJsonScope struct {
+	Alerting ResponseJsonAlerting `json:"alerting"`
+}
+
+type ResponseJsonAlerting struct {
+	Query ResponseJsonAlertingQuery `json:"query"`
+}
+
+type ResponseJsonAlertingQuery struct {
+	Kql string `json:"kql"`
+}
diff --git a/internal/kibana/maintenance_window/schema.go b/internal/kibana/maintenance_window/schema.go
new file mode 100644
index 000000000..4ad24e1c5
--- /dev/null
+++ b/internal/kibana/maintenance_window/schema.go
@@ -0,0 +1,153 @@
+package maintenance_window
+
+import (
+	"context"
+
+	"github.com/elastic/terraform-provider-elasticstack/internal/kibana/validators"
+	"github.com/hashicorp/terraform-plugin-framework-validators/int32validator"
+	"github.com/hashicorp/terraform-plugin-framework-validators/listvalidator"
+	"github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator"
+	"github.com/hashicorp/terraform-plugin-framework/resource"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/booldefault"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault"
+	"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
+	"github.com/hashicorp/terraform-plugin-framework/schema/validator"
+	"github.com/hashicorp/terraform-plugin-framework/types"
+)
+
+func (r *MaintenanceWindowResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) {
+	resp.Schema = schema.Schema{
+		Description: "Manages Kibana maintenance windows",
+		Attributes: map[string]schema.Attribute{
+			"id": schema.StringAttribute{
+				Computed: true,
+				MarkdownDescription: "Generated ID for the maintenance window.",
+				PlanModifiers: []planmodifier.String{
+					stringplanmodifier.UseStateForUnknown(),
+				},
+			},
+			"space_id": schema.StringAttribute{
+				Description: "An identifier for the space. If space_id is not provided, the default space is used.",
+				Optional: true,
+				Computed: true,
+				Default: stringdefault.StaticString("default"),
+				PlanModifiers: []planmodifier.String{
+					stringplanmodifier.RequiresReplace(),
+				},
+			},
+			"title": schema.StringAttribute{
+				Description: "The name of the maintenance window.",
+				Required: true,
+				Validators: []validator.String{
+					stringvalidator.LengthAtLeast(1),
+				},
+			},
+			"enabled": schema.BoolAttribute{
+				Description: "Whether the current maintenance window is enabled.",
+				Optional: true,
+				Computed: true,
+				Default: booldefault.StaticBool(false),
+			},
+			"custom_schedule": schema.SingleNestedAttribute{
+				Description: "A set schedule over which the maintenance window applies.",
+				Required: true,
+				Attributes: map[string]schema.Attribute{
+					"start": schema.StringAttribute{
+						Description: "The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`.",
+						Required: true,
+						Validators: []validator.String{
+							validators.StringIsISO8601{},
+						},
+					},
+					"duration": schema.StringAttribute{
+						Description: "The duration of the schedule. It allows values in `<integer><unit>` format. `<unit>` is one of `d`, `h`, `m`, or `s` for days, hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`.",
+						Required: true,
+						Validators: []validator.String{
+							validators.StringIsAlertingDuration{},
+						},
+					},
+					"timezone": schema.StringAttribute{
+						Description: "The timezone of the schedule. The default timezone is UTC.",
+						Optional: true,
+						Computed: true,
+					},
+					"recurring": schema.SingleNestedAttribute{
+						Description: "A set schedule over which the maintenance window applies.",
+						Required: true,
+						Attributes: map[string]schema.Attribute{
+							"end": schema.StringAttribute{
+								Description: "The end date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`.",
+								Optional: true,
+								Validators: []validator.String{
+									validators.StringIsISO8601{},
+								},
+							},
+							"every": schema.StringAttribute{
+								Description: "The duration of the schedule. It allows values in `<integer><unit>` format. `<unit>` is one of `d`, `h`, `m`, or `s` for days, hours, minutes, seconds. 
For example: `1d`, `5h`, `30m`, `5000s`.", + Optional: true, + Validators: []validator.String{ + validators.StringIsMaintenanceWindowIntervalFrequency{}, + }, + }, + "occurrences": schema.Int32Attribute{ + Description: "The total number of recurrences of the schedule.", + Optional: true, + Validators: []validator.Int32{ + int32validator.AtLeast(1), + }, + }, + "on_week_day": schema.ListAttribute{ + Description: "The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule.", + ElementType: types.StringType, + Optional: true, + Validators: []validator.List{ + listvalidator.ValueStringsAre( + validators.StringIsMaintenanceWindowOnWeekDay{}, + ), + }, + }, + "on_month_day": schema.ListAttribute{ + Description: "The specific days of the month for a recurring schedule. Valid values are 1-31.", + ElementType: types.Int32Type, + Optional: true, + Validators: []validator.List{ + listvalidator.ValueInt32sAre( + int32validator.Between(1, 31), + ), + }, + }, + "on_month": schema.ListAttribute{ + Description: "The specific months for a recurring schedule. Valid values are 1-12.", + ElementType: types.Int32Type, + Optional: true, + Validators: []validator.List{ + listvalidator.ValueInt32sAre( + int32validator.Between(1, 12), + ), + }, + }, + }, + }, + }, + }, + "scope": schema.SingleNestedAttribute{ + Description: "An object that narrows the scope of what is affected by this maintenance window.", + Optional: true, + Attributes: map[string]schema.Attribute{ + "alerting": schema.SingleNestedAttribute{ + Description: "A set schedule over which the maintenance window applies.", + Required: true, + Attributes: map[string]schema.Attribute{ + "kql": schema.StringAttribute{ + Description: "A filter written in Kibana Query Language (KQL).", + Required: true, + }, + }, + }, + }, + }, + }, + } +} diff --git a/internal/kibana/maintenance_window/update.go b/internal/kibana/maintenance_window/update.go new file mode 100644 index 000000000..2c0942fce --- /dev/null +++ b/internal/kibana/maintenance_window/update.go @@ -0,0 +1,80 @@ +package maintenance_window + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func (r *MaintenanceWindowResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var planMaintenanceWindow MaintenanceWindowModel + + diags := req.Plan.Get(ctx, &planMaintenanceWindow) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + serverVersion, sdkDiags := r.client.ServerVersion(ctx) + if sdkDiags.HasError() { + return + } + + serverFlavor, sdkDiags := r.client.ServerFlavor(ctx) + if sdkDiags.HasError() { + return + } + + diags = validateMaintenanceWindowServer(serverVersion, serverFlavor) + if diags.HasError() { + return + } + + client, err := r.client.GetKibanaOapiClient() + if err != nil { + resp.Diagnostics.AddError(err.Error(), "") + return + } + + body, diags := planMaintenanceWindow.toAPIUpdateRequest(ctx) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + maintenanceWindowID, spaceID := planMaintenanceWindow.getMaintenanceWindowIDAndSpaceID() + diags = kibana_oapi.UpdateMaintenanceWindow(ctx, client, spaceID, maintenanceWindowID, body) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return + } + + /* + * In create/update paths we typically follow the write operation with a read, and then set the state from the read. + * We want to avoid a dirty plan immediately after an apply. + */ + readMaintenanceWindowResponse, diags := kibana_oapi.GetMaintenanceWindow(ctx, client, spaceID, maintenanceWindowID) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + if readMaintenanceWindowResponse == nil { + resp.State.RemoveResource(ctx) + return + } + + diags = planMaintenanceWindow.fromAPIReadResponse(ctx, readMaintenanceWindowResponse) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + + planMaintenanceWindow.ID = types.StringValue(maintenanceWindowID) + planMaintenanceWindow.SpaceID = types.StringValue(spaceID) + + diags = resp.State.Set(ctx, planMaintenanceWindow) + resp.Diagnostics.Append(diags...) +} diff --git a/internal/kibana/maintenance_window/version_utils.go b/internal/kibana/maintenance_window/version_utils.go new file mode 100644 index 000000000..4c62d3507 --- /dev/null +++ b/internal/kibana/maintenance_window/version_utils.go @@ -0,0 +1,21 @@ +package maintenance_window + +import ( + "fmt" + + "github.com/hashicorp/go-version" + "github.com/hashicorp/terraform-plugin-framework/diag" +) + +func validateMaintenanceWindowServer(serverVersion *version.Version, serverFlavor string) diag.Diagnostics { + var serverlessFlavor = "serverless" + var maintenanceWindowPublicAPIMinSupportedVersion = version.Must(version.NewVersion("9.1.0")) + var diags diag.Diagnostics + + if serverVersion.LessThan(maintenanceWindowPublicAPIMinSupportedVersion) && serverFlavor != serverlessFlavor { + diags.AddError("Maintenance window API not supported", fmt.Sprintf(`The maintenance Window public API feature requires a minimum Elasticsearch version of "%s" or a serverless Kibana instance.`, maintenanceWindowPublicAPIMinSupportedVersion)) + return diags + } + + return nil +} diff --git a/internal/kibana/validators/is_alerting_duration.go b/internal/kibana/validators/is_alerting_duration.go new file mode 100644 index 000000000..715b91769 --- /dev/null +++ b/internal/kibana/validators/is_alerting_duration.go @@ -0,0 +1,49 @@ +package validators + +import ( + "context" + "regexp" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" +) + +var alertingDurationPattern = "^[1-9][0-9]*(?:d|h|m|s)$" + +func StringMatchesAlertingDurationRegex(s string) (matched bool, err error) { + return regexp.MatchString(alertingDurationPattern, s) +} + +type StringIsAlertingDuration struct{} + +func (s StringIsAlertingDuration) Description(_ context.Context) string { + return "a valid alerting duration in seconds (s), minutes (m), hours (h), or days (d)" +} + +func (s StringIsAlertingDuration) MarkdownDescription(ctx context.Context) string { + return s.Description(ctx) +} + +func (s StringIsAlertingDuration) ValidateString(_ context.Context, req validator.StringRequest, resp *validator.StringResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + if matched, err := StringMatchesAlertingDurationRegex(req.ConfigValue.ValueString()); err != nil || !matched { + resp.Diagnostics.AddAttributeError( + req.Path, + "expected value to be a valid alerting duration", + "This value must be a valid alerting duration in seconds (s), minutes (m), 
hours (h), or days (d).",
+		)
+		return
+	}
+}
+
+// Avoid lint error on deprecated SchemaValidateFunc usage.
+//
+//nolint:staticcheck
+func StringIsAlertingDurationSDKV2() schema.SchemaValidateFunc {
+	r := regexp.MustCompile(alertingDurationPattern)
+	return validation.StringMatch(r, "string is not a valid Alerting duration in seconds (s), minutes (m), hours (h), or days (d).")
+}
diff --git a/internal/kibana/validators/is_iso8601_string.go b/internal/kibana/validators/is_iso8601_string.go
new file mode 100644
index 000000000..ba5dd31b0
--- /dev/null
+++ b/internal/kibana/validators/is_iso8601_string.go
@@ -0,0 +1,38 @@
+package validators
+
+import (
+	"context"
+	"regexp"
+
+	"github.com/hashicorp/terraform-plugin-framework/schema/validator"
+)
+
+func StringMatchesISO8601Regex(s string) (matched bool, err error) {
+	pattern := `(\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d\.\d+([+-][0-2]\d:[0-5]\d|Z))|(\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d:[0-5]\d([+-][0-2]\d:[0-5]\d|Z))|(\d{4}-[01]\d-[0-3]\dT[0-2]\d:[0-5]\d([+-][0-2]\d:[0-5]\d|Z))`
+	return regexp.MatchString(pattern, s)
+}
+
+type StringIsISO8601 struct{}
+
+func (s StringIsISO8601) Description(_ context.Context) string {
+	return "a valid ISO8601 date and time formatted string"
+}
+
+func (s StringIsISO8601) MarkdownDescription(ctx context.Context) string {
+	return s.Description(ctx)
+}
+
+func (s StringIsISO8601) ValidateString(_ context.Context, req validator.StringRequest, resp *validator.StringResponse) {
+	if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() {
+		return
+	}
+
+	if matched, err := StringMatchesISO8601Regex(req.ConfigValue.ValueString()); err != nil || !matched {
+		resp.Diagnostics.AddAttributeError(
+			req.Path,
+			"expected value to be a valid ISO8601 string",
+			"This value must be a valid ISO8601 date and time formatted string.",
+		)
+		return
+	}
+}
diff --git a/internal/kibana/validators/is_maintenance_window_interval.go b/internal/kibana/validators/is_maintenance_window_interval.go
new file mode 100644
index 000000000..3f0f6c1a6
--- /dev/null
+++ b/internal/kibana/validators/is_maintenance_window_interval.go
@@ -0,0 +1,38 @@
+package validators
+
+import (
+	"context"
+	"regexp"
+
+	"github.com/hashicorp/terraform-plugin-framework/schema/validator"
+)
+
+func StringMatchesIntervalFrequencyRegex(s string) (matched bool, err error) {
+	pattern := `^[1-9][0-9]*(?:d|w|M|y)$`
+	return regexp.MatchString(pattern, s)
+}
+
+type StringIsMaintenanceWindowIntervalFrequency struct{}
+
+func (s StringIsMaintenanceWindowIntervalFrequency) Description(_ context.Context) string {
+	return "a valid interval/frequency. Allowed values are in the `<integer><unit>` format. `<unit>` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`."
+}
+
+func (s StringIsMaintenanceWindowIntervalFrequency) MarkdownDescription(ctx context.Context) string {
+	return s.Description(ctx)
+}
+
+func (s StringIsMaintenanceWindowIntervalFrequency) ValidateString(_ context.Context, req validator.StringRequest, resp *validator.StringResponse) {
+	if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() {
+		return
+	}
+
+	if matched, err := StringMatchesIntervalFrequencyRegex(req.ConfigValue.ValueString()); err != nil || !matched {
+		resp.Diagnostics.AddAttributeError(
+			req.Path,
+			"expected value to be a valid interval/frequency",
+			"This value must be a valid interval/frequency. Allowed values are in the `<integer><unit>` format. `<unit>` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. 
For example: `15d`, `2w`, `3m`, `1y`.", + ) + return + } +} diff --git a/internal/kibana/validators/is_maintenance_window_week_day.go b/internal/kibana/validators/is_maintenance_window_week_day.go new file mode 100644 index 000000000..07c4cce48 --- /dev/null +++ b/internal/kibana/validators/is_maintenance_window_week_day.go @@ -0,0 +1,38 @@ +package validators + +import ( + "context" + "regexp" + + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +func StringMatchesOnWeekDayRegex(s string) (matched bool, err error) { + pattern := `^(((\+|-)[1-5])?(MO|TU|WE|TH|FR|SA|SU))$` + return regexp.MatchString(pattern, s) +} + +type StringIsMaintenanceWindowOnWeekDay struct{} + +func (s StringIsMaintenanceWindowOnWeekDay) Description(_ context.Context) string { + return "a valid OnWeekDay. Accepted values are specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`)." +} + +func (s StringIsMaintenanceWindowOnWeekDay) MarkdownDescription(ctx context.Context) string { + return s.Description(ctx) +} + +func (s StringIsMaintenanceWindowOnWeekDay) ValidateString(_ context.Context, req validator.StringRequest, resp *validator.StringResponse) { + if req.ConfigValue.IsNull() || req.ConfigValue.IsUnknown() { + return + } + + if matched, err := StringMatchesOnWeekDayRegex(req.ConfigValue.ValueString()); err != nil || !matched { + resp.Diagnostics.AddAttributeError( + req.Path, + "expected value to be a valid OnWeekDay", + "This value must be a valid OnWeekDay. Accepted values are specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`).", + ) + return + } +} diff --git a/internal/kibana/validators/validators_test.go b/internal/kibana/validators/validators_test.go new file mode 100644 index 000000000..16ebbab9e --- /dev/null +++ b/internal/kibana/validators/validators_test.go @@ -0,0 +1,256 @@ +package validators + +import ( + "reflect" + "testing" +) + +func TestStringMatchesAlertingDuration(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + duration string + matched bool + }{ + { + name: "valid Alerting duration string (30d)", + duration: "30d", + matched: true, + }, + { + name: "invalid Alerting duration unit (0s)", + duration: "0s", + matched: false, + }, + { + name: "invalid Alerting duration value (.12y)", + duration: ".12y", + matched: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + matched, _ := StringMatchesAlertingDurationRegex(tt.duration) + if !reflect.DeepEqual(matched, tt.matched) { + t.Errorf("StringMatchesAlertingDurationRegex() failed match = %v, want %v", matched, tt.matched) + } + }) + } +} + +func TestStringMatchesISO8601(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + date string + matched bool + }{ + { + name: "valid complete date 1", + date: "1994-11-05T13:15:30Z", + matched: true, + }, + { + name: "valid complete date 2", + date: "1997-07-04T19:20+01:00", + matched: true, + }, + { + name: "valid complete date 3", + date: "1994-11-05T08:15:30-05:00", + matched: true, + }, + { + name: "valid complete date plus hours, minutes and seconds", + date: "1997-07-16T19:20:30+01:00", + matched: true, + }, + { + name: "valid complete date plus hours, minutes, seconds and a decimal fraction of a second", + date: "1997-07-16T19:20:30.45+01:00", + matched: true, + }, { + name: "invalid year", + date: "1997", + matched: false, + }, + { + name: "invalid year and month", + date: "1997-07", + matched: 
false, + }, + { + name: "invalid complete date", + date: "1997-07-04", + matched: false, + }, + { + name: "invalid hours and minutes", + date: "1997-40-04T30:220+01:00", + matched: false, + }, + { + name: "invalid seconds", + date: "1997-07-16T19:20:80+01:00", + matched: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + matched, _ := StringMatchesISO8601Regex(tt.date) + if !reflect.DeepEqual(matched, tt.matched) { + t.Errorf("StringMatchesISO8601Regex() failed match = %v, want %v", matched, tt.matched) + } + }) + } +} + +func TestStringMatchesOnWeekDay(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + onWeekDay string + matched bool + }{ + { + name: "valid on_week_day string (+1MO)", + onWeekDay: "+1MO", + matched: true, + }, + { + name: "valid on_week_day string (+2TU)", + onWeekDay: "+2TU", + matched: true, + }, + { + name: "valid on_week_day string (+3WE)", + onWeekDay: "+3WE", + matched: true, + }, + { + name: "valid on_week_day string (+4TH)", + onWeekDay: "+4TH", + matched: true, + }, + { + name: "valid on_week_day string (+5FR)", + onWeekDay: "+5FR", + matched: true, + }, + { + name: "valid on_week_day string (-5SA)", + onWeekDay: "-5SA", + matched: true, + }, + { + name: "valid on_week_day string (-4SU)", + onWeekDay: "-4SU", + matched: true, + }, + { + name: "valid on_week_day string (-3MO)", + onWeekDay: "-3MO", + matched: true, + }, + { + name: "valid on_week_day string (-2TU)", + onWeekDay: "-2TU", + matched: true, + }, + { + name: "valid on_week_day string (-1WE)", + onWeekDay: "-1WE", + matched: true, + }, + { + name: "invalid on_week_day unit (FOOBAR)", + onWeekDay: "FOOBAR", + matched: false, + }, + { + name: "invalid on_week_day string (+9MO)", + onWeekDay: "+9MO", + matched: false, + }, + { + name: "invalid on_week_day string (-7FR)", + onWeekDay: "-7FR", + matched: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + matched, _ := StringMatchesOnWeekDayRegex(tt.onWeekDay) + if !reflect.DeepEqual(matched, tt.matched) { + t.Errorf("StringMatchesOnWeekDayRegex() failed match = %v, want %v", matched, tt.matched) + } + }) + } +} + +func TestStringMatchesIntervalFrequencyRegex(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + intervalFrequency string + matched bool + }{ + + { + name: "valid interval/frequency string (2d)", + intervalFrequency: "2d", + matched: true, + }, + { + name: "valid interval/frequency string (5w)", + intervalFrequency: "5w", + matched: true, + }, + { + name: "valid interval/frequency string (3M)", + intervalFrequency: "3M", + matched: true, + }, + { + name: "valid interval/frequency string (1y)", + intervalFrequency: "1y", + matched: true, + }, + { + name: "invalid interval/frequency string (5m)", + intervalFrequency: "5m", + matched: false, + }, + { + name: "invalid interval/frequency string (-1w)", + intervalFrequency: "-1w", + matched: false, + }, + { + name: "invalid interval/frequency string (invalid)", + intervalFrequency: "invalid", + matched: false, + }, + { + name: "invalid interval/frequency empty string", + intervalFrequency: " ", + matched: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + matched, _ := StringMatchesIntervalFrequencyRegex(tt.intervalFrequency) + if !reflect.DeepEqual(matched, tt.matched) { + t.Errorf("StringMatchesOnWeekDayRegex() failed match = %v, want %v", matched, tt.matched) + } + }) + } +} diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go index 
4cb4408fe..4da1e743d 100644 --- a/provider/plugin_framework.go +++ b/provider/plugin_framework.go @@ -22,6 +22,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/fleet/server_host" "github.com/elastic/terraform-provider-elasticstack/internal/kibana/data_view" "github.com/elastic/terraform-provider-elasticstack/internal/kibana/import_saved_objects" + "github.com/elastic/terraform-provider-elasticstack/internal/kibana/maintenance_window" "github.com/elastic/terraform-provider-elasticstack/internal/kibana/spaces" "github.com/elastic/terraform-provider-elasticstack/internal/kibana/synthetics" "github.com/elastic/terraform-provider-elasticstack/internal/kibana/synthetics/parameter" @@ -110,6 +111,7 @@ func (p *Provider) Resources(ctx context.Context) []func() resource.Resource { output.NewResource, server_host.NewResource, system_user.NewSystemUserResource, + maintenance_window.NewResource, enrich.NewEnrichPolicyResource, role_mapping.NewRoleMappingResource, } diff --git a/templates/resources/kibana_maintenance_window.md.tmpl b/templates/resources/kibana_maintenance_window.md.tmpl new file mode 100644 index 000000000..e7fddee61 --- /dev/null +++ b/templates/resources/kibana_maintenance_window.md.tmpl @@ -0,0 +1,23 @@ +--- +subcategory: "Kibana" +layout: "" +page_title: "Elasticstack: elasticstack_kibana_maintenance_window Resource" +description: |- + Manages Kibana maintenance windows. +--- + +# Resource: elasticstack_kibana_maintenance_window + +Creates and manages Kibana [maintenance windows](https://www.elastic.co/docs/api/doc/kibana/group/endpoint-maintenance-window) + +## Example Usage + +{{ tffile "examples/resources/elasticstack_kibana_maintenance_window/resource.tf" }} + +{{ .SchemaMarkdown | trimspace }} + +## Import + +Import is supported using the following syntax: + +{{ codefile "shell" "examples/resources/elasticstack_kibana_maintenance_window/import.sh" }} From 99dadf8c4d32a4ce78076c51e738a6c602f33463 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 8 Sep 2025 19:06:32 +1000 Subject: [PATCH 45/66] Bump actions/setup-go from 5.5.0 to 6.0.0 (#1286) Bumps [actions/setup-go](https://github.com/actions/setup-go) from 5.5.0 to 6.0.0. - [Release notes](https://github.com/actions/setup-go/releases) - [Commits](https://github.com/actions/setup-go/compare/d35c59abb061a4a6fb18e82ac0862c26744d6ab5...44694675825211faa026b3c33043df3e48a5fa00) --- updated-dependencies: - dependency-name: actions/setup-go dependency-version: 6.0.0 dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- .github/workflows/test.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index cc08bc2b9..ab48e533a 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -24,7 +24,7 @@ jobs: steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v5 with: go-version-file: 'go.mod' cache: true diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index cf08fedc0..ad37a2317 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -19,7 +19,7 @@ jobs: timeout-minutes: 5 steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v5 with: go-version-file: 'go.mod' cache: true @@ -35,7 +35,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v5 with: go-version-file: 'go.mod' cache: true @@ -130,7 +130,7 @@ jobs: - '9.0.3' steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v5 with: go-version-file: 'go.mod' cache: true From a2c34c6ec8fddaa17f89aaa8bebf672b505a5a8e Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Mon, 8 Sep 2025 22:17:47 +1000 Subject: [PATCH 46/66] chore(deps): update actions/setup-go digest to d35c59a (#1288) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- .github/workflows/test.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index ab48e533a..cc08bc2b9 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -24,7 +24,7 @@ jobs: steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v5 + - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' cache: true diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ad37a2317..cf08fedc0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -19,7 +19,7 @@ jobs: timeout-minutes: 5 steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v5 + - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' cache: true @@ -35,7 +35,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: 
actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v5 + - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' cache: true @@ -130,7 +130,7 @@ jobs: - '9.0.3' steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v5 + - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 with: go-version-file: 'go.mod' cache: true From 99531a159dc98099c4ba861f9a3bcb43cc4fe8ab Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Tue, 9 Sep 2025 06:34:09 +1000 Subject: [PATCH 47/66] Don't allowlist API operations within the Kibana API spec (#1270) * Don't allowlist API operations within the Kibana API spec Instead generate a client covering all available endpoints * yield return on patch --- generated/kbapi/kibana.gen.go | 154460 ++++++++++++++++++++--- generated/kbapi/transform_schema.go | 134 +- internal/clients/fleet/fleet.go | 2 +- internal/fleet/agent_policy/models.go | 4 +- 4 files changed, 136207 insertions(+), 18393 deletions(-) diff --git a/generated/kbapi/kibana.gen.go b/generated/kbapi/kibana.gen.go index 0ac695f8f..97e146c28 100644 --- a/generated/kbapi/kibana.gen.go +++ b/generated/kbapi/kibana.gen.go @@ -13,8 +13,10 @@ import ( "net/http" "net/url" "strings" + "time" "github.com/oapi-codegen/runtime" + openapi_types "github.com/oapi-codegen/runtime/types" ) const ( @@ -22,24178 +24,142099 @@ const ( BasicAuthScopes = "basicAuth.Scopes" ) -// Defines values for DataViews404ResponseError. +// Defines values for APMUIAgentKeysObjectPrivileges. const ( - NotFound DataViews404ResponseError = "Not Found" + ConfigAgentRead APMUIAgentKeysObjectPrivileges = "config_agent:read" + EventWrite APMUIAgentKeysObjectPrivileges = "event:write" ) -// Defines values for DataViews404ResponseStatusCode. +// Defines values for APMUIAnnotationSearchResponseAnnotationsType. const ( - N404 DataViews404ResponseStatusCode = 404 + Version APMUIAnnotationSearchResponseAnnotationsType = "version" ) -// Defines values for AgentPolicyMonitoringEnabled. +// Defines values for Alerting401ResponseError. const ( - AgentPolicyMonitoringEnabledLogs AgentPolicyMonitoringEnabled = "logs" - AgentPolicyMonitoringEnabledMetrics AgentPolicyMonitoringEnabled = "metrics" - AgentPolicyMonitoringEnabledTraces AgentPolicyMonitoringEnabled = "traces" + Unauthorized Alerting401ResponseError = "Unauthorized" ) -// Defines values for AgentPolicyPackagePolicies1Inputs0StreamsRelease. +// Defines values for Alerting401ResponseStatusCode. const ( - AgentPolicyPackagePolicies1Inputs0StreamsReleaseBeta AgentPolicyPackagePolicies1Inputs0StreamsRelease = "beta" - AgentPolicyPackagePolicies1Inputs0StreamsReleaseExperimental AgentPolicyPackagePolicies1Inputs0StreamsRelease = "experimental" - AgentPolicyPackagePolicies1Inputs0StreamsReleaseGa AgentPolicyPackagePolicies1Inputs0StreamsRelease = "ga" + N401 Alerting401ResponseStatusCode = 401 ) -// Defines values for AgentPolicyStatus. +// Defines values for CasesActions. const ( - Active AgentPolicyStatus = "active" - Inactive AgentPolicyStatus = "inactive" + CasesActionsAdd CasesActions = "add" + CasesActionsCreate CasesActions = "create" + CasesActionsDelete CasesActions = "delete" + CasesActionsPushToService CasesActions = "push_to_service" + CasesActionsUpdate CasesActions = "update" ) -// Defines values for AuthType. +// Defines values for CasesAddAlertCommentRequestPropertiesType. 
const ( - WebhookAuthenticationBasic AuthType = "webhook-authentication-basic" - WebhookAuthenticationSsl AuthType = "webhook-authentication-ssl" + CasesAddAlertCommentRequestPropertiesTypeAlert CasesAddAlertCommentRequestPropertiesType = "alert" ) -// Defines values for CasesWebhookConfigCreateCommentMethod. +// Defines values for CasesAddUserCommentRequestPropertiesType. const ( - CasesWebhookConfigCreateCommentMethodPatch CasesWebhookConfigCreateCommentMethod = "patch" - CasesWebhookConfigCreateCommentMethodPost CasesWebhookConfigCreateCommentMethod = "post" - CasesWebhookConfigCreateCommentMethodPut CasesWebhookConfigCreateCommentMethod = "put" + CasesAddUserCommentRequestPropertiesTypeUser CasesAddUserCommentRequestPropertiesType = "user" ) -// Defines values for CasesWebhookConfigCreateIncidentMethod. +// Defines values for CasesAlertCommentResponsePropertiesType. const ( - CasesWebhookConfigCreateIncidentMethodPatch CasesWebhookConfigCreateIncidentMethod = "patch" - CasesWebhookConfigCreateIncidentMethodPost CasesWebhookConfigCreateIncidentMethod = "post" - CasesWebhookConfigCreateIncidentMethodPut CasesWebhookConfigCreateIncidentMethod = "put" + CasesAlertCommentResponsePropertiesTypeAlert CasesAlertCommentResponsePropertiesType = "alert" ) -// Defines values for CasesWebhookConfigUpdateIncidentMethod. +// Defines values for CasesCaseResponsePropertiesCustomFieldsType. const ( - CasesWebhookConfigUpdateIncidentMethodPatch CasesWebhookConfigUpdateIncidentMethod = "patch" - CasesWebhookConfigUpdateIncidentMethodPost CasesWebhookConfigUpdateIncidentMethod = "post" - CasesWebhookConfigUpdateIncidentMethodPut CasesWebhookConfigUpdateIncidentMethod = "put" + CasesCaseResponsePropertiesCustomFieldsTypeText CasesCaseResponsePropertiesCustomFieldsType = "text" + CasesCaseResponsePropertiesCustomFieldsTypeToggle CasesCaseResponsePropertiesCustomFieldsType = "toggle" ) -// Defines values for CertType. +// Defines values for CasesCaseSeverity. const ( - SslCrtKey CertType = "ssl-crt-key" - SslPfx CertType = "ssl-pfx" + CasesCaseSeverityCritical CasesCaseSeverity = "critical" + CasesCaseSeverityHigh CasesCaseSeverity = "high" + CasesCaseSeverityLow CasesCaseSeverity = "low" + CasesCaseSeverityMedium CasesCaseSeverity = "medium" ) -// Defines values for EmailConfigService. +// Defines values for CasesCaseStatus. const ( - EmailConfigServiceElasticCloud EmailConfigService = "elastic_cloud" - EmailConfigServiceExchangeServer EmailConfigService = "exchange_server" - EmailConfigServiceGmail EmailConfigService = "gmail" - EmailConfigServiceOther EmailConfigService = "other" - EmailConfigServiceOutlook365 EmailConfigService = "outlook365" - EmailConfigServiceSes EmailConfigService = "ses" + CasesCaseStatusClosed CasesCaseStatus = "closed" + CasesCaseStatusInProgress CasesCaseStatus = "in-progress" + CasesCaseStatusOpen CasesCaseStatus = "open" ) -// Defines values for GenaiAzureConfigApiProvider. +// Defines values for CasesClosureTypes. const ( - AzureOpenAI GenaiAzureConfigApiProvider = "Azure OpenAI" + CloseByPushing CasesClosureTypes = "close-by-pushing" + CloseByUser CasesClosureTypes = "close-by-user" ) -// Defines values for GenaiOpenaiConfigApiProvider. +// Defines values for CasesConnectorPropertiesCasesWebhookType. const ( - OpenAI GenaiOpenaiConfigApiProvider = "OpenAI" + CasesConnectorPropertiesCasesWebhookTypeDotCasesWebhook CasesConnectorPropertiesCasesWebhookType = ".cases-webhook" ) -// Defines values for GenaiOpenaiOtherConfigApiProvider. 
+// Defines values for CasesConnectorPropertiesJiraType. const ( - GenaiOpenaiOtherConfigApiProviderOther GenaiOpenaiOtherConfigApiProvider = "Other" + DotJira CasesConnectorPropertiesJiraType = ".jira" ) -// Defines values for GenaiOpenaiOtherConfigVerificationMode. +// Defines values for CasesConnectorPropertiesNoneType. const ( - GenaiOpenaiOtherConfigVerificationModeCertificate GenaiOpenaiOtherConfigVerificationMode = "certificate" - GenaiOpenaiOtherConfigVerificationModeFull GenaiOpenaiOtherConfigVerificationMode = "full" - GenaiOpenaiOtherConfigVerificationModeNone GenaiOpenaiOtherConfigVerificationMode = "none" + DotNone CasesConnectorPropertiesNoneType = ".none" ) -// Defines values for NewOutputElasticsearchPreset. +// Defines values for CasesConnectorPropertiesResilientType. const ( - NewOutputElasticsearchPresetBalanced NewOutputElasticsearchPreset = "balanced" - NewOutputElasticsearchPresetCustom NewOutputElasticsearchPreset = "custom" - NewOutputElasticsearchPresetLatency NewOutputElasticsearchPreset = "latency" - NewOutputElasticsearchPresetScale NewOutputElasticsearchPreset = "scale" - NewOutputElasticsearchPresetThroughput NewOutputElasticsearchPreset = "throughput" + DotResilient CasesConnectorPropertiesResilientType = ".resilient" ) -// Defines values for NewOutputElasticsearchType. +// Defines values for CasesConnectorPropertiesServicenowType. const ( - NewOutputElasticsearchTypeElasticsearch NewOutputElasticsearchType = "elasticsearch" + DotServicenow CasesConnectorPropertiesServicenowType = ".servicenow" ) -// Defines values for NewOutputKafkaAuthType. +// Defines values for CasesConnectorPropertiesServicenowSirType. const ( - NewOutputKafkaAuthTypeKerberos NewOutputKafkaAuthType = "kerberos" - NewOutputKafkaAuthTypeNone NewOutputKafkaAuthType = "none" - NewOutputKafkaAuthTypeSsl NewOutputKafkaAuthType = "ssl" - NewOutputKafkaAuthTypeUserPass NewOutputKafkaAuthType = "user_pass" + DotServicenowSir CasesConnectorPropertiesServicenowSirType = ".servicenow-sir" ) -// Defines values for NewOutputKafkaCompression. +// Defines values for CasesConnectorPropertiesSwimlaneType. const ( - NewOutputKafkaCompressionGzip NewOutputKafkaCompression = "gzip" - NewOutputKafkaCompressionLz4 NewOutputKafkaCompression = "lz4" - NewOutputKafkaCompressionNone NewOutputKafkaCompression = "none" - NewOutputKafkaCompressionSnappy NewOutputKafkaCompression = "snappy" + DotSwimlane CasesConnectorPropertiesSwimlaneType = ".swimlane" ) -// Defines values for NewOutputKafkaPartition. +// Defines values for CasesConnectorTypes. const ( - NewOutputKafkaPartitionHash NewOutputKafkaPartition = "hash" - NewOutputKafkaPartitionRandom NewOutputKafkaPartition = "random" - NewOutputKafkaPartitionRoundRobin NewOutputKafkaPartition = "round_robin" + CasesConnectorTypesDotCasesWebhook CasesConnectorTypes = ".cases-webhook" + CasesConnectorTypesDotJira CasesConnectorTypes = ".jira" + CasesConnectorTypesDotNone CasesConnectorTypes = ".none" + CasesConnectorTypesDotResilient CasesConnectorTypes = ".resilient" + CasesConnectorTypesDotServicenow CasesConnectorTypes = ".servicenow" + CasesConnectorTypesDotServicenowSir CasesConnectorTypes = ".servicenow-sir" + CasesConnectorTypesDotSwimlane CasesConnectorTypes = ".swimlane" ) -// Defines values for NewOutputKafkaRequiredAcks. +// Defines values for CasesCreateCaseRequestCustomFieldsType. 
const ( - NewOutputKafkaRequiredAcksMinus1 NewOutputKafkaRequiredAcks = -1 - NewOutputKafkaRequiredAcksN0 NewOutputKafkaRequiredAcks = 0 - NewOutputKafkaRequiredAcksN1 NewOutputKafkaRequiredAcks = 1 + CasesCreateCaseRequestCustomFieldsTypeText CasesCreateCaseRequestCustomFieldsType = "text" + CasesCreateCaseRequestCustomFieldsTypeToggle CasesCreateCaseRequestCustomFieldsType = "toggle" ) -// Defines values for NewOutputKafkaSaslMechanism. +// Defines values for CasesOwner. const ( - NewOutputKafkaSaslMechanismPLAIN NewOutputKafkaSaslMechanism = "PLAIN" - NewOutputKafkaSaslMechanismSCRAMSHA256 NewOutputKafkaSaslMechanism = "SCRAM-SHA-256" - NewOutputKafkaSaslMechanismSCRAMSHA512 NewOutputKafkaSaslMechanism = "SCRAM-SHA-512" + CasesOwnerCases CasesOwner = "cases" + CasesOwnerObservability CasesOwner = "observability" + CasesOwnerSecuritySolution CasesOwner = "securitySolution" ) -// Defines values for NewOutputKafkaType. +// Defines values for CasesPayloadAlertCommentCommentType. const ( - NewOutputKafkaTypeKafka NewOutputKafkaType = "kafka" + CasesPayloadAlertCommentCommentTypeAlert CasesPayloadAlertCommentCommentType = "alert" ) -// Defines values for NewOutputLogstashType. +// Defines values for CasesPayloadUserCommentCommentType. const ( - NewOutputLogstashTypeLogstash NewOutputLogstashType = "logstash" + CasesPayloadUserCommentCommentTypeUser CasesPayloadUserCommentCommentType = "user" ) -// Defines values for NewOutputRemoteElasticsearchPreset. +// Defines values for CasesSearchFieldsType. const ( - NewOutputRemoteElasticsearchPresetBalanced NewOutputRemoteElasticsearchPreset = "balanced" - NewOutputRemoteElasticsearchPresetCustom NewOutputRemoteElasticsearchPreset = "custom" - NewOutputRemoteElasticsearchPresetLatency NewOutputRemoteElasticsearchPreset = "latency" - NewOutputRemoteElasticsearchPresetScale NewOutputRemoteElasticsearchPreset = "scale" - NewOutputRemoteElasticsearchPresetThroughput NewOutputRemoteElasticsearchPreset = "throughput" + CasesSearchFieldsTypeDescription CasesSearchFieldsType = "description" + CasesSearchFieldsTypeTitle CasesSearchFieldsType = "title" ) -// Defines values for NewOutputRemoteElasticsearchType. +// Defines values for CasesSetCaseConfigurationRequestCustomFieldsType. const ( - NewOutputRemoteElasticsearchTypeRemoteElasticsearch NewOutputRemoteElasticsearchType = "remote_elasticsearch" + CasesSetCaseConfigurationRequestCustomFieldsTypeText CasesSetCaseConfigurationRequestCustomFieldsType = "text" + CasesSetCaseConfigurationRequestCustomFieldsTypeToggle CasesSetCaseConfigurationRequestCustomFieldsType = "toggle" ) -// Defines values for NewOutputSslVerificationMode. +// Defines values for CasesTemplatesCaseFieldsCustomFieldsType. const ( - NewOutputSslVerificationModeCertificate NewOutputSslVerificationMode = "certificate" - NewOutputSslVerificationModeFull NewOutputSslVerificationMode = "full" - NewOutputSslVerificationModeNone NewOutputSslVerificationMode = "none" - NewOutputSslVerificationModeStrict NewOutputSslVerificationMode = "strict" + CasesTemplatesCaseFieldsCustomFieldsTypeText CasesTemplatesCaseFieldsCustomFieldsType = "text" + CasesTemplatesCaseFieldsCustomFieldsTypeToggle CasesTemplatesCaseFieldsCustomFieldsType = "toggle" ) -// Defines values for OutputElasticsearchPreset. +// Defines values for CasesUpdateAlertCommentRequestPropertiesType. 
const ( - OutputElasticsearchPresetBalanced OutputElasticsearchPreset = "balanced" - OutputElasticsearchPresetCustom OutputElasticsearchPreset = "custom" - OutputElasticsearchPresetLatency OutputElasticsearchPreset = "latency" - OutputElasticsearchPresetScale OutputElasticsearchPreset = "scale" - OutputElasticsearchPresetThroughput OutputElasticsearchPreset = "throughput" + CasesUpdateAlertCommentRequestPropertiesTypeAlert CasesUpdateAlertCommentRequestPropertiesType = "alert" ) -// Defines values for OutputElasticsearchType. +// Defines values for CasesUpdateCaseConfigurationRequestCustomFieldsType. const ( - OutputElasticsearchTypeElasticsearch OutputElasticsearchType = "elasticsearch" + CasesUpdateCaseConfigurationRequestCustomFieldsTypeText CasesUpdateCaseConfigurationRequestCustomFieldsType = "text" + CasesUpdateCaseConfigurationRequestCustomFieldsTypeToggle CasesUpdateCaseConfigurationRequestCustomFieldsType = "toggle" ) -// Defines values for OutputKafkaAuthType. +// Defines values for CasesUpdateCaseRequestCasesCustomFieldsType. const ( - OutputKafkaAuthTypeKerberos OutputKafkaAuthType = "kerberos" - OutputKafkaAuthTypeNone OutputKafkaAuthType = "none" - OutputKafkaAuthTypeSsl OutputKafkaAuthType = "ssl" - OutputKafkaAuthTypeUserPass OutputKafkaAuthType = "user_pass" + CasesUpdateCaseRequestCasesCustomFieldsTypeText CasesUpdateCaseRequestCasesCustomFieldsType = "text" + CasesUpdateCaseRequestCasesCustomFieldsTypeToggle CasesUpdateCaseRequestCasesCustomFieldsType = "toggle" ) -// Defines values for OutputKafkaCompression. +// Defines values for CasesUpdateUserCommentRequestPropertiesType. const ( - OutputKafkaCompressionGzip OutputKafkaCompression = "gzip" - OutputKafkaCompressionLz4 OutputKafkaCompression = "lz4" - OutputKafkaCompressionNone OutputKafkaCompression = "none" - OutputKafkaCompressionSnappy OutputKafkaCompression = "snappy" + CasesUpdateUserCommentRequestPropertiesTypeUser CasesUpdateUserCommentRequestPropertiesType = "user" ) -// Defines values for OutputKafkaPartition. +// Defines values for CasesUserActionsFindResponsePropertiesType. const ( - OutputKafkaPartitionHash OutputKafkaPartition = "hash" - OutputKafkaPartitionRandom OutputKafkaPartition = "random" - OutputKafkaPartitionRoundRobin OutputKafkaPartition = "round_robin" + CasesUserActionsFindResponsePropertiesTypeAssignees CasesUserActionsFindResponsePropertiesType = "assignees" + CasesUserActionsFindResponsePropertiesTypeComment CasesUserActionsFindResponsePropertiesType = "comment" + CasesUserActionsFindResponsePropertiesTypeConnector CasesUserActionsFindResponsePropertiesType = "connector" + CasesUserActionsFindResponsePropertiesTypeCreateCase CasesUserActionsFindResponsePropertiesType = "create_case" + CasesUserActionsFindResponsePropertiesTypeDescription CasesUserActionsFindResponsePropertiesType = "description" + CasesUserActionsFindResponsePropertiesTypePushed CasesUserActionsFindResponsePropertiesType = "pushed" + CasesUserActionsFindResponsePropertiesTypeSettings CasesUserActionsFindResponsePropertiesType = "settings" + CasesUserActionsFindResponsePropertiesTypeSeverity CasesUserActionsFindResponsePropertiesType = "severity" + CasesUserActionsFindResponsePropertiesTypeStatus CasesUserActionsFindResponsePropertiesType = "status" + CasesUserActionsFindResponsePropertiesTypeTags CasesUserActionsFindResponsePropertiesType = "tags" + CasesUserActionsFindResponsePropertiesTypeTitle CasesUserActionsFindResponsePropertiesType = "title" ) -// Defines values for OutputKafkaRequiredAcks. 
+// Defines values for CasesUserCommentResponsePropertiesType. const ( - OutputKafkaRequiredAcksMinus1 OutputKafkaRequiredAcks = -1 - OutputKafkaRequiredAcksN0 OutputKafkaRequiredAcks = 0 - OutputKafkaRequiredAcksN1 OutputKafkaRequiredAcks = 1 + CasesUserCommentResponsePropertiesTypeUser CasesUserCommentResponsePropertiesType = "user" ) -// Defines values for OutputKafkaSaslMechanism. +// Defines values for DataViews404ResponseError. const ( - OutputKafkaSaslMechanismPLAIN OutputKafkaSaslMechanism = "PLAIN" - OutputKafkaSaslMechanismSCRAMSHA256 OutputKafkaSaslMechanism = "SCRAM-SHA-256" - OutputKafkaSaslMechanismSCRAMSHA512 OutputKafkaSaslMechanism = "SCRAM-SHA-512" + NotFound DataViews404ResponseError = "Not Found" ) -// Defines values for OutputKafkaType. +// Defines values for DataViews404ResponseStatusCode. const ( - OutputKafkaTypeKafka OutputKafkaType = "kafka" + N404 DataViews404ResponseStatusCode = 404 ) -// Defines values for OutputLogstashType. +// Defines values for KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevel. const ( - OutputLogstashTypeLogstash OutputLogstashType = "logstash" + KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevelAvailable KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevel = "available" + KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevelCritical KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevel = "critical" + KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevelDegraded KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevel = "degraded" + KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevelUnavailable KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevel = "unavailable" ) -// Defines values for OutputRemoteElasticsearchPreset. +// Defines values for KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevel. const ( - OutputRemoteElasticsearchPresetBalanced OutputRemoteElasticsearchPreset = "balanced" - OutputRemoteElasticsearchPresetCustom OutputRemoteElasticsearchPreset = "custom" - OutputRemoteElasticsearchPresetLatency OutputRemoteElasticsearchPreset = "latency" - OutputRemoteElasticsearchPresetScale OutputRemoteElasticsearchPreset = "scale" - OutputRemoteElasticsearchPresetThroughput OutputRemoteElasticsearchPreset = "throughput" + KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevelAvailable KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevel = "available" + KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevelCritical KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevel = "critical" + KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevelDegraded KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevel = "degraded" + KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevelUnavailable KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevel = "unavailable" ) -// Defines values for OutputRemoteElasticsearchType. +// Defines values for KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevel. 
const ( - OutputRemoteElasticsearchTypeRemoteElasticsearch OutputRemoteElasticsearchType = "remote_elasticsearch" + KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevelAvailable KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevel = "available" + KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevelCritical KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevel = "critical" + KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevelDegraded KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevel = "degraded" + KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevelUnavailable KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevel = "unavailable" ) -// Defines values for OutputSslVerificationMode. +// Defines values for KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevel. const ( - OutputSslVerificationModeCertificate OutputSslVerificationMode = "certificate" - OutputSslVerificationModeFull OutputSslVerificationMode = "full" - OutputSslVerificationModeNone OutputSslVerificationMode = "none" - OutputSslVerificationModeStrict OutputSslVerificationMode = "strict" + KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevelAvailable KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevel = "available" + KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevelCritical KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevel = "critical" + KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevelDegraded KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevel = "degraded" + KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevelUnavailable KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevel = "unavailable" ) -// Defines values for PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0. +// Defines values for KibanaHTTPAPIsCoreStatusResponseStatusOverallLevel. 
const ( - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0CspRuleTemplate PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "csp-rule-template" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Dashboard PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "dashboard" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0IndexPattern PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "index-pattern" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Lens PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "lens" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Map PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "map" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0MlModule PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "ml-module" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0OsqueryPackAsset PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "osquery-pack-asset" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0OsquerySavedQuery PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "osquery-saved-query" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Search PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "search" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0SecurityRule PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "security-rule" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Tag PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "tag" - PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Visualization PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "visualization" + KibanaHTTPAPIsCoreStatusResponseStatusOverallLevelAvailable KibanaHTTPAPIsCoreStatusResponseStatusOverallLevel = "available" + KibanaHTTPAPIsCoreStatusResponseStatusOverallLevelCritical KibanaHTTPAPIsCoreStatusResponseStatusOverallLevel = "critical" + KibanaHTTPAPIsCoreStatusResponseStatusOverallLevelDegraded KibanaHTTPAPIsCoreStatusResponseStatusOverallLevel = "degraded" + KibanaHTTPAPIsCoreStatusResponseStatusOverallLevelUnavailable KibanaHTTPAPIsCoreStatusResponseStatusOverallLevel = "unavailable" ) -// Defines values for PackageInfoInstallationInfoInstallSource. +// Defines values for KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevel. const ( - PackageInfoInstallationInfoInstallSourceBundled PackageInfoInstallationInfoInstallSource = "bundled" - PackageInfoInstallationInfoInstallSourceCustom PackageInfoInstallationInfoInstallSource = "custom" - PackageInfoInstallationInfoInstallSourceRegistry PackageInfoInstallationInfoInstallSource = "registry" - PackageInfoInstallationInfoInstallSourceUpload PackageInfoInstallationInfoInstallSource = "upload" + KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevelAvailable KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevel = "available" + KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevelCritical KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevel = "critical" + KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevelDegraded KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevel = "degraded" + KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevelUnavailable KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevel = "unavailable" ) -// Defines values for PackageInfoInstallationInfoInstallStatus. 
+// Defines values for KibanaHTTPAPIsCoreStatusResponseVersionBuildFlavor. const ( - PackageInfoInstallationInfoInstallStatusInstallFailed PackageInfoInstallationInfoInstallStatus = "install_failed" - PackageInfoInstallationInfoInstallStatusInstalled PackageInfoInstallationInfoInstallStatus = "installed" - PackageInfoInstallationInfoInstallStatusInstalling PackageInfoInstallationInfoInstallStatus = "installing" + Serverless KibanaHTTPAPIsCoreStatusResponseVersionBuildFlavor = "serverless" + Traditional KibanaHTTPAPIsCoreStatusResponseVersionBuildFlavor = "traditional" ) -// Defines values for PackageInfoInstallationInfoInstalledEsType. +// Defines values for ObservabilityAIAssistantAPIFunctionCallTrigger. const ( - PackageInfoInstallationInfoInstalledEsTypeComponentTemplate PackageInfoInstallationInfoInstalledEsType = "component_template" - PackageInfoInstallationInfoInstalledEsTypeDataStreamIlmPolicy PackageInfoInstallationInfoInstalledEsType = "data_stream_ilm_policy" - PackageInfoInstallationInfoInstalledEsTypeIlmPolicy PackageInfoInstallationInfoInstalledEsType = "ilm_policy" - PackageInfoInstallationInfoInstalledEsTypeIndex PackageInfoInstallationInfoInstalledEsType = "index" - PackageInfoInstallationInfoInstalledEsTypeIndexTemplate PackageInfoInstallationInfoInstalledEsType = "index_template" - PackageInfoInstallationInfoInstalledEsTypeIngestPipeline PackageInfoInstallationInfoInstalledEsType = "ingest_pipeline" - PackageInfoInstallationInfoInstalledEsTypeMlModel PackageInfoInstallationInfoInstalledEsType = "ml_model" - PackageInfoInstallationInfoInstalledEsTypeTransform PackageInfoInstallationInfoInstalledEsType = "transform" + ObservabilityAIAssistantAPIFunctionCallTriggerAssistant ObservabilityAIAssistantAPIFunctionCallTrigger = "assistant" + ObservabilityAIAssistantAPIFunctionCallTriggerElastic ObservabilityAIAssistantAPIFunctionCallTrigger = "elastic" + ObservabilityAIAssistantAPIFunctionCallTriggerUser ObservabilityAIAssistantAPIFunctionCallTrigger = "user" ) -// Defines values for PackageInfoInstallationInfoInstalledKibanaType0. +// Defines values for ObservabilityAIAssistantAPIMessageRoleEnum. 
const ( - PackageInfoInstallationInfoInstalledKibanaType0CspRuleTemplate PackageInfoInstallationInfoInstalledKibanaType0 = "csp-rule-template" - PackageInfoInstallationInfoInstalledKibanaType0Dashboard PackageInfoInstallationInfoInstalledKibanaType0 = "dashboard" - PackageInfoInstallationInfoInstalledKibanaType0IndexPattern PackageInfoInstallationInfoInstalledKibanaType0 = "index-pattern" - PackageInfoInstallationInfoInstalledKibanaType0Lens PackageInfoInstallationInfoInstalledKibanaType0 = "lens" - PackageInfoInstallationInfoInstalledKibanaType0Map PackageInfoInstallationInfoInstalledKibanaType0 = "map" - PackageInfoInstallationInfoInstalledKibanaType0MlModule PackageInfoInstallationInfoInstalledKibanaType0 = "ml-module" - PackageInfoInstallationInfoInstalledKibanaType0OsqueryPackAsset PackageInfoInstallationInfoInstalledKibanaType0 = "osquery-pack-asset" - PackageInfoInstallationInfoInstalledKibanaType0OsquerySavedQuery PackageInfoInstallationInfoInstalledKibanaType0 = "osquery-saved-query" - PackageInfoInstallationInfoInstalledKibanaType0Search PackageInfoInstallationInfoInstalledKibanaType0 = "search" - PackageInfoInstallationInfoInstalledKibanaType0SecurityRule PackageInfoInstallationInfoInstalledKibanaType0 = "security-rule" - PackageInfoInstallationInfoInstalledKibanaType0Tag PackageInfoInstallationInfoInstalledKibanaType0 = "tag" - PackageInfoInstallationInfoInstalledKibanaType0Visualization PackageInfoInstallationInfoInstalledKibanaType0 = "visualization" + ObservabilityAIAssistantAPIMessageRoleEnumAssistant ObservabilityAIAssistantAPIMessageRoleEnum = "assistant" + ObservabilityAIAssistantAPIMessageRoleEnumElastic ObservabilityAIAssistantAPIMessageRoleEnum = "elastic" + ObservabilityAIAssistantAPIMessageRoleEnumFunction ObservabilityAIAssistantAPIMessageRoleEnum = "function" + ObservabilityAIAssistantAPIMessageRoleEnumSystem ObservabilityAIAssistantAPIMessageRoleEnum = "system" + ObservabilityAIAssistantAPIMessageRoleEnumUser ObservabilityAIAssistantAPIMessageRoleEnum = "user" ) -// Defines values for PackageInfoInstallationInfoVerificationStatus. +// Defines values for SLOsBudgetingMethod. const ( - PackageInfoInstallationInfoVerificationStatusUnknown PackageInfoInstallationInfoVerificationStatus = "unknown" - PackageInfoInstallationInfoVerificationStatusUnverified PackageInfoInstallationInfoVerificationStatus = "unverified" - PackageInfoInstallationInfoVerificationStatusVerified PackageInfoInstallationInfoVerificationStatus = "verified" + Occurrences SLOsBudgetingMethod = "occurrences" + Timeslices SLOsBudgetingMethod = "timeslices" ) -// Defines values for PackageInfoOwnerType. +// Defines values for SLOsBulkPurgeRollupRequestPurgePolicy0PurgeType. const ( - PackageInfoOwnerTypeCommunity PackageInfoOwnerType = "community" - PackageInfoOwnerTypeElastic PackageInfoOwnerType = "elastic" - PackageInfoOwnerTypePartner PackageInfoOwnerType = "partner" + FixedAge SLOsBulkPurgeRollupRequestPurgePolicy0PurgeType = "fixed-age" ) -// Defines values for PackageInfoRelease. +// Defines values for SLOsBulkPurgeRollupRequestPurgePolicy1PurgeType. const ( - PackageInfoReleaseBeta PackageInfoRelease = "beta" - PackageInfoReleaseExperimental PackageInfoRelease = "experimental" - PackageInfoReleaseGa PackageInfoRelease = "ga" + FixedTime SLOsBulkPurgeRollupRequestPurgePolicy1PurgeType = "fixed-time" ) -// Defines values for PackageInfoType0. +// Defines values for SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0Aggregation. 
const ( - PackageInfoType0Integration PackageInfoType0 = "integration" + SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0AggregationSum SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0Aggregation = "sum" ) -// Defines values for PackageInfoType1. +// Defines values for SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1Aggregation. const ( - PackageInfoType1Input PackageInfoType1 = "input" + SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1AggregationDocCount SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1Aggregation = "doc_count" ) -// Defines values for PackageInfoType2. +// Defines values for SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0Aggregation. const ( - PackageInfoType2Content PackageInfoType2 = "content" + SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0AggregationSum SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0Aggregation = "sum" ) -// Defines values for PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0. +// Defines values for SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1Aggregation. const ( - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0CspRuleTemplate PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "csp-rule-template" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Dashboard PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "dashboard" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0IndexPattern PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "index-pattern" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Lens PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "lens" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Map PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "map" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0MlModule PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "ml-module" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0OsqueryPackAsset PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "osquery-pack-asset" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0OsquerySavedQuery PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "osquery-saved-query" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Search PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "search" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0SecurityRule PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "security-rule" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Tag PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "tag" - PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Visualization PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "visualization" + SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1AggregationDocCount SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1Aggregation = "doc_count" ) -// Defines values for PackageListItemInstallationInfoInstallSource. +// Defines values for SLOsIndicatorPropertiesHistogramParamsGoodAggregation. 
const ( - PackageListItemInstallationInfoInstallSourceBundled PackageListItemInstallationInfoInstallSource = "bundled" - PackageListItemInstallationInfoInstallSourceCustom PackageListItemInstallationInfoInstallSource = "custom" - PackageListItemInstallationInfoInstallSourceRegistry PackageListItemInstallationInfoInstallSource = "registry" - PackageListItemInstallationInfoInstallSourceUpload PackageListItemInstallationInfoInstallSource = "upload" + SLOsIndicatorPropertiesHistogramParamsGoodAggregationRange SLOsIndicatorPropertiesHistogramParamsGoodAggregation = "range" + SLOsIndicatorPropertiesHistogramParamsGoodAggregationValueCount SLOsIndicatorPropertiesHistogramParamsGoodAggregation = "value_count" ) -// Defines values for PackageListItemInstallationInfoInstallStatus. +// Defines values for SLOsIndicatorPropertiesHistogramParamsTotalAggregation. const ( - PackageListItemInstallationInfoInstallStatusInstallFailed PackageListItemInstallationInfoInstallStatus = "install_failed" - PackageListItemInstallationInfoInstallStatusInstalled PackageListItemInstallationInfoInstallStatus = "installed" - PackageListItemInstallationInfoInstallStatusInstalling PackageListItemInstallationInfoInstallStatus = "installing" + SLOsIndicatorPropertiesHistogramParamsTotalAggregationRange SLOsIndicatorPropertiesHistogramParamsTotalAggregation = "range" + SLOsIndicatorPropertiesHistogramParamsTotalAggregationValueCount SLOsIndicatorPropertiesHistogramParamsTotalAggregation = "value_count" ) -// Defines values for PackageListItemInstallationInfoInstalledEsType. +// Defines values for SLOsIndicatorPropertiesTimesliceMetricParamsMetricComparator. const ( - PackageListItemInstallationInfoInstalledEsTypeComponentTemplate PackageListItemInstallationInfoInstalledEsType = "component_template" - PackageListItemInstallationInfoInstalledEsTypeDataStreamIlmPolicy PackageListItemInstallationInfoInstalledEsType = "data_stream_ilm_policy" - PackageListItemInstallationInfoInstalledEsTypeIlmPolicy PackageListItemInstallationInfoInstalledEsType = "ilm_policy" - PackageListItemInstallationInfoInstalledEsTypeIndex PackageListItemInstallationInfoInstalledEsType = "index" - PackageListItemInstallationInfoInstalledEsTypeIndexTemplate PackageListItemInstallationInfoInstalledEsType = "index_template" - PackageListItemInstallationInfoInstalledEsTypeIngestPipeline PackageListItemInstallationInfoInstalledEsType = "ingest_pipeline" - PackageListItemInstallationInfoInstalledEsTypeMlModel PackageListItemInstallationInfoInstalledEsType = "ml_model" - PackageListItemInstallationInfoInstalledEsTypeTransform PackageListItemInstallationInfoInstalledEsType = "transform" + GT SLOsIndicatorPropertiesTimesliceMetricParamsMetricComparator = "GT" + GTE SLOsIndicatorPropertiesTimesliceMetricParamsMetricComparator = "GTE" + LT SLOsIndicatorPropertiesTimesliceMetricParamsMetricComparator = "LT" + LTE SLOsIndicatorPropertiesTimesliceMetricParamsMetricComparator = "LTE" ) -// Defines values for PackageListItemInstallationInfoInstalledKibanaType0. +// Defines values for SLOsSummaryStatus. 
const ( - PackageListItemInstallationInfoInstalledKibanaType0CspRuleTemplate PackageListItemInstallationInfoInstalledKibanaType0 = "csp-rule-template" - PackageListItemInstallationInfoInstalledKibanaType0Dashboard PackageListItemInstallationInfoInstalledKibanaType0 = "dashboard" - PackageListItemInstallationInfoInstalledKibanaType0IndexPattern PackageListItemInstallationInfoInstalledKibanaType0 = "index-pattern" - PackageListItemInstallationInfoInstalledKibanaType0Lens PackageListItemInstallationInfoInstalledKibanaType0 = "lens" - PackageListItemInstallationInfoInstalledKibanaType0Map PackageListItemInstallationInfoInstalledKibanaType0 = "map" - PackageListItemInstallationInfoInstalledKibanaType0MlModule PackageListItemInstallationInfoInstalledKibanaType0 = "ml-module" - PackageListItemInstallationInfoInstalledKibanaType0OsqueryPackAsset PackageListItemInstallationInfoInstalledKibanaType0 = "osquery-pack-asset" - PackageListItemInstallationInfoInstalledKibanaType0OsquerySavedQuery PackageListItemInstallationInfoInstalledKibanaType0 = "osquery-saved-query" - PackageListItemInstallationInfoInstalledKibanaType0Search PackageListItemInstallationInfoInstalledKibanaType0 = "search" - PackageListItemInstallationInfoInstalledKibanaType0SecurityRule PackageListItemInstallationInfoInstalledKibanaType0 = "security-rule" - PackageListItemInstallationInfoInstalledKibanaType0Tag PackageListItemInstallationInfoInstalledKibanaType0 = "tag" - PackageListItemInstallationInfoInstalledKibanaType0Visualization PackageListItemInstallationInfoInstalledKibanaType0 = "visualization" + DEGRADING SLOsSummaryStatus = "DEGRADING" + HEALTHY SLOsSummaryStatus = "HEALTHY" + NODATA SLOsSummaryStatus = "NO_DATA" + VIOLATED SLOsSummaryStatus = "VIOLATED" ) -// Defines values for PackageListItemInstallationInfoVerificationStatus. +// Defines values for SLOsTimeWindowType. const ( - PackageListItemInstallationInfoVerificationStatusUnknown PackageListItemInstallationInfoVerificationStatus = "unknown" - PackageListItemInstallationInfoVerificationStatusUnverified PackageListItemInstallationInfoVerificationStatus = "unverified" - PackageListItemInstallationInfoVerificationStatusVerified PackageListItemInstallationInfoVerificationStatus = "verified" + CalendarAligned SLOsTimeWindowType = "calendarAligned" + Rolling SLOsTimeWindowType = "rolling" ) -// Defines values for PackageListItemOwnerType. +// Defines values for SLOsTimesliceMetricBasicMetricWithFieldAggregation. 
const ( - PackageListItemOwnerTypeCommunity PackageListItemOwnerType = "community" - PackageListItemOwnerTypeElastic PackageListItemOwnerType = "elastic" - PackageListItemOwnerTypePartner PackageListItemOwnerType = "partner" + SLOsTimesliceMetricBasicMetricWithFieldAggregationAvg SLOsTimesliceMetricBasicMetricWithFieldAggregation = "avg" + SLOsTimesliceMetricBasicMetricWithFieldAggregationCardinality SLOsTimesliceMetricBasicMetricWithFieldAggregation = "cardinality" + SLOsTimesliceMetricBasicMetricWithFieldAggregationLastValue SLOsTimesliceMetricBasicMetricWithFieldAggregation = "last_value" + SLOsTimesliceMetricBasicMetricWithFieldAggregationMax SLOsTimesliceMetricBasicMetricWithFieldAggregation = "max" + SLOsTimesliceMetricBasicMetricWithFieldAggregationMin SLOsTimesliceMetricBasicMetricWithFieldAggregation = "min" + SLOsTimesliceMetricBasicMetricWithFieldAggregationStdDeviation SLOsTimesliceMetricBasicMetricWithFieldAggregation = "std_deviation" + SLOsTimesliceMetricBasicMetricWithFieldAggregationSum SLOsTimesliceMetricBasicMetricWithFieldAggregation = "sum" ) -// Defines values for PackageListItemRelease. +// Defines values for SLOsTimesliceMetricDocCountMetricAggregation. const ( - Beta PackageListItemRelease = "beta" - Experimental PackageListItemRelease = "experimental" - Ga PackageListItemRelease = "ga" + DocCount SLOsTimesliceMetricDocCountMetricAggregation = "doc_count" ) -// Defines values for PackageListItemType0. +// Defines values for SLOsTimesliceMetricPercentileMetricAggregation. const ( - PackageListItemType0Integration PackageListItemType0 = "integration" + Percentile SLOsTimesliceMetricPercentileMetricAggregation = "percentile" ) -// Defines values for PackageListItemType1. +// Defines values for SavedObjects400ResponseError. const ( - PackageListItemType1Input PackageListItemType1 = "input" + BadRequest SavedObjects400ResponseError = "Bad Request" ) -// Defines values for PackageListItemType2. +// Defines values for SavedObjects400ResponseStatusCode. const ( - PackageListItemType2Content PackageListItemType2 = "content" + N400 SavedObjects400ResponseStatusCode = 400 ) -// Defines values for ServerHostSslClientAuth. +// Defines values for SecurityAIAssistantAPIAnonymizationFieldsBulkActionSkipReason. const ( - ServerHostSslClientAuthNone ServerHostSslClientAuth = "none" - ServerHostSslClientAuthOptional ServerHostSslClientAuth = "optional" - ServerHostSslClientAuthRequired ServerHostSslClientAuth = "required" + ANONYMIZATIONFIELDNOTMODIFIED SecurityAIAssistantAPIAnonymizationFieldsBulkActionSkipReason = "ANONYMIZATION_FIELD_NOT_MODIFIED" ) -// Defines values for SwimlaneConfigConnectorType. +// Defines values for SecurityAIAssistantAPIChatMessageRole. const ( - Alerts SwimlaneConfigConnectorType = "alerts" - All SwimlaneConfigConnectorType = "all" - Cases SwimlaneConfigConnectorType = "cases" + SecurityAIAssistantAPIChatMessageRoleAssistant SecurityAIAssistantAPIChatMessageRole = "assistant" + SecurityAIAssistantAPIChatMessageRoleSystem SecurityAIAssistantAPIChatMessageRole = "system" + SecurityAIAssistantAPIChatMessageRoleUser SecurityAIAssistantAPIChatMessageRole = "user" ) -// Defines values for UpdateOutputElasticsearchPreset. +// Defines values for SecurityAIAssistantAPIConversationCategory. 
const ( - UpdateOutputElasticsearchPresetBalanced UpdateOutputElasticsearchPreset = "balanced" - UpdateOutputElasticsearchPresetCustom UpdateOutputElasticsearchPreset = "custom" - UpdateOutputElasticsearchPresetLatency UpdateOutputElasticsearchPreset = "latency" - UpdateOutputElasticsearchPresetScale UpdateOutputElasticsearchPreset = "scale" - UpdateOutputElasticsearchPresetThroughput UpdateOutputElasticsearchPreset = "throughput" + SecurityAIAssistantAPIConversationCategoryAssistant SecurityAIAssistantAPIConversationCategory = "assistant" + SecurityAIAssistantAPIConversationCategoryInsights SecurityAIAssistantAPIConversationCategory = "insights" ) -// Defines values for UpdateOutputElasticsearchType. +// Defines values for SecurityAIAssistantAPIDocumentEntryType. const ( - Elasticsearch UpdateOutputElasticsearchType = "elasticsearch" + SecurityAIAssistantAPIDocumentEntryTypeDocument SecurityAIAssistantAPIDocumentEntryType = "document" ) -// Defines values for UpdateOutputKafkaAuthType. +// Defines values for SecurityAIAssistantAPIDocumentEntryCreateFieldsType. const ( - UpdateOutputKafkaAuthTypeKerberos UpdateOutputKafkaAuthType = "kerberos" - UpdateOutputKafkaAuthTypeNone UpdateOutputKafkaAuthType = "none" - UpdateOutputKafkaAuthTypeSsl UpdateOutputKafkaAuthType = "ssl" - UpdateOutputKafkaAuthTypeUserPass UpdateOutputKafkaAuthType = "user_pass" + SecurityAIAssistantAPIDocumentEntryCreateFieldsTypeDocument SecurityAIAssistantAPIDocumentEntryCreateFieldsType = "document" ) -// Defines values for UpdateOutputKafkaCompression. +// Defines values for SecurityAIAssistantAPIDocumentEntryRequiredFieldsType. const ( - UpdateOutputKafkaCompressionGzip UpdateOutputKafkaCompression = "gzip" - UpdateOutputKafkaCompressionLz4 UpdateOutputKafkaCompression = "lz4" - UpdateOutputKafkaCompressionNone UpdateOutputKafkaCompression = "none" - UpdateOutputKafkaCompressionSnappy UpdateOutputKafkaCompression = "snappy" + SecurityAIAssistantAPIDocumentEntryRequiredFieldsTypeDocument SecurityAIAssistantAPIDocumentEntryRequiredFieldsType = "document" ) -// Defines values for UpdateOutputKafkaPartition. +// Defines values for SecurityAIAssistantAPIDocumentEntryResponseFieldsType. const ( - Hash UpdateOutputKafkaPartition = "hash" - Random UpdateOutputKafkaPartition = "random" - RoundRobin UpdateOutputKafkaPartition = "round_robin" + SecurityAIAssistantAPIDocumentEntryResponseFieldsTypeDocument SecurityAIAssistantAPIDocumentEntryResponseFieldsType = "document" ) -// Defines values for UpdateOutputKafkaRequiredAcks. +// Defines values for SecurityAIAssistantAPIDocumentEntryUpdateFieldsType. const ( - Minus1 UpdateOutputKafkaRequiredAcks = -1 - N0 UpdateOutputKafkaRequiredAcks = 0 - N1 UpdateOutputKafkaRequiredAcks = 1 + Document SecurityAIAssistantAPIDocumentEntryUpdateFieldsType = "document" ) -// Defines values for UpdateOutputKafkaSaslMechanism. +// Defines values for SecurityAIAssistantAPIEsqlContentReferenceType. const ( - PLAIN UpdateOutputKafkaSaslMechanism = "PLAIN" - SCRAMSHA256 UpdateOutputKafkaSaslMechanism = "SCRAM-SHA-256" - SCRAMSHA512 UpdateOutputKafkaSaslMechanism = "SCRAM-SHA-512" + SecurityAIAssistantAPIEsqlContentReferenceTypeEsqlQuery SecurityAIAssistantAPIEsqlContentReferenceType = "EsqlQuery" ) -// Defines values for UpdateOutputKafkaType. +// Defines values for SecurityAIAssistantAPIFindAnonymizationFieldsSortField. 
const ( - Kafka UpdateOutputKafkaType = "kafka" + SecurityAIAssistantAPIFindAnonymizationFieldsSortFieldAllowed SecurityAIAssistantAPIFindAnonymizationFieldsSortField = "allowed" + SecurityAIAssistantAPIFindAnonymizationFieldsSortFieldAnonymized SecurityAIAssistantAPIFindAnonymizationFieldsSortField = "anonymized" + SecurityAIAssistantAPIFindAnonymizationFieldsSortFieldCreatedAt SecurityAIAssistantAPIFindAnonymizationFieldsSortField = "created_at" + SecurityAIAssistantAPIFindAnonymizationFieldsSortFieldField SecurityAIAssistantAPIFindAnonymizationFieldsSortField = "field" + SecurityAIAssistantAPIFindAnonymizationFieldsSortFieldUpdatedAt SecurityAIAssistantAPIFindAnonymizationFieldsSortField = "updated_at" ) -// Defines values for UpdateOutputLogstashType. +// Defines values for SecurityAIAssistantAPIFindConversationsSortField. const ( - Logstash UpdateOutputLogstashType = "logstash" + SecurityAIAssistantAPIFindConversationsSortFieldCreatedAt SecurityAIAssistantAPIFindConversationsSortField = "created_at" + SecurityAIAssistantAPIFindConversationsSortFieldTitle SecurityAIAssistantAPIFindConversationsSortField = "title" + SecurityAIAssistantAPIFindConversationsSortFieldUpdatedAt SecurityAIAssistantAPIFindConversationsSortField = "updated_at" ) -// Defines values for UpdateOutputRemoteElasticsearchPreset. +// Defines values for SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortField. const ( - UpdateOutputRemoteElasticsearchPresetBalanced UpdateOutputRemoteElasticsearchPreset = "balanced" - UpdateOutputRemoteElasticsearchPresetCustom UpdateOutputRemoteElasticsearchPreset = "custom" - UpdateOutputRemoteElasticsearchPresetLatency UpdateOutputRemoteElasticsearchPreset = "latency" - UpdateOutputRemoteElasticsearchPresetScale UpdateOutputRemoteElasticsearchPreset = "scale" - UpdateOutputRemoteElasticsearchPresetThroughput UpdateOutputRemoteElasticsearchPreset = "throughput" + SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortFieldCreatedAt SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortField = "created_at" + SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortFieldIsDefault SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortField = "is_default" + SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortFieldTitle SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortField = "title" + SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortFieldUpdatedAt SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortField = "updated_at" ) -// Defines values for UpdateOutputRemoteElasticsearchType. +// Defines values for SecurityAIAssistantAPIFindPromptsSortField. const ( - RemoteElasticsearch UpdateOutputRemoteElasticsearchType = "remote_elasticsearch" + SecurityAIAssistantAPIFindPromptsSortFieldCreatedAt SecurityAIAssistantAPIFindPromptsSortField = "created_at" + SecurityAIAssistantAPIFindPromptsSortFieldIsDefault SecurityAIAssistantAPIFindPromptsSortField = "is_default" + SecurityAIAssistantAPIFindPromptsSortFieldName SecurityAIAssistantAPIFindPromptsSortField = "name" + SecurityAIAssistantAPIFindPromptsSortFieldUpdatedAt SecurityAIAssistantAPIFindPromptsSortField = "updated_at" ) -// Defines values for UpdateOutputSslVerificationMode. +// Defines values for SecurityAIAssistantAPIHrefContentReferenceType. 
const ( - UpdateOutputSslVerificationModeCertificate UpdateOutputSslVerificationMode = "certificate" - UpdateOutputSslVerificationModeFull UpdateOutputSslVerificationMode = "full" - UpdateOutputSslVerificationModeNone UpdateOutputSslVerificationMode = "none" - UpdateOutputSslVerificationModeStrict UpdateOutputSslVerificationMode = "strict" + Href SecurityAIAssistantAPIHrefContentReferenceType = "Href" ) -// Defines values for VerificationMode. +// Defines values for SecurityAIAssistantAPIIndexEntryType. const ( - VerificationModeCertificate VerificationMode = "certificate" - VerificationModeFull VerificationMode = "full" - VerificationModeNone VerificationMode = "none" + SecurityAIAssistantAPIIndexEntryTypeIndex SecurityAIAssistantAPIIndexEntryType = "index" ) -// Defines values for WebhookConfigMethod. +// Defines values for SecurityAIAssistantAPIIndexEntryCreateFieldsType. const ( - WebhookConfigMethodPost WebhookConfigMethod = "post" - WebhookConfigMethodPut WebhookConfigMethod = "put" + SecurityAIAssistantAPIIndexEntryCreateFieldsTypeIndex SecurityAIAssistantAPIIndexEntryCreateFieldsType = "index" ) -// Defines values for APMUIElasticApiVersion. +// Defines values for SecurityAIAssistantAPIIndexEntryRequiredFieldsType. const ( - APMUIElasticApiVersionN20231031 APMUIElasticApiVersion = "2023-10-31" + SecurityAIAssistantAPIIndexEntryRequiredFieldsTypeIndex SecurityAIAssistantAPIIndexEntryRequiredFieldsType = "index" ) -// Defines values for DeleteAgentConfigurationParamsElasticApiVersion. +// Defines values for SecurityAIAssistantAPIIndexEntryResponseFieldsType. const ( - DeleteAgentConfigurationParamsElasticApiVersionN20231031 DeleteAgentConfigurationParamsElasticApiVersion = "2023-10-31" + SecurityAIAssistantAPIIndexEntryResponseFieldsTypeIndex SecurityAIAssistantAPIIndexEntryResponseFieldsType = "index" ) -// Defines values for GetAgentConfigurationsParamsElasticApiVersion. +// Defines values for SecurityAIAssistantAPIIndexEntryUpdateFieldsType. const ( - GetAgentConfigurationsParamsElasticApiVersionN20231031 GetAgentConfigurationsParamsElasticApiVersion = "2023-10-31" + SecurityAIAssistantAPIIndexEntryUpdateFieldsTypeIndex SecurityAIAssistantAPIIndexEntryUpdateFieldsType = "index" ) -// Defines values for CreateUpdateAgentConfigurationParamsElasticApiVersion. +// Defines values for SecurityAIAssistantAPIKnowledgeBaseEntryBulkActionSkipReason. const ( - CreateUpdateAgentConfigurationParamsElasticApiVersionN20231031 CreateUpdateAgentConfigurationParamsElasticApiVersion = "2023-10-31" + KNOWLEDGEBASEENTRYNOTMODIFIED SecurityAIAssistantAPIKnowledgeBaseEntryBulkActionSkipReason = "KNOWLEDGE_BASE_ENTRY_NOT_MODIFIED" ) -// Defines values for GetFleetAgentPoliciesParamsSortOrder. +// Defines values for SecurityAIAssistantAPIKnowledgeBaseEntryContentReferenceType. const ( - GetFleetAgentPoliciesParamsSortOrderAsc GetFleetAgentPoliciesParamsSortOrder = "asc" - GetFleetAgentPoliciesParamsSortOrderDesc GetFleetAgentPoliciesParamsSortOrder = "desc" + KnowledgeBaseEntry SecurityAIAssistantAPIKnowledgeBaseEntryContentReferenceType = "KnowledgeBaseEntry" ) -// Defines values for GetFleetAgentPoliciesParamsFormat. +// Defines values for SecurityAIAssistantAPIKnowledgeBaseResource. 
const ( - GetFleetAgentPoliciesParamsFormatLegacy GetFleetAgentPoliciesParamsFormat = "legacy" - GetFleetAgentPoliciesParamsFormatSimplified GetFleetAgentPoliciesParamsFormat = "simplified" + SecurityAIAssistantAPIKnowledgeBaseResourceDefendInsights SecurityAIAssistantAPIKnowledgeBaseResource = "defend_insights" + SecurityAIAssistantAPIKnowledgeBaseResourceSecurityLabs SecurityAIAssistantAPIKnowledgeBaseResource = "security_labs" + SecurityAIAssistantAPIKnowledgeBaseResourceUser SecurityAIAssistantAPIKnowledgeBaseResource = "user" ) -// Defines values for PostFleetAgentPoliciesJSONBodyMonitoringEnabled. +// Defines values for SecurityAIAssistantAPIMessageRole. const ( - PostFleetAgentPoliciesJSONBodyMonitoringEnabledLogs PostFleetAgentPoliciesJSONBodyMonitoringEnabled = "logs" - PostFleetAgentPoliciesJSONBodyMonitoringEnabledMetrics PostFleetAgentPoliciesJSONBodyMonitoringEnabled = "metrics" - PostFleetAgentPoliciesJSONBodyMonitoringEnabledTraces PostFleetAgentPoliciesJSONBodyMonitoringEnabled = "traces" + SecurityAIAssistantAPIMessageRoleAssistant SecurityAIAssistantAPIMessageRole = "assistant" + SecurityAIAssistantAPIMessageRoleSystem SecurityAIAssistantAPIMessageRole = "system" + SecurityAIAssistantAPIMessageRoleUser SecurityAIAssistantAPIMessageRole = "user" ) -// Defines values for GetFleetAgentPoliciesAgentpolicyidParamsFormat. +// Defines values for SecurityAIAssistantAPIProductDocumentationContentReferenceType. const ( - GetFleetAgentPoliciesAgentpolicyidParamsFormatLegacy GetFleetAgentPoliciesAgentpolicyidParamsFormat = "legacy" - GetFleetAgentPoliciesAgentpolicyidParamsFormatSimplified GetFleetAgentPoliciesAgentpolicyidParamsFormat = "simplified" + ProductDocumentation SecurityAIAssistantAPIProductDocumentationContentReferenceType = "ProductDocumentation" ) -// Defines values for PutFleetAgentPoliciesAgentpolicyidParamsFormat. +// Defines values for SecurityAIAssistantAPIPromptType. const ( - PutFleetAgentPoliciesAgentpolicyidParamsFormatLegacy PutFleetAgentPoliciesAgentpolicyidParamsFormat = "legacy" - PutFleetAgentPoliciesAgentpolicyidParamsFormatSimplified PutFleetAgentPoliciesAgentpolicyidParamsFormat = "simplified" + SecurityAIAssistantAPIPromptTypeQuick SecurityAIAssistantAPIPromptType = "quick" + SecurityAIAssistantAPIPromptTypeSystem SecurityAIAssistantAPIPromptType = "system" ) -// Defines values for PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled. +// Defines values for SecurityAIAssistantAPIPromptsBulkActionSkipReason. const ( - Logs PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled = "logs" - Metrics PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled = "metrics" - Traces PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled = "traces" + PROMPTFIELDNOTMODIFIED SecurityAIAssistantAPIPromptsBulkActionSkipReason = "PROMPT_FIELD_NOT_MODIFIED" ) -// Defines values for PostFleetFleetServerHostsJSONBodySslClientAuth. +// Defines values for SecurityAIAssistantAPIProvider. 
const ( - PostFleetFleetServerHostsJSONBodySslClientAuthNone PostFleetFleetServerHostsJSONBodySslClientAuth = "none" - PostFleetFleetServerHostsJSONBodySslClientAuthOptional PostFleetFleetServerHostsJSONBodySslClientAuth = "optional" - PostFleetFleetServerHostsJSONBodySslClientAuthRequired PostFleetFleetServerHostsJSONBodySslClientAuth = "required" + SecurityAIAssistantAPIProviderAzureOpenAI SecurityAIAssistantAPIProvider = "Azure OpenAI" + SecurityAIAssistantAPIProviderOpenAI SecurityAIAssistantAPIProvider = "OpenAI" + SecurityAIAssistantAPIProviderOther SecurityAIAssistantAPIProvider = "Other" ) -// Defines values for PutFleetFleetServerHostsItemidJSONBodySslClientAuth. +// Defines values for SecurityAIAssistantAPISecurityAlertContentReferenceType. const ( - PutFleetFleetServerHostsItemidJSONBodySslClientAuthNone PutFleetFleetServerHostsItemidJSONBodySslClientAuth = "none" - PutFleetFleetServerHostsItemidJSONBodySslClientAuthOptional PutFleetFleetServerHostsItemidJSONBodySslClientAuth = "optional" - PutFleetFleetServerHostsItemidJSONBodySslClientAuthRequired PutFleetFleetServerHostsItemidJSONBodySslClientAuth = "required" + SecurityAlert SecurityAIAssistantAPISecurityAlertContentReferenceType = "SecurityAlert" ) -// Defines values for GetFleetPackagePoliciesParamsSortOrder. +// Defines values for SecurityAIAssistantAPISecurityAlertsPageContentReferenceType. const ( - GetFleetPackagePoliciesParamsSortOrderAsc GetFleetPackagePoliciesParamsSortOrder = "asc" - GetFleetPackagePoliciesParamsSortOrderDesc GetFleetPackagePoliciesParamsSortOrder = "desc" + SecurityAlertsPage SecurityAIAssistantAPISecurityAlertsPageContentReferenceType = "SecurityAlertsPage" ) -// Defines values for GetFleetPackagePoliciesParamsFormat. +// Defines values for SecurityAIAssistantAPISortOrder. const ( - GetFleetPackagePoliciesParamsFormatLegacy GetFleetPackagePoliciesParamsFormat = "legacy" - GetFleetPackagePoliciesParamsFormatSimplified GetFleetPackagePoliciesParamsFormat = "simplified" + SecurityAIAssistantAPISortOrderAsc SecurityAIAssistantAPISortOrder = "asc" + SecurityAIAssistantAPISortOrderDesc SecurityAIAssistantAPISortOrder = "desc" ) -// Defines values for PostFleetPackagePoliciesParamsFormat. +// Defines values for SecurityDetectionsAPIAlertStatus. const ( - PostFleetPackagePoliciesParamsFormatLegacy PostFleetPackagePoliciesParamsFormat = "legacy" - PostFleetPackagePoliciesParamsFormatSimplified PostFleetPackagePoliciesParamsFormat = "simplified" + SecurityDetectionsAPIAlertStatusAcknowledged SecurityDetectionsAPIAlertStatus = "acknowledged" + SecurityDetectionsAPIAlertStatusClosed SecurityDetectionsAPIAlertStatus = "closed" + SecurityDetectionsAPIAlertStatusInProgress SecurityDetectionsAPIAlertStatus = "in-progress" + SecurityDetectionsAPIAlertStatusOpen SecurityDetectionsAPIAlertStatus = "open" ) -// Defines values for GetFleetPackagePoliciesPackagepolicyidParamsFormat. +// Defines values for SecurityDetectionsAPIAlertSuppressionDurationUnit. 
 const (
-	GetFleetPackagePoliciesPackagepolicyidParamsFormatLegacy GetFleetPackagePoliciesPackagepolicyidParamsFormat = "legacy"
-	GetFleetPackagePoliciesPackagepolicyidParamsFormatSimplified GetFleetPackagePoliciesPackagepolicyidParamsFormat = "simplified"
+	SecurityDetectionsAPIAlertSuppressionDurationUnitH SecurityDetectionsAPIAlertSuppressionDurationUnit = "h"
+	SecurityDetectionsAPIAlertSuppressionDurationUnitM SecurityDetectionsAPIAlertSuppressionDurationUnit = "m"
+	SecurityDetectionsAPIAlertSuppressionDurationUnitS SecurityDetectionsAPIAlertSuppressionDurationUnit = "s"
 )
-// Defines values for PutFleetPackagePoliciesPackagepolicyidParamsFormat.
+// Defines values for SecurityDetectionsAPIAlertSuppressionMissingFieldsStrategy.
 const (
-	Legacy PutFleetPackagePoliciesPackagepolicyidParamsFormat = "legacy"
-	Simplified PutFleetPackagePoliciesPackagepolicyidParamsFormat = "simplified"
+	DoNotSuppress SecurityDetectionsAPIAlertSuppressionMissingFieldsStrategy = "doNotSuppress"
+	Suppress SecurityDetectionsAPIAlertSuppressionMissingFieldsStrategy = "suppress"
 )
-// APMUI400Response defines model for APM_UI_400_response.
-type APMUI400Response struct {
-	// Error Error type
-	Error *string `json:"error,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppressionType.
+const (
+	DeleteAlertSuppression SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppressionType = "delete_alert_suppression"
+)
-	// Message Error message
-	Message *string `json:"message,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkActionEditPayloadIndexPatternsType.
+const (
+	AddIndexPatterns SecurityDetectionsAPIBulkActionEditPayloadIndexPatternsType = "add_index_patterns"
+	DeleteIndexPatterns SecurityDetectionsAPIBulkActionEditPayloadIndexPatternsType = "delete_index_patterns"
+	SetIndexPatterns SecurityDetectionsAPIBulkActionEditPayloadIndexPatternsType = "set_index_patterns"
+)
-	// StatusCode Error status code
-	StatusCode *float32 `json:"statusCode,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIBulkActionEditPayloadInvestigationFieldsType.
+const (
+	AddInvestigationFields SecurityDetectionsAPIBulkActionEditPayloadInvestigationFieldsType = "add_investigation_fields"
+	DeleteInvestigationFields SecurityDetectionsAPIBulkActionEditPayloadInvestigationFieldsType = "delete_investigation_fields"
+	SetInvestigationFields SecurityDetectionsAPIBulkActionEditPayloadInvestigationFieldsType = "set_investigation_fields"
+)
-// APMUI401Response defines model for APM_UI_401_response.
-type APMUI401Response struct {
-	// Error Error type
-	Error *string `json:"error,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkActionEditPayloadRuleActionsType.
+const (
+	AddRuleActions SecurityDetectionsAPIBulkActionEditPayloadRuleActionsType = "add_rule_actions"
+	SetRuleActions SecurityDetectionsAPIBulkActionEditPayloadRuleActionsType = "set_rule_actions"
+)
-	// Message Error message
-	Message *string `json:"message,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkActionEditPayloadScheduleType.
+const (
+	SetSchedule SecurityDetectionsAPIBulkActionEditPayloadScheduleType = "set_schedule"
+)
-	// StatusCode Error status code
-	StatusCode *float32 `json:"statusCode,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionType.
+const (
+	SetAlertSuppression SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionType = "set_alert_suppression"
+)
-// APMUI403Response defines model for APM_UI_403_response.
-type APMUI403Response struct {
-	// Error Error type
-	Error *string `json:"error,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThresholdType.
+const (
+	SetAlertSuppressionForThreshold SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThresholdType = "set_alert_suppression_for_threshold"
+)
-	// Message Error message
-	Message *string `json:"message,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkActionEditPayloadTagsType.
+const (
+	AddTags SecurityDetectionsAPIBulkActionEditPayloadTagsType = "add_tags"
+	DeleteTags SecurityDetectionsAPIBulkActionEditPayloadTagsType = "delete_tags"
+	SetTags SecurityDetectionsAPIBulkActionEditPayloadTagsType = "set_tags"
+)
-	// StatusCode Error status code
-	StatusCode *float32 `json:"statusCode,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIBulkActionEditPayloadTimelineType.
+const (
+	SetTimeline SecurityDetectionsAPIBulkActionEditPayloadTimelineType = "set_timeline"
+)
-// APMUI404Response defines model for APM_UI_404_response.
-type APMUI404Response struct {
-	// Error Error type
-	Error *string `json:"error,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkActionsDryRunErrCode.
+const (
+	ESQLINDEXPATTERN SecurityDetectionsAPIBulkActionsDryRunErrCode = "ESQL_INDEX_PATTERN"
+	IMMUTABLE SecurityDetectionsAPIBulkActionsDryRunErrCode = "IMMUTABLE"
+	MACHINELEARNINGAUTH SecurityDetectionsAPIBulkActionsDryRunErrCode = "MACHINE_LEARNING_AUTH"
+	MACHINELEARNINGINDEXPATTERN SecurityDetectionsAPIBulkActionsDryRunErrCode = "MACHINE_LEARNING_INDEX_PATTERN"
+	MANUALRULERUNDISABLEDRULE SecurityDetectionsAPIBulkActionsDryRunErrCode = "MANUAL_RULE_RUN_DISABLED_RULE"
+	MANUALRULERUNFEATURE SecurityDetectionsAPIBulkActionsDryRunErrCode = "MANUAL_RULE_RUN_FEATURE"
+	PREBUILTCUSTOMIZATIONLICENSE SecurityDetectionsAPIBulkActionsDryRunErrCode = "PREBUILT_CUSTOMIZATION_LICENSE"
+	RULEFILLGAPSDISABLEDRULE SecurityDetectionsAPIBulkActionsDryRunErrCode = "RULE_FILL_GAPS_DISABLED_RULE"
+	THRESHOLDRULETYPEINSUPPRESSION SecurityDetectionsAPIBulkActionsDryRunErrCode = "THRESHOLD_RULE_TYPE_IN_SUPPRESSION"
+	UNSUPPORTEDRULEINSUPPRESSIONFORTHRESHOLD SecurityDetectionsAPIBulkActionsDryRunErrCode = "UNSUPPORTED_RULE_IN_SUPPRESSION_FOR_THRESHOLD"
+)
-	// Message Error message
-	Message *string `json:"message,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkDeleteRulesAction.
+const (
+	SecurityDetectionsAPIBulkDeleteRulesActionDelete SecurityDetectionsAPIBulkDeleteRulesAction = "delete"
+)
-	// StatusCode Error status code
-	StatusCode *float32 `json:"statusCode,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIBulkDisableRulesAction.
+const (
+	Disable SecurityDetectionsAPIBulkDisableRulesAction = "disable"
+)
-// APMUIAgentConfigurationIntakeObject defines model for APM_UI_agent_configuration_intake_object.
-type APMUIAgentConfigurationIntakeObject struct {
-	// AgentName The agent name is used by the UI to determine which settings to display.
-	AgentName *string `json:"agent_name,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkDuplicateRulesAction.
+const (
+	Duplicate SecurityDetectionsAPIBulkDuplicateRulesAction = "duplicate"
+)
-	// Service Service
-	Service APMUIServiceObject `json:"service"`
+// Defines values for SecurityDetectionsAPIBulkEditRulesAction.
+const (
+	Edit SecurityDetectionsAPIBulkEditRulesAction = "edit"
+)
-	// Settings Agent configuration settings
-	Settings APMUISettingsObject `json:"settings"`
-}
+// Defines values for SecurityDetectionsAPIBulkEditSkipReason.
+const (
+	RULENOTMODIFIED SecurityDetectionsAPIBulkEditSkipReason = "RULE_NOT_MODIFIED"
+)
-// APMUIAgentConfigurationObject Agent configuration
-type APMUIAgentConfigurationObject struct {
-	// Timestamp Timestamp
-	Timestamp float32 `json:"@timestamp"`
+// Defines values for SecurityDetectionsAPIBulkEnableRulesAction.
+const (
+	Enable SecurityDetectionsAPIBulkEnableRulesAction = "enable"
+)
-	// AgentName Agent name
-	AgentName *string `json:"agent_name,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkExportRulesAction.
+const (
+	Export SecurityDetectionsAPIBulkExportRulesAction = "export"
+)
-	// AppliedByAgent Applied by agent
-	AppliedByAgent *bool `json:"applied_by_agent,omitempty"`
+// Defines values for SecurityDetectionsAPIBulkGapsFillingSkipReason.
+const (
+	NOGAPSTOFILL SecurityDetectionsAPIBulkGapsFillingSkipReason = "NO_GAPS_TO_FILL"
+)
-	// Etag `etag` is sent by the APM agent to indicate the `etag` of the last successfully applied configuration. If the `etag` matches an existing configuration its `applied_by_agent` property will be set to `true`. Every time a configuration is edited `applied_by_agent` is reset to `false`.
-	Etag string `json:"etag"`
+// Defines values for SecurityDetectionsAPIBulkManualRuleFillGapsAction.
+const (
+	FillGaps SecurityDetectionsAPIBulkManualRuleFillGapsAction = "fill_gaps"
+)
-	// Service Service
-	Service APMUIServiceObject `json:"service"`
+// Defines values for SecurityDetectionsAPIBulkManualRuleRunAction.
+const (
+	Run SecurityDetectionsAPIBulkManualRuleRunAction = "run"
+)
-	// Settings Agent configuration settings
-	Settings APMUISettingsObject `json:"settings"`
-}
+// Defines values for SecurityDetectionsAPIDefaultParamsCommand.
+const (
+	SecurityDetectionsAPIDefaultParamsCommandIsolate SecurityDetectionsAPIDefaultParamsCommand = "isolate"
+)
-// APMUIAgentConfigurationsResponse defines model for APM_UI_agent_configurations_response.
-type APMUIAgentConfigurationsResponse struct {
-	// Configurations Agent configuration
-	Configurations *[]APMUIAgentConfigurationObject `json:"configurations,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIEndpointResponseActionActionTypeId.
+const (
+	DotEndpoint SecurityDetectionsAPIEndpointResponseActionActionTypeId = ".endpoint"
+)
-// APMUIDeleteAgentConfigurationsResponse defines model for APM_UI_delete_agent_configurations_response.
-type APMUIDeleteAgentConfigurationsResponse struct {
-	// Result Result
-	Result *string `json:"result,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIEqlQueryLanguage.
+const (
+	SecurityDetectionsAPIEqlQueryLanguageEql SecurityDetectionsAPIEqlQueryLanguage = "eql"
+)
-// APMUIDeleteServiceObject Service
-type APMUIDeleteServiceObject struct {
-	// Service Service
-	Service APMUIServiceObject `json:"service"`
-}
+// Defines values for SecurityDetectionsAPIEqlRequiredFieldsType.
+const (
+	SecurityDetectionsAPIEqlRequiredFieldsTypeEql SecurityDetectionsAPIEqlRequiredFieldsType = "eql"
+)
-// APMUIServiceObject Service
-type APMUIServiceObject struct {
-	// Environment The environment of the service.
-	Environment *string `json:"environment,omitempty"`
+// Defines values for SecurityDetectionsAPIEqlRuleType.
+const (
+	SecurityDetectionsAPIEqlRuleTypeEql SecurityDetectionsAPIEqlRuleType = "eql"
+)
-	// Name The name of the service.
-	Name *string `json:"name,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIEqlRuleCreateFieldsType.
+const (
+	SecurityDetectionsAPIEqlRuleCreateFieldsTypeEql SecurityDetectionsAPIEqlRuleCreateFieldsType = "eql"
+)
-// APMUISettingsObject Agent configuration settings
-type APMUISettingsObject map[string]string
+// Defines values for SecurityDetectionsAPIEqlRuleCreatePropsType.
+const (
+	SecurityDetectionsAPIEqlRuleCreatePropsTypeEql SecurityDetectionsAPIEqlRuleCreatePropsType = "eql"
+)
-// DataViews400Response defines model for Data_views_400_response.
-type DataViews400Response struct {
-	Error string `json:"error"`
-	Message string `json:"message"`
-	StatusCode float32 `json:"statusCode"`
-}
+// Defines values for SecurityDetectionsAPIEqlRulePatchFieldsType.
+const (
+	SecurityDetectionsAPIEqlRulePatchFieldsTypeEql SecurityDetectionsAPIEqlRulePatchFieldsType = "eql"
+)
-// DataViews404Response defines model for Data_views_404_response.
-type DataViews404Response struct {
-	Error *DataViews404ResponseError `json:"error,omitempty"`
-	Message *string `json:"message,omitempty"`
-	StatusCode *DataViews404ResponseStatusCode `json:"statusCode,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIEqlRulePatchPropsType.
+const (
+	SecurityDetectionsAPIEqlRulePatchPropsTypeEql SecurityDetectionsAPIEqlRulePatchPropsType = "eql"
+)
-// DataViews404ResponseError defines model for DataViews404Response.Error.
-type DataViews404ResponseError string
+// Defines values for SecurityDetectionsAPIEqlRuleResponseFieldsType.
+const (
+	SecurityDetectionsAPIEqlRuleResponseFieldsTypeEql SecurityDetectionsAPIEqlRuleResponseFieldsType = "eql"
+)
-// DataViews404ResponseStatusCode defines model for DataViews404Response.StatusCode.
-type DataViews404ResponseStatusCode int
+// Defines values for SecurityDetectionsAPIEqlRuleUpdatePropsType.
+const (
+	SecurityDetectionsAPIEqlRuleUpdatePropsTypeEql SecurityDetectionsAPIEqlRuleUpdatePropsType = "eql"
+)
-// DataViewsAllownoindex Allows the data view saved object to exist before the data is available.
-type DataViewsAllownoindex = bool
+// Defines values for SecurityDetectionsAPIEsqlQueryLanguage.
+const (
+	SecurityDetectionsAPIEsqlQueryLanguageEsql SecurityDetectionsAPIEsqlQueryLanguage = "esql"
+)
-// DataViewsCreateDataViewRequestObject defines model for Data_views_create_data_view_request_object.
-type DataViewsCreateDataViewRequestObject struct {
-	// DataView The data view object.
-	DataView DataViewsCreateDataViewRequestObjectInner `json:"data_view"`
+// Defines values for SecurityDetectionsAPIEsqlRuleType.
+const (
+	SecurityDetectionsAPIEsqlRuleTypeEsql SecurityDetectionsAPIEsqlRuleType = "esql"
+)
-	// Override Override an existing data view if a data view with the provided title already exists.
-	Override *bool `json:"override,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIEsqlRuleCreateFieldsType.
+const (
+	SecurityDetectionsAPIEsqlRuleCreateFieldsTypeEsql SecurityDetectionsAPIEsqlRuleCreateFieldsType = "esql"
+)
-// DataViewsCreateDataViewRequestObjectInner The data view object.
-type DataViewsCreateDataViewRequestObjectInner struct {
-	// AllowNoIndex Allows the data view saved object to exist before the data is available.
-	AllowNoIndex *DataViewsAllownoindex `json:"allowNoIndex,omitempty"`
-	FieldAttrs *map[string]DataViewsFieldattrs `json:"fieldAttrs,omitempty"`
+// Defines values for SecurityDetectionsAPIEsqlRuleCreatePropsType.
+const (
+	SecurityDetectionsAPIEsqlRuleCreatePropsTypeEsql SecurityDetectionsAPIEsqlRuleCreatePropsType = "esql"
+)
-	// FieldFormats A map of field formats by field name.
-	FieldFormats *DataViewsFieldformats `json:"fieldFormats,omitempty"`
-	Fields *map[string]interface{} `json:"fields,omitempty"`
-	Id *string `json:"id,omitempty"`
+// Defines values for SecurityDetectionsAPIEsqlRulePatchPropsType.
+const (
+	SecurityDetectionsAPIEsqlRulePatchPropsTypeEsql SecurityDetectionsAPIEsqlRulePatchPropsType = "esql"
+)
-	// Name The data view name.
-	Name *string `json:"name,omitempty"`
+// Defines values for SecurityDetectionsAPIEsqlRuleRequiredFieldsType.
+const (
+	SecurityDetectionsAPIEsqlRuleRequiredFieldsTypeEsql SecurityDetectionsAPIEsqlRuleRequiredFieldsType = "esql"
+)
-	// Namespaces An array of space identifiers for sharing the data view between multiple spaces.
-	Namespaces *DataViewsNamespaces `json:"namespaces,omitempty"`
-	RuntimeFieldMap *map[string]DataViewsRuntimefieldmap `json:"runtimeFieldMap,omitempty"`
+// Defines values for SecurityDetectionsAPIEsqlRuleResponseFieldsType.
+const (
+	SecurityDetectionsAPIEsqlRuleResponseFieldsTypeEsql SecurityDetectionsAPIEsqlRuleResponseFieldsType = "esql"
+)
-	// SourceFilters The array of field names you want to filter out in Discover.
-	SourceFilters *DataViewsSourcefilters `json:"sourceFilters,omitempty"`
+// Defines values for SecurityDetectionsAPIEsqlRuleUpdatePropsType.
+const (
+	SecurityDetectionsAPIEsqlRuleUpdatePropsTypeEsql SecurityDetectionsAPIEsqlRuleUpdatePropsType = "esql"
+)
-	// TimeFieldName The timestamp field name, which you use for time-based data views.
-	TimeFieldName *DataViewsTimefieldname `json:"timeFieldName,omitempty"`
+// Defines values for SecurityDetectionsAPIExceptionListType.
+const (
+	SecurityDetectionsAPIExceptionListTypeDetection SecurityDetectionsAPIExceptionListType = "detection"
+	SecurityDetectionsAPIExceptionListTypeEndpoint SecurityDetectionsAPIExceptionListType = "endpoint"
+	SecurityDetectionsAPIExceptionListTypeEndpointBlocklists SecurityDetectionsAPIExceptionListType = "endpoint_blocklists"
+	SecurityDetectionsAPIExceptionListTypeEndpointEvents SecurityDetectionsAPIExceptionListType = "endpoint_events"
+	SecurityDetectionsAPIExceptionListTypeEndpointHostIsolationExceptions SecurityDetectionsAPIExceptionListType = "endpoint_host_isolation_exceptions"
+	SecurityDetectionsAPIExceptionListTypeEndpointTrustedApps SecurityDetectionsAPIExceptionListType = "endpoint_trusted_apps"
+	SecurityDetectionsAPIExceptionListTypeEndpointTrustedDevices SecurityDetectionsAPIExceptionListType = "endpoint_trusted_devices"
+	SecurityDetectionsAPIExceptionListTypeRuleDefault SecurityDetectionsAPIExceptionListType = "rule_default"
+)
-	// Title Comma-separated list of data streams, indices, and aliases that you want to search. Supports wildcards (`*`).
-	Title DataViewsTitle `json:"title"`
+// Defines values for SecurityDetectionsAPIExternalRuleSourceType.
+const (
+	External SecurityDetectionsAPIExternalRuleSourceType = "external"
+)
-	// Type When set to `rollup`, identifies the rollup data views.
-	Type *DataViewsType `json:"type,omitempty"`
+// Defines values for SecurityDetectionsAPIFindRulesSortField.
+const (
+	SecurityDetectionsAPIFindRulesSortFieldCreatedAt SecurityDetectionsAPIFindRulesSortField = "created_at"
+	SecurityDetectionsAPIFindRulesSortFieldCreatedAt1 SecurityDetectionsAPIFindRulesSortField = "createdAt"
+	SecurityDetectionsAPIFindRulesSortFieldEnabled SecurityDetectionsAPIFindRulesSortField = "enabled"
+	SecurityDetectionsAPIFindRulesSortFieldExecutionSummaryLastExecutionDate SecurityDetectionsAPIFindRulesSortField = "execution_summary.last_execution.date"
+	SecurityDetectionsAPIFindRulesSortFieldExecutionSummaryLastExecutionMetricsExecutionGapDurationS SecurityDetectionsAPIFindRulesSortField = "execution_summary.last_execution.metrics.execution_gap_duration_s"
+	SecurityDetectionsAPIFindRulesSortFieldExecutionSummaryLastExecutionMetricsTotalIndexingDurationMs SecurityDetectionsAPIFindRulesSortField = "execution_summary.last_execution.metrics.total_indexing_duration_ms"
+	SecurityDetectionsAPIFindRulesSortFieldExecutionSummaryLastExecutionMetricsTotalSearchDurationMs SecurityDetectionsAPIFindRulesSortField = "execution_summary.last_execution.metrics.total_search_duration_ms"
+	SecurityDetectionsAPIFindRulesSortFieldExecutionSummaryLastExecutionStatus SecurityDetectionsAPIFindRulesSortField = "execution_summary.last_execution.status"
+	SecurityDetectionsAPIFindRulesSortFieldName SecurityDetectionsAPIFindRulesSortField = "name"
+	SecurityDetectionsAPIFindRulesSortFieldRiskScore SecurityDetectionsAPIFindRulesSortField = "risk_score"
+	SecurityDetectionsAPIFindRulesSortFieldRiskScore1 SecurityDetectionsAPIFindRulesSortField = "riskScore"
+	SecurityDetectionsAPIFindRulesSortFieldSeverity SecurityDetectionsAPIFindRulesSortField = "severity"
+	SecurityDetectionsAPIFindRulesSortFieldUpdatedAt SecurityDetectionsAPIFindRulesSortField = "updated_at"
+	SecurityDetectionsAPIFindRulesSortFieldUpdatedAt1 SecurityDetectionsAPIFindRulesSortField = "updatedAt"
+)
-	// TypeMeta When you use rollup indices, contains the field list for the rollup data view API endpoints.
-	TypeMeta *DataViewsTypemeta `json:"typeMeta,omitempty"`
-	Version *string `json:"version,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIInternalRuleSourceType.
+const (
+	Internal SecurityDetectionsAPIInternalRuleSourceType = "internal"
+)
-// DataViewsDataViewResponseObject defines model for Data_views_data_view_response_object.
-type DataViewsDataViewResponseObject struct {
-	DataView *DataViewsDataViewResponseObjectInner `json:"data_view,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIKqlQueryLanguage.
+const (
+	Kuery SecurityDetectionsAPIKqlQueryLanguage = "kuery"
+	Lucene SecurityDetectionsAPIKqlQueryLanguage = "lucene"
+)
-// DataViewsDataViewResponseObjectInner defines model for Data_views_data_view_response_object_inner.
-type DataViewsDataViewResponseObjectInner struct {
-	// AllowNoIndex Allows the data view saved object to exist before the data is available.
-	AllowNoIndex *DataViewsAllownoindex `json:"allowNoIndex,omitempty"`
-	FieldAttrs *map[string]DataViewsFieldattrs `json:"fieldAttrs,omitempty"`
+// Defines values for SecurityDetectionsAPIMachineLearningRuleType.
+const (
+	SecurityDetectionsAPIMachineLearningRuleTypeMachineLearning SecurityDetectionsAPIMachineLearningRuleType = "machine_learning"
+)
-	// FieldFormats A map of field formats by field name.
-	FieldFormats *DataViewsFieldformats `json:"fieldFormats,omitempty"`
-	Fields *map[string]interface{} `json:"fields,omitempty"`
-	Id *string `json:"id,omitempty"`
+// Defines values for SecurityDetectionsAPIMachineLearningRuleCreateFieldsType.
+const (
+	SecurityDetectionsAPIMachineLearningRuleCreateFieldsTypeMachineLearning SecurityDetectionsAPIMachineLearningRuleCreateFieldsType = "machine_learning"
+)
-	// Name The data view name.
-	Name *string `json:"name,omitempty"`
+// Defines values for SecurityDetectionsAPIMachineLearningRuleCreatePropsType.
+const (
+	SecurityDetectionsAPIMachineLearningRuleCreatePropsTypeMachineLearning SecurityDetectionsAPIMachineLearningRuleCreatePropsType = "machine_learning"
+)
-	// Namespaces An array of space identifiers for sharing the data view between multiple spaces.
-	Namespaces *DataViewsNamespaces `json:"namespaces,omitempty"`
-	RuntimeFieldMap *map[string]DataViewsRuntimefieldmap `json:"runtimeFieldMap,omitempty"`
+// Defines values for SecurityDetectionsAPIMachineLearningRulePatchFieldsType.
+const (
+	SecurityDetectionsAPIMachineLearningRulePatchFieldsTypeMachineLearning SecurityDetectionsAPIMachineLearningRulePatchFieldsType = "machine_learning"
+)
-	// SourceFilters The array of field names you want to filter out in Discover.
-	SourceFilters *DataViewsSourcefilters `json:"sourceFilters,omitempty"`
+// Defines values for SecurityDetectionsAPIMachineLearningRulePatchPropsType.
+const (
+	SecurityDetectionsAPIMachineLearningRulePatchPropsTypeMachineLearning SecurityDetectionsAPIMachineLearningRulePatchPropsType = "machine_learning"
+)
-	// TimeFieldName The timestamp field name, which you use for time-based data views.
-	TimeFieldName *DataViewsTimefieldname `json:"timeFieldName,omitempty"`
+// Defines values for SecurityDetectionsAPIMachineLearningRuleRequiredFieldsType.
+const (
+	SecurityDetectionsAPIMachineLearningRuleRequiredFieldsTypeMachineLearning SecurityDetectionsAPIMachineLearningRuleRequiredFieldsType = "machine_learning"
+)
-	// Title Comma-separated list of data streams, indices, and aliases that you want to search. Supports wildcards (`*`).
-	Title *DataViewsTitle `json:"title,omitempty"`
+// Defines values for SecurityDetectionsAPIMachineLearningRuleResponseFieldsType.
+const (
+	SecurityDetectionsAPIMachineLearningRuleResponseFieldsTypeMachineLearning SecurityDetectionsAPIMachineLearningRuleResponseFieldsType = "machine_learning"
+)
-	// TypeMeta When you use rollup indices, contains the field list for the rollup data view API endpoints.
-	TypeMeta *DataViewsTypemetaResponse `json:"typeMeta,omitempty"`
-	Version *string `json:"version,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIMachineLearningRuleUpdatePropsType.
+const (
+	SecurityDetectionsAPIMachineLearningRuleUpdatePropsTypeMachineLearning SecurityDetectionsAPIMachineLearningRuleUpdatePropsType = "machine_learning"
+)
-// DataViewsFieldattrs A map of field attributes by field name.
-type DataViewsFieldattrs struct {
-	// Count Popularity count for the field.
-	Count *int `json:"count,omitempty"`
+// Defines values for SecurityDetectionsAPIMigrationCleanupResultStatus.
+const (
+	SecurityDetectionsAPIMigrationCleanupResultStatusFailure SecurityDetectionsAPIMigrationCleanupResultStatus = "failure"
+	SecurityDetectionsAPIMigrationCleanupResultStatusPending SecurityDetectionsAPIMigrationCleanupResultStatus = "pending"
+	SecurityDetectionsAPIMigrationCleanupResultStatusSuccess SecurityDetectionsAPIMigrationCleanupResultStatus = "success"
+)
-	// CustomDescription Custom description for the field.
-	CustomDescription *string `json:"customDescription,omitempty"`
+// Defines values for SecurityDetectionsAPIMigrationFinalizationResultStatus.
+const (
+	SecurityDetectionsAPIMigrationFinalizationResultStatusFailure SecurityDetectionsAPIMigrationFinalizationResultStatus = "failure"
+	SecurityDetectionsAPIMigrationFinalizationResultStatusPending SecurityDetectionsAPIMigrationFinalizationResultStatus = "pending"
+	SecurityDetectionsAPIMigrationFinalizationResultStatusSuccess SecurityDetectionsAPIMigrationFinalizationResultStatus = "success"
+)
-	// CustomLabel Custom label for the field.
-	CustomLabel *string `json:"customLabel,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIMigrationStatusStatus.
+const (
+	Failure SecurityDetectionsAPIMigrationStatusStatus = "failure"
+	Pending SecurityDetectionsAPIMigrationStatusStatus = "pending"
+	Success SecurityDetectionsAPIMigrationStatusStatus = "success"
+)
-// DataViewsFieldformat defines model for Data_views_fieldformat.
-type DataViewsFieldformat struct {
-	Id *string `json:"id,omitempty"`
-	Params *DataViewsFieldformatParams `json:"params,omitempty"`
-}
+// Defines values for SecurityDetectionsAPINewTermsRuleType.
+const (
+	SecurityDetectionsAPINewTermsRuleTypeNewTerms SecurityDetectionsAPINewTermsRuleType = "new_terms"
+)
-// DataViewsFieldformatParams defines model for Data_views_fieldformat_params.
-type DataViewsFieldformatParams struct {
-	Colors *[]DataViewsFieldformatParamsColor `json:"colors,omitempty"`
-	FieldLength *int `json:"fieldLength,omitempty"`
-	FieldType *string `json:"fieldType,omitempty"`
-	Height *int `json:"height,omitempty"`
-	IncludeSpaceWithSuffix *bool `json:"includeSpaceWithSuffix,omitempty"`
-	InputFormat *string `json:"inputFormat,omitempty"`
-	LabelTemplate *string `json:"labelTemplate,omitempty"`
-	LookupEntries *[]DataViewsFieldformatParamsLookup `json:"lookupEntries,omitempty"`
-	OutputFormat *string `json:"outputFormat,omitempty"`
-	OutputPrecision *int `json:"outputPrecision,omitempty"`
-	Pattern *string `json:"pattern,omitempty"`
-	Timezone *string `json:"timezone,omitempty"`
-	Transform *string `json:"transform,omitempty"`
-	Type *string `json:"type,omitempty"`
-	UnknownKeyValue *string `json:"unknownKeyValue,omitempty"`
-	UrlTemplate *string `json:"urlTemplate,omitempty"`
-	UseShortSuffix *bool `json:"useShortSuffix,omitempty"`
-	Width *int `json:"width,omitempty"`
-}
+// Defines values for SecurityDetectionsAPINewTermsRuleCreateFieldsType.
+const (
+	SecurityDetectionsAPINewTermsRuleCreateFieldsTypeNewTerms SecurityDetectionsAPINewTermsRuleCreateFieldsType = "new_terms"
+)
-// DataViewsFieldformatParamsColor defines model for Data_views_fieldformat_params_color.
-type DataViewsFieldformatParamsColor struct {
-	Background *string `json:"background,omitempty"`
-	Range *string `json:"range,omitempty"`
-	Regex *string `json:"regex,omitempty"`
-	Text *string `json:"text,omitempty"`
-}
+// Defines values for SecurityDetectionsAPINewTermsRuleCreatePropsType.
+const (
+	SecurityDetectionsAPINewTermsRuleCreatePropsTypeNewTerms SecurityDetectionsAPINewTermsRuleCreatePropsType = "new_terms"
+)
-// DataViewsFieldformatParamsLookup defines model for Data_views_fieldformat_params_lookup.
-type DataViewsFieldformatParamsLookup struct {
-	Key *string `json:"key,omitempty"`
-	Value *string `json:"value,omitempty"`
-}
+// Defines values for SecurityDetectionsAPINewTermsRulePatchFieldsType.
+const (
+	SecurityDetectionsAPINewTermsRulePatchFieldsTypeNewTerms SecurityDetectionsAPINewTermsRulePatchFieldsType = "new_terms"
+)
-// DataViewsFieldformats A map of field formats by field name.
-type DataViewsFieldformats map[string]DataViewsFieldformat
+// Defines values for SecurityDetectionsAPINewTermsRulePatchPropsType.
+const (
+	SecurityDetectionsAPINewTermsRulePatchPropsTypeNewTerms SecurityDetectionsAPINewTermsRulePatchPropsType = "new_terms"
+)
-// DataViewsNamespaces An array of space identifiers for sharing the data view between multiple spaces.
-type DataViewsNamespaces = []string
+// Defines values for SecurityDetectionsAPINewTermsRuleRequiredFieldsType.
+const (
+	SecurityDetectionsAPINewTermsRuleRequiredFieldsTypeNewTerms SecurityDetectionsAPINewTermsRuleRequiredFieldsType = "new_terms"
+)
-// DataViewsRuntimefieldmap A map of runtime field definitions by field name.
-type DataViewsRuntimefieldmap struct {
-	Script DataViewsRuntimefieldmapScript `json:"script"`
+// Defines values for SecurityDetectionsAPINewTermsRuleResponseFieldsType.
+const (
+	SecurityDetectionsAPINewTermsRuleResponseFieldsTypeNewTerms SecurityDetectionsAPINewTermsRuleResponseFieldsType = "new_terms"
+)
-	// Type Mapping type of the runtime field.
-	Type string `json:"type"`
-}
+// Defines values for SecurityDetectionsAPINewTermsRuleUpdatePropsType.
+const (
+	SecurityDetectionsAPINewTermsRuleUpdatePropsTypeNewTerms SecurityDetectionsAPINewTermsRuleUpdatePropsType = "new_terms"
+)
-// DataViewsRuntimefieldmapScript defines model for Data_views_runtimefieldmap_script.
-type DataViewsRuntimefieldmapScript struct {
-	// Source Script for the runtime field.
-	Source *string `json:"source,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIOsqueryResponseActionActionTypeId.
+const (
+	DotOsquery SecurityDetectionsAPIOsqueryResponseActionActionTypeId = ".osquery"
+)
-// DataViewsSourcefilterItem defines model for Data_views_sourcefilter_item.
-type DataViewsSourcefilterItem struct {
-	Value string `json:"value"`
-}
+// Defines values for SecurityDetectionsAPIProcessesParamsCommand.
+const (
+	KillProcess SecurityDetectionsAPIProcessesParamsCommand = "kill-process"
+	SuspendProcess SecurityDetectionsAPIProcessesParamsCommand = "suspend-process"
+)
-// DataViewsSourcefilters The array of field names you want to filter out in Discover.
-type DataViewsSourcefilters = []DataViewsSourcefilterItem
+// Defines values for SecurityDetectionsAPIQueryRuleType.
+const (
+	SecurityDetectionsAPIQueryRuleTypeQuery SecurityDetectionsAPIQueryRuleType = "query"
+)
-// DataViewsTimefieldname The timestamp field name, which you use for time-based data views.
-type DataViewsTimefieldname = string
+// Defines values for SecurityDetectionsAPIQueryRuleCreateFieldsType.
+const (
+	SecurityDetectionsAPIQueryRuleCreateFieldsTypeQuery SecurityDetectionsAPIQueryRuleCreateFieldsType = "query"
+)
-// DataViewsTitle Comma-separated list of data streams, indices, and aliases that you want to search. Supports wildcards (`*`).
-type DataViewsTitle = string
+// Defines values for SecurityDetectionsAPIQueryRuleCreatePropsType.
+const (
+	SecurityDetectionsAPIQueryRuleCreatePropsTypeQuery SecurityDetectionsAPIQueryRuleCreatePropsType = "query"
+)
-// DataViewsType When set to `rollup`, identifies the rollup data views.
-type DataViewsType = string
+// Defines values for SecurityDetectionsAPIQueryRulePatchFieldsType.
+const (
+	SecurityDetectionsAPIQueryRulePatchFieldsTypeQuery SecurityDetectionsAPIQueryRulePatchFieldsType = "query"
+)
-// DataViewsTypemeta When you use rollup indices, contains the field list for the rollup data view API endpoints.
-type DataViewsTypemeta struct {
-	// Aggs A map of rollup restrictions by aggregation type and field name.
-	Aggs map[string]interface{} `json:"aggs"`
+// Defines values for SecurityDetectionsAPIQueryRulePatchPropsType.
+const (
+	SecurityDetectionsAPIQueryRulePatchPropsTypeQuery SecurityDetectionsAPIQueryRulePatchPropsType = "query"
+)
-	// Params Properties for retrieving rollup fields.
-	Params map[string]interface{} `json:"params"`
-}
+// Defines values for SecurityDetectionsAPIQueryRuleRequiredFieldsType.
+const (
+	SecurityDetectionsAPIQueryRuleRequiredFieldsTypeQuery SecurityDetectionsAPIQueryRuleRequiredFieldsType = "query"
+)
-// DataViewsTypemetaResponse When you use rollup indices, contains the field list for the rollup data view API endpoints.
-type DataViewsTypemetaResponse struct {
-	// Aggs A map of rollup restrictions by aggregation type and field name.
-	Aggs *map[string]interface{} `json:"aggs,omitempty"`
+// Defines values for SecurityDetectionsAPIQueryRuleResponseFieldsType.
+const (
+	SecurityDetectionsAPIQueryRuleResponseFieldsTypeQuery SecurityDetectionsAPIQueryRuleResponseFieldsType = "query"
+)
-	// Params Properties for retrieving rollup fields.
-	Params *map[string]interface{} `json:"params,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIQueryRuleUpdatePropsType.
+const (
+	SecurityDetectionsAPIQueryRuleUpdatePropsTypeQuery SecurityDetectionsAPIQueryRuleUpdatePropsType = "query"
+)
-// DataViewsUpdateDataViewRequestObject defines model for Data_views_update_data_view_request_object.
-type DataViewsUpdateDataViewRequestObject struct {
-	// DataView The data view properties you want to update. Only the specified properties are updated in the data view. Unspecified fields stay as they are persisted.
-	DataView DataViewsUpdateDataViewRequestObjectInner `json:"data_view"`
+// Defines values for SecurityDetectionsAPIRiskScoreMappingOperator.
+const (
+	SecurityDetectionsAPIRiskScoreMappingOperatorEquals SecurityDetectionsAPIRiskScoreMappingOperator = "equals"
+)
-	// RefreshFields Reloads the data view fields after the data view is updated.
-	RefreshFields *bool `json:"refresh_fields,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIRuleActionNotifyWhen.
+const (
+	SecurityDetectionsAPIRuleActionNotifyWhenOnActionGroupChange SecurityDetectionsAPIRuleActionNotifyWhen = "onActionGroupChange"
+	SecurityDetectionsAPIRuleActionNotifyWhenOnActiveAlert SecurityDetectionsAPIRuleActionNotifyWhen = "onActiveAlert"
+	SecurityDetectionsAPIRuleActionNotifyWhenOnThrottleInterval SecurityDetectionsAPIRuleActionNotifyWhen = "onThrottleInterval"
+)
-// DataViewsUpdateDataViewRequestObjectInner The data view properties you want to update. Only the specified properties are updated in the data view. Unspecified fields stay as they are persisted.
-type DataViewsUpdateDataViewRequestObjectInner struct {
-	// AllowNoIndex Allows the data view saved object to exist before the data is available.
-	AllowNoIndex *DataViewsAllownoindex `json:"allowNoIndex,omitempty"`
+// Defines values for SecurityDetectionsAPIRuleActionThrottle0.
+const (
+	SecurityDetectionsAPIRuleActionThrottle0NoActions SecurityDetectionsAPIRuleActionThrottle0 = "no_actions"
+	SecurityDetectionsAPIRuleActionThrottle0Rule SecurityDetectionsAPIRuleActionThrottle0 = "rule"
+)
-	// FieldFormats A map of field formats by field name.
-	FieldFormats *DataViewsFieldformats `json:"fieldFormats,omitempty"`
-	Fields *map[string]interface{} `json:"fields,omitempty"`
-	Name *string `json:"name,omitempty"`
-	RuntimeFieldMap *map[string]DataViewsRuntimefieldmap `json:"runtimeFieldMap,omitempty"`
+// Defines values for SecurityDetectionsAPIRuleExceptionListNamespaceType.
+const (
+	SecurityDetectionsAPIRuleExceptionListNamespaceTypeAgnostic SecurityDetectionsAPIRuleExceptionListNamespaceType = "agnostic"
+	SecurityDetectionsAPIRuleExceptionListNamespaceTypeSingle SecurityDetectionsAPIRuleExceptionListNamespaceType = "single"
+)
-	// SourceFilters The array of field names you want to filter out in Discover.
-	SourceFilters *DataViewsSourcefilters `json:"sourceFilters,omitempty"`
+// Defines values for SecurityDetectionsAPIRuleExecutionStatus.
+const (
+	SecurityDetectionsAPIRuleExecutionStatusFailed SecurityDetectionsAPIRuleExecutionStatus = "failed"
+	SecurityDetectionsAPIRuleExecutionStatusGoingToRun SecurityDetectionsAPIRuleExecutionStatus = "going to run"
+	SecurityDetectionsAPIRuleExecutionStatusPartialFailure SecurityDetectionsAPIRuleExecutionStatus = "partial failure"
+	SecurityDetectionsAPIRuleExecutionStatusRunning SecurityDetectionsAPIRuleExecutionStatus = "running"
+	SecurityDetectionsAPIRuleExecutionStatusSucceeded SecurityDetectionsAPIRuleExecutionStatus = "succeeded"
+)
-	// TimeFieldName The timestamp field name, which you use for time-based data views.
-	TimeFieldName *DataViewsTimefieldname `json:"timeFieldName,omitempty"`
+// Defines values for SecurityDetectionsAPISavedObjectResolveAliasPurpose.
+const (
+	SecurityDetectionsAPISavedObjectResolveAliasPurposeSavedObjectConversion SecurityDetectionsAPISavedObjectResolveAliasPurpose = "savedObjectConversion"
+	SecurityDetectionsAPISavedObjectResolveAliasPurposeSavedObjectImport SecurityDetectionsAPISavedObjectResolveAliasPurpose = "savedObjectImport"
+)
-	// Title Comma-separated list of data streams, indices, and aliases that you want to search. Supports wildcards (`*`).
-	Title *DataViewsTitle `json:"title,omitempty"`
+// Defines values for SecurityDetectionsAPISavedObjectResolveOutcome.
+const (
+	SecurityDetectionsAPISavedObjectResolveOutcomeAliasMatch SecurityDetectionsAPISavedObjectResolveOutcome = "aliasMatch"
+	SecurityDetectionsAPISavedObjectResolveOutcomeConflict SecurityDetectionsAPISavedObjectResolveOutcome = "conflict"
+	SecurityDetectionsAPISavedObjectResolveOutcomeExactMatch SecurityDetectionsAPISavedObjectResolveOutcome = "exactMatch"
+)
-	// Type When set to `rollup`, identifies the rollup data views.
-	Type *DataViewsType `json:"type,omitempty"`
+// Defines values for SecurityDetectionsAPISavedQueryRuleType.
+const (
+	SecurityDetectionsAPISavedQueryRuleTypeSavedQuery SecurityDetectionsAPISavedQueryRuleType = "saved_query"
+)
-	// TypeMeta When you use rollup indices, contains the field list for the rollup data view API endpoints.
-	TypeMeta *DataViewsTypemeta `json:"typeMeta,omitempty"`
-}
+// Defines values for SecurityDetectionsAPISavedQueryRuleCreateFieldsType.
+const (
+	SecurityDetectionsAPISavedQueryRuleCreateFieldsTypeSavedQuery SecurityDetectionsAPISavedQueryRuleCreateFieldsType = "saved_query"
+)
-// SyntheticsGetParameterResponse defines model for Synthetics_getParameterResponse.
-type SyntheticsGetParameterResponse struct {
-	// Description The description of the parameter. It is included in the response if the user has read-only permissions to the Synthetics app.
-	Description *string `json:"description,omitempty"`
+// Defines values for SecurityDetectionsAPISavedQueryRuleCreatePropsType.
+const (
+	SecurityDetectionsAPISavedQueryRuleCreatePropsTypeSavedQuery SecurityDetectionsAPISavedQueryRuleCreatePropsType = "saved_query"
+)
-	// Id The unique identifier of the parameter.
-	Id *string `json:"id,omitempty"`
+// Defines values for SecurityDetectionsAPISavedQueryRulePatchFieldsType.
+const (
+	SecurityDetectionsAPISavedQueryRulePatchFieldsTypeSavedQuery SecurityDetectionsAPISavedQueryRulePatchFieldsType = "saved_query"
+)
-	// Key The key of the parameter.
-	Key *string `json:"key,omitempty"`
+// Defines values for SecurityDetectionsAPISavedQueryRulePatchPropsType.
+const (
+	SecurityDetectionsAPISavedQueryRulePatchPropsTypeSavedQuery SecurityDetectionsAPISavedQueryRulePatchPropsType = "saved_query"
+)
-	// Namespaces The namespaces associated with the parameter. It is included in the response if the user has read-only permissions to the Synthetics app.
-	Namespaces *[]string `json:"namespaces,omitempty"`
+// Defines values for SecurityDetectionsAPISavedQueryRuleRequiredFieldsType.
+const (
+	SecurityDetectionsAPISavedQueryRuleRequiredFieldsTypeSavedQuery SecurityDetectionsAPISavedQueryRuleRequiredFieldsType = "saved_query"
+)
-	// Tags An array of tags associated with the parameter. It is included in the response if the user has read-only permissions to the Synthetics app.
-	Tags *[]string `json:"tags,omitempty"`
+// Defines values for SecurityDetectionsAPISavedQueryRuleResponseFieldsType.
+const (
+	SecurityDetectionsAPISavedQueryRuleResponseFieldsTypeSavedQuery SecurityDetectionsAPISavedQueryRuleResponseFieldsType = "saved_query"
+)
-	// Value The value associated with the parameter. It will be included in the response if the user has write permissions.
-	Value *string `json:"value,omitempty"`
-}
+// Defines values for SecurityDetectionsAPISavedQueryRuleUpdatePropsType.
+const (
+	SecurityDetectionsAPISavedQueryRuleUpdatePropsTypeSavedQuery SecurityDetectionsAPISavedQueryRuleUpdatePropsType = "saved_query"
+)
-// SyntheticsParameterRequest defines model for Synthetics_parameterRequest.
-type SyntheticsParameterRequest struct {
-	// Description A description of the parameter.
-	Description *string `json:"description,omitempty"`
+// Defines values for SecurityDetectionsAPISetAlertsStatusByQueryConflicts.
+const (
+	Abort SecurityDetectionsAPISetAlertsStatusByQueryConflicts = "abort"
+	Proceed SecurityDetectionsAPISetAlertsStatusByQueryConflicts = "proceed"
+)
-	// Key The key of the parameter.
-	Key string `json:"key"`
+// Defines values for SecurityDetectionsAPISeverity.
+const (
+	SecurityDetectionsAPISeverityCritical SecurityDetectionsAPISeverity = "critical"
+	SecurityDetectionsAPISeverityHigh SecurityDetectionsAPISeverity = "high"
+	SecurityDetectionsAPISeverityLow SecurityDetectionsAPISeverity = "low"
+	SecurityDetectionsAPISeverityMedium SecurityDetectionsAPISeverity = "medium"
+)
-	// ShareAcrossSpaces Specify whether the parameter should be shared across spaces.
-	ShareAcrossSpaces *bool `json:"share_across_spaces,omitempty"`
+// Defines values for SecurityDetectionsAPISeverityMappingOperator.
+const (
+	SecurityDetectionsAPISeverityMappingOperatorEquals SecurityDetectionsAPISeverityMappingOperator = "equals"
+)
-	// Tags An array of tags to categorize the parameter.
-	Tags *[]string `json:"tags,omitempty"`
+// Defines values for SecurityDetectionsAPISortOrder.
+const (
+	SecurityDetectionsAPISortOrderAsc SecurityDetectionsAPISortOrder = "asc"
+	SecurityDetectionsAPISortOrderDesc SecurityDetectionsAPISortOrder = "desc"
+)
-	// Value The value associated with the parameter.
-	Value string `json:"value"`
-}
+// Defines values for SecurityDetectionsAPIThreatMappingEntryType.
+const (
+	Mapping SecurityDetectionsAPIThreatMappingEntryType = "mapping"
+)
-// SyntheticsPostParameterResponse defines model for Synthetics_postParameterResponse.
-type SyntheticsPostParameterResponse struct {
-	// Description A description of the parameter.
-	Description *string `json:"description,omitempty"`
+// Defines values for SecurityDetectionsAPIThreatMatchRuleType.
+const (
+	SecurityDetectionsAPIThreatMatchRuleTypeThreatMatch SecurityDetectionsAPIThreatMatchRuleType = "threat_match"
+)
-	// Id The unique identifier for the parameter.
-	Id *string `json:"id,omitempty"`
+// Defines values for SecurityDetectionsAPIThreatMatchRuleCreateFieldsType.
+const (
+	SecurityDetectionsAPIThreatMatchRuleCreateFieldsTypeThreatMatch SecurityDetectionsAPIThreatMatchRuleCreateFieldsType = "threat_match"
+)
-	// Key The parameter key.
-	Key *string `json:"key,omitempty"`
+// Defines values for SecurityDetectionsAPIThreatMatchRuleCreatePropsType.
+const (
+	SecurityDetectionsAPIThreatMatchRuleCreatePropsTypeThreatMatch SecurityDetectionsAPIThreatMatchRuleCreatePropsType = "threat_match"
+)
-	// ShareAcrossSpaces Indicates whether the parameter is shared across spaces.
-	ShareAcrossSpaces *bool `json:"share_across_spaces,omitempty"`
+// Defines values for SecurityDetectionsAPIThreatMatchRulePatchFieldsType.
+const (
+	SecurityDetectionsAPIThreatMatchRulePatchFieldsTypeThreatMatch SecurityDetectionsAPIThreatMatchRulePatchFieldsType = "threat_match"
+)
-	// Tags An array of tags associated with the parameter.
-	Tags *[]string `json:"tags,omitempty"`
+// Defines values for SecurityDetectionsAPIThreatMatchRulePatchPropsType.
+const (
+	SecurityDetectionsAPIThreatMatchRulePatchPropsTypeThreatMatch SecurityDetectionsAPIThreatMatchRulePatchPropsType = "threat_match"
+)
-	// Value The value associated with the parameter.
-	Value *string `json:"value,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIThreatMatchRuleRequiredFieldsType.
+const (
+	SecurityDetectionsAPIThreatMatchRuleRequiredFieldsTypeThreatMatch SecurityDetectionsAPIThreatMatchRuleRequiredFieldsType = "threat_match"
+)
-// AgentPolicy defines model for agent_policy.
-type AgentPolicy struct {
-	AdvancedSettings *struct {
-		AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory,omitempty"`
-		AgentDownloadTimeout interface{} `json:"agent_download_timeout,omitempty"`
-		AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs,omitempty"`
-		AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval,omitempty"`
-		AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles,omitempty"`
-		AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes,omitempty"`
-		AgentLoggingLevel interface{} `json:"agent_logging_level,omitempty"`
-		AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period,omitempty"`
-		AgentLoggingToFiles interface{} `json:"agent_logging_to_files,omitempty"`
-	} `json:"advanced_settings,omitempty"`
-	AgentFeatures *[]struct {
-		Enabled bool `json:"enabled"`
-		Name string `json:"name"`
-	} `json:"agent_features,omitempty"`
-	Agentless *struct {
-		CloudConnectors *struct {
-			Enabled bool `json:"enabled"`
-			TargetCsp *string `json:"target_csp,omitempty"`
-		} `json:"cloud_connectors,omitempty"`
-		Resources *struct {
-			Requests *struct {
-				Cpu *string `json:"cpu,omitempty"`
-				Memory *string `json:"memory,omitempty"`
-			} `json:"requests,omitempty"`
-		} `json:"resources,omitempty"`
-	} `json:"agentless,omitempty"`
-	Agents *float32 `json:"agents,omitempty"`
-	DataOutputId *string `json:"data_output_id,omitempty"`
-	Description *string `json:"description,omitempty"`
-	DownloadSourceId *string `json:"download_source_id,omitempty"`
-	FleetServerHostId *string `json:"fleet_server_host_id,omitempty"`
+// Defines values for SecurityDetectionsAPIThreatMatchRuleResponseFieldsType.
+const (
+	SecurityDetectionsAPIThreatMatchRuleResponseFieldsTypeThreatMatch SecurityDetectionsAPIThreatMatchRuleResponseFieldsType = "threat_match"
+)
-	// GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers.
-	GlobalDataTags *[]AgentPolicyGlobalDataTagsItem `json:"global_data_tags,omitempty"`
-	HasFleetServer *bool `json:"has_fleet_server,omitempty"`
-	Id string `json:"id"`
-	InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"`
-	IsDefault *bool `json:"is_default,omitempty"`
-	IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"`
-	IsManaged bool `json:"is_managed"`
-	IsPreconfigured *bool `json:"is_preconfigured,omitempty"`
+// Defines values for SecurityDetectionsAPIThreatMatchRuleUpdatePropsType.
+const (
+	SecurityDetectionsAPIThreatMatchRuleUpdatePropsTypeThreatMatch SecurityDetectionsAPIThreatMatchRuleUpdatePropsType = "threat_match"
+)
-	// IsProtected Indicates whether the agent policy has tamper protection enabled. Default false.
-	IsProtected bool `json:"is_protected"`
+// Defines values for SecurityDetectionsAPIThresholdRuleType.
+const (
+	SecurityDetectionsAPIThresholdRuleTypeThreshold SecurityDetectionsAPIThresholdRuleType = "threshold"
+)
-	// KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled
-	KeepMonitoringAlive *bool `json:"keep_monitoring_alive,omitempty"`
-	MonitoringDiagnostics *struct {
-		Limit *struct {
-			Burst *float32 `json:"burst,omitempty"`
-			Interval *string `json:"interval,omitempty"`
-		} `json:"limit,omitempty"`
-		Uploader *struct {
-			InitDur *string `json:"init_dur,omitempty"`
-			MaxDur *string `json:"max_dur,omitempty"`
-			MaxRetries *float32 `json:"max_retries,omitempty"`
-		} `json:"uploader,omitempty"`
-	} `json:"monitoring_diagnostics,omitempty"`
-	MonitoringEnabled *[]AgentPolicyMonitoringEnabled `json:"monitoring_enabled,omitempty"`
-	MonitoringHttp *struct {
-		Buffer *struct {
-			Enabled *bool `json:"enabled,omitempty"`
-		} `json:"buffer,omitempty"`
-		Enabled *bool `json:"enabled,omitempty"`
-		Host *string `json:"host,omitempty"`
-		Port *float32 `json:"port,omitempty"`
-	} `json:"monitoring_http,omitempty"`
-	MonitoringOutputId *string `json:"monitoring_output_id,omitempty"`
-	MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"`
-	Name string `json:"name"`
-	Namespace string `json:"namespace"`
+// Defines values for SecurityDetectionsAPIThresholdRuleCreateFieldsType.
+const (
+	SecurityDetectionsAPIThresholdRuleCreateFieldsTypeThreshold SecurityDetectionsAPIThresholdRuleCreateFieldsType = "threshold"
+)
-	// Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure.
-	Overrides *map[string]interface{} `json:"overrides,omitempty"`
-	PackagePolicies *AgentPolicy_PackagePolicies `json:"package_policies,omitempty"`
-	RequiredVersions *[]struct {
-		// Percentage Target percentage of agents to auto upgrade
-		Percentage float32 `json:"percentage"`
+// Defines values for SecurityDetectionsAPIThresholdRuleCreatePropsType.
+const (
+	SecurityDetectionsAPIThresholdRuleCreatePropsTypeThreshold SecurityDetectionsAPIThresholdRuleCreatePropsType = "threshold"
+)
-		// Version Target version for automatic agent upgrade
-		Version string `json:"version"`
-	} `json:"required_versions,omitempty"`
-	Revision float32 `json:"revision"`
-	SchemaVersion *string `json:"schema_version,omitempty"`
-	SpaceIds *[]string `json:"space_ids,omitempty"`
-	Status AgentPolicyStatus `json:"status"`
+// Defines values for SecurityDetectionsAPIThresholdRulePatchFieldsType.
+const (
+	SecurityDetectionsAPIThresholdRulePatchFieldsTypeThreshold SecurityDetectionsAPIThresholdRulePatchFieldsType = "threshold"
+)
-	// SupportsAgentless Indicates whether the agent policy supports agentless integrations.
-	SupportsAgentless *bool `json:"supports_agentless,omitempty"`
-	UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"`
-	UnprivilegedAgents *float32 `json:"unprivileged_agents,omitempty"`
-	UpdatedAt string `json:"updated_at"`
-	UpdatedBy string `json:"updated_by"`
-	Version *string `json:"version,omitempty"`
-}
+// Defines values for SecurityDetectionsAPIThresholdRulePatchPropsType.
+const (
+	SecurityDetectionsAPIThresholdRulePatchPropsTypeThreshold SecurityDetectionsAPIThresholdRulePatchPropsType = "threshold"
+)
-// AgentPolicyMonitoringEnabled defines model for AgentPolicy.MonitoringEnabled.
-type AgentPolicyMonitoringEnabled string
+// Defines values for SecurityDetectionsAPIThresholdRuleRequiredFieldsType.
+const (
+	SecurityDetectionsAPIThresholdRuleRequiredFieldsTypeThreshold SecurityDetectionsAPIThresholdRuleRequiredFieldsType = "threshold"
+)
-// AgentPolicyPackagePolicies0 defines model for .
-type AgentPolicyPackagePolicies0 = []string
+// Defines values for SecurityDetectionsAPIThresholdRuleResponseFieldsType.
+const (
+	SecurityDetectionsAPIThresholdRuleResponseFieldsTypeThreshold SecurityDetectionsAPIThresholdRuleResponseFieldsType = "threshold"
+)
-// AgentPolicyPackagePolicies1 This field is present only when retrieving a single agent policy, or when retrieving a list of agent policies with the ?full=true parameter
-type AgentPolicyPackagePolicies1 = []struct {
-	// AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy.
-	AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions,omitempty"`
-	Agents *float32 `json:"agents,omitempty"`
-	CreatedAt string `json:"created_at"`
-	CreatedBy string `json:"created_by"`
+// Defines values for SecurityDetectionsAPIThresholdRuleUpdatePropsType.
+const (
+	SecurityDetectionsAPIThresholdRuleUpdatePropsTypeThreshold SecurityDetectionsAPIThresholdRuleUpdatePropsType = "threshold"
+)
-	// Description Package policy description
-	Description *string `json:"description,omitempty"`
-	Elasticsearch *AgentPolicy_PackagePolicies_1_Elasticsearch `json:"elasticsearch,omitempty"`
-	Enabled bool `json:"enabled"`
-	Id string `json:"id"`
-	Inputs AgentPolicy_PackagePolicies_1_Inputs `json:"inputs"`
-	IsManaged *bool `json:"is_managed,omitempty"`
+// Defines values for SecurityDetectionsAPIThrottleForBulkActions.
+const (
+	SecurityDetectionsAPIThrottleForBulkActionsN1d SecurityDetectionsAPIThrottleForBulkActions = "1d"
+	SecurityDetectionsAPIThrottleForBulkActionsN1h SecurityDetectionsAPIThrottleForBulkActions = "1h"
+	SecurityDetectionsAPIThrottleForBulkActionsN7d SecurityDetectionsAPIThrottleForBulkActions = "7d"
+	SecurityDetectionsAPIThrottleForBulkActionsRule SecurityDetectionsAPIThrottleForBulkActions = "rule"
+)
-	// Name Package policy name (should be unique)
-	Name string `json:"name"`
+// Defines values for SecurityEndpointExceptionsAPIExceptionListItemEntryExistsType.
+const (
+	SecurityEndpointExceptionsAPIExceptionListItemEntryExistsTypeExists SecurityEndpointExceptionsAPIExceptionListItemEntryExistsType = "exists"
+)
-	// Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace.
-	Namespace *string `json:"namespace,omitempty"`
-	OutputId *string `json:"output_id,omitempty"`
+// Defines values for SecurityEndpointExceptionsAPIExceptionListItemEntryListType.
+const (
+	SecurityEndpointExceptionsAPIExceptionListItemEntryListTypeList SecurityEndpointExceptionsAPIExceptionListItemEntryListType = "list"
+)
-	// Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure.
-	Overrides *struct {
-		Inputs *map[string]interface{} `json:"inputs,omitempty"`
-	} `json:"overrides,omitempty"`
-	Package *struct {
-		ExperimentalDataStreamFeatures *[]struct {
-			DataStream string `json:"data_stream"`
-			Features struct {
-				DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"`
-				DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"`
-				SyntheticSource *bool `json:"synthetic_source,omitempty"`
-				Tsdb *bool `json:"tsdb,omitempty"`
-			} `json:"features"`
-		} `json:"experimental_data_stream_features,omitempty"`
+// Defines values for SecurityEndpointExceptionsAPIExceptionListItemEntryMatchType.
+const (
+	SecurityEndpointExceptionsAPIExceptionListItemEntryMatchTypeMatch SecurityEndpointExceptionsAPIExceptionListItemEntryMatchType = "match"
+)
-		// Name Package name
-		Name string `json:"name"`
-		RequiresRoot *bool `json:"requires_root,omitempty"`
-		Title *string `json:"title,omitempty"`
+// Defines values for SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAnyType.
+const (
+	SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAnyTypeMatchAny SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAnyType = "match_any"
+)
-		// Version Package version
-		Version string `json:"version"`
-	} `json:"package,omitempty"`
+// Defines values for SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcardType.
+const (
+	SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcardTypeWildcard SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcardType = "wildcard"
+)
-	// PolicyId Agent policy ID where that package policy will be added
-	// Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set
-	PolicyId *string `json:"policy_id,omitempty"`
-	PolicyIds *[]string `json:"policy_ids,omitempty"`
-	Revision float32 `json:"revision"`
-	SecretReferences *[]struct {
-		Id string `json:"id"`
-	} `json:"secret_references,omitempty"`
-	SpaceIds *[]string `json:"spaceIds,omitempty"`
+// Defines values for SecurityEndpointExceptionsAPIExceptionListItemEntryNestedType.
+const (
+	SecurityEndpointExceptionsAPIExceptionListItemEntryNestedTypeNested SecurityEndpointExceptionsAPIExceptionListItemEntryNestedType = "nested"
+)
-	// SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy.
-	SupportsAgentless *bool `json:"supports_agentless,omitempty"`
-	UpdatedAt string `json:"updated_at"`
-	UpdatedBy string `json:"updated_by"`
-	Vars *AgentPolicy_PackagePolicies_1_Vars `json:"vars,omitempty"`
-	Version *string `json:"version,omitempty"`
-}
+// Defines values for SecurityEndpointExceptionsAPIExceptionListItemEntryOperator.
+const (
+	SecurityEndpointExceptionsAPIExceptionListItemEntryOperatorExcluded SecurityEndpointExceptionsAPIExceptionListItemEntryOperator = "excluded"
+	SecurityEndpointExceptionsAPIExceptionListItemEntryOperatorIncluded SecurityEndpointExceptionsAPIExceptionListItemEntryOperator = "included"
+)
-// AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges defines model for AgentPolicy.PackagePolicies.1.Elasticsearch.Privileges.
-type AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges struct {
-	Cluster *[]string `json:"cluster,omitempty"`
-	AdditionalProperties map[string]interface{} `json:"-"`
-}
+// Defines values for SecurityEndpointExceptionsAPIExceptionListItemType.
+const (
+	SecurityEndpointExceptionsAPIExceptionListItemTypeSimple SecurityEndpointExceptionsAPIExceptionListItemType = "simple"
+)
-// AgentPolicy_PackagePolicies_1_Elasticsearch defines model for AgentPolicy.PackagePolicies.1.Elasticsearch.
-type AgentPolicy_PackagePolicies_1_Elasticsearch struct {
-	Privileges *AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges `json:"privileges,omitempty"`
-	AdditionalProperties map[string]interface{} `json:"-"`
-}
+// Defines values for SecurityEndpointExceptionsAPIExceptionListOsType.
+const (
+	SecurityEndpointExceptionsAPIExceptionListOsTypeLinux SecurityEndpointExceptionsAPIExceptionListOsType = "linux"
+	SecurityEndpointExceptionsAPIExceptionListOsTypeMacos SecurityEndpointExceptionsAPIExceptionListOsType = "macos"
+	SecurityEndpointExceptionsAPIExceptionListOsTypeWindows SecurityEndpointExceptionsAPIExceptionListOsType = "windows"
+)
-// AgentPolicyPackagePolicies1Inputs0 defines model for .
-type AgentPolicyPackagePolicies1Inputs0 = []struct {
-	CompiledInput interface{} `json:"compiled_input"`
+// Defines values for SecurityEndpointExceptionsAPIExceptionListType.
+const (
+	SecurityEndpointExceptionsAPIExceptionListTypeDetection SecurityEndpointExceptionsAPIExceptionListType = "detection"
+	SecurityEndpointExceptionsAPIExceptionListTypeEndpoint SecurityEndpointExceptionsAPIExceptionListType = "endpoint"
+	SecurityEndpointExceptionsAPIExceptionListTypeEndpointBlocklists SecurityEndpointExceptionsAPIExceptionListType = "endpoint_blocklists"
+	SecurityEndpointExceptionsAPIExceptionListTypeEndpointEvents SecurityEndpointExceptionsAPIExceptionListType = "endpoint_events"
+	SecurityEndpointExceptionsAPIExceptionListTypeEndpointHostIsolationExceptions SecurityEndpointExceptionsAPIExceptionListType = "endpoint_host_isolation_exceptions"
+	SecurityEndpointExceptionsAPIExceptionListTypeEndpointTrustedApps SecurityEndpointExceptionsAPIExceptionListType = "endpoint_trusted_apps"
+	SecurityEndpointExceptionsAPIExceptionListTypeEndpointTrustedDevices SecurityEndpointExceptionsAPIExceptionListType = "endpoint_trusted_devices"
+	SecurityEndpointExceptionsAPIExceptionListTypeRuleDefault SecurityEndpointExceptionsAPIExceptionListType = "rule_default"
+)
-	// Config Package variable (see integration documentation for more information)
-	Config *map[string]struct {
-		Frozen *bool `json:"frozen,omitempty"`
-		Type *string `json:"type,omitempty"`
-		Value interface{} `json:"value"`
-	} `json:"config,omitempty"`
-	Enabled bool `json:"enabled"`
-	Id *string `json:"id,omitempty"`
-	KeepEnabled *bool `json:"keep_enabled,omitempty"`
-	PolicyTemplate *string `json:"policy_template,omitempty"`
-	Streams []struct {
-		CompiledStream interface{} `json:"compiled_stream"`
+// Defines values for SecurityEndpointExceptionsAPIExceptionNamespaceType.
+const ( + SecurityEndpointExceptionsAPIExceptionNamespaceTypeAgnostic SecurityEndpointExceptionsAPIExceptionNamespaceType = "agnostic" + SecurityEndpointExceptionsAPIExceptionNamespaceTypeSingle SecurityEndpointExceptionsAPIExceptionNamespaceType = "single" +) - // Config Package variable (see integration documentation for more information) - Config *map[string]struct { - Frozen *bool `json:"frozen,omitempty"` - Type *string `json:"type,omitempty"` - Value interface{} `json:"value"` - } `json:"config,omitempty"` - DataStream struct { - Dataset string `json:"dataset"` - Elasticsearch *struct { - DynamicDataset *bool `json:"dynamic_dataset,omitempty"` - DynamicNamespace *bool `json:"dynamic_namespace,omitempty"` - Privileges *struct { - Indices *[]string `json:"indices,omitempty"` - } `json:"privileges,omitempty"` - } `json:"elasticsearch,omitempty"` - Type string `json:"type"` - } `json:"data_stream"` - Enabled bool `json:"enabled"` - Id *string `json:"id,omitempty"` - KeepEnabled *bool `json:"keep_enabled,omitempty"` - Release *AgentPolicyPackagePolicies1Inputs0StreamsRelease `json:"release,omitempty"` +// Defines values for SecurityEndpointExceptionsAPIListType. +const ( + SecurityEndpointExceptionsAPIListTypeBinary SecurityEndpointExceptionsAPIListType = "binary" + SecurityEndpointExceptionsAPIListTypeBoolean SecurityEndpointExceptionsAPIListType = "boolean" + SecurityEndpointExceptionsAPIListTypeByte SecurityEndpointExceptionsAPIListType = "byte" + SecurityEndpointExceptionsAPIListTypeDate SecurityEndpointExceptionsAPIListType = "date" + SecurityEndpointExceptionsAPIListTypeDateNanos SecurityEndpointExceptionsAPIListType = "date_nanos" + SecurityEndpointExceptionsAPIListTypeDateRange SecurityEndpointExceptionsAPIListType = "date_range" + SecurityEndpointExceptionsAPIListTypeDouble SecurityEndpointExceptionsAPIListType = "double" + SecurityEndpointExceptionsAPIListTypeDoubleRange SecurityEndpointExceptionsAPIListType = "double_range" + SecurityEndpointExceptionsAPIListTypeFloat SecurityEndpointExceptionsAPIListType = "float" + SecurityEndpointExceptionsAPIListTypeFloatRange SecurityEndpointExceptionsAPIListType = "float_range" + SecurityEndpointExceptionsAPIListTypeGeoPoint SecurityEndpointExceptionsAPIListType = "geo_point" + SecurityEndpointExceptionsAPIListTypeGeoShape SecurityEndpointExceptionsAPIListType = "geo_shape" + SecurityEndpointExceptionsAPIListTypeHalfFloat SecurityEndpointExceptionsAPIListType = "half_float" + SecurityEndpointExceptionsAPIListTypeInteger SecurityEndpointExceptionsAPIListType = "integer" + SecurityEndpointExceptionsAPIListTypeIntegerRange SecurityEndpointExceptionsAPIListType = "integer_range" + SecurityEndpointExceptionsAPIListTypeIp SecurityEndpointExceptionsAPIListType = "ip" + SecurityEndpointExceptionsAPIListTypeIpRange SecurityEndpointExceptionsAPIListType = "ip_range" + SecurityEndpointExceptionsAPIListTypeKeyword SecurityEndpointExceptionsAPIListType = "keyword" + SecurityEndpointExceptionsAPIListTypeLong SecurityEndpointExceptionsAPIListType = "long" + SecurityEndpointExceptionsAPIListTypeLongRange SecurityEndpointExceptionsAPIListType = "long_range" + SecurityEndpointExceptionsAPIListTypeShape SecurityEndpointExceptionsAPIListType = "shape" + SecurityEndpointExceptionsAPIListTypeShort SecurityEndpointExceptionsAPIListType = "short" + SecurityEndpointExceptionsAPIListTypeText SecurityEndpointExceptionsAPIListType = "text" +) - // Vars Package variable (see integration documentation for more information) - Vars *map[string]struct { - Frozen *bool 
`json:"frozen,omitempty"` - Type *string `json:"type,omitempty"` - Value interface{} `json:"value"` - } `json:"vars,omitempty"` - } `json:"streams"` - Type string `json:"type"` +// Defines values for SecurityEndpointManagementAPIAgentTypes. +const ( + SecurityEndpointManagementAPIAgentTypesCrowdstrike SecurityEndpointManagementAPIAgentTypes = "crowdstrike" + SecurityEndpointManagementAPIAgentTypesEndpoint SecurityEndpointManagementAPIAgentTypes = "endpoint" + SecurityEndpointManagementAPIAgentTypesMicrosoftDefenderEndpoint SecurityEndpointManagementAPIAgentTypes = "microsoft_defender_endpoint" + SecurityEndpointManagementAPIAgentTypesSentinelOne SecurityEndpointManagementAPIAgentTypes = "sentinel_one" +) - // Vars Package variable (see integration documentation for more information) - Vars *map[string]struct { - Frozen *bool `json:"frozen,omitempty"` - Type *string `json:"type,omitempty"` - Value interface{} `json:"value"` - } `json:"vars,omitempty"` -} +// Defines values for SecurityEndpointManagementAPICommand. +const ( + SecurityEndpointManagementAPICommandExecute SecurityEndpointManagementAPICommand = "execute" + SecurityEndpointManagementAPICommandGetFile SecurityEndpointManagementAPICommand = "get-file" + SecurityEndpointManagementAPICommandIsolate SecurityEndpointManagementAPICommand = "isolate" + SecurityEndpointManagementAPICommandKillProcess SecurityEndpointManagementAPICommand = "kill-process" + SecurityEndpointManagementAPICommandRunningProcesses SecurityEndpointManagementAPICommand = "running-processes" + SecurityEndpointManagementAPICommandScan SecurityEndpointManagementAPICommand = "scan" + SecurityEndpointManagementAPICommandSuspendProcess SecurityEndpointManagementAPICommand = "suspend-process" + SecurityEndpointManagementAPICommandUnisolate SecurityEndpointManagementAPICommand = "unisolate" + SecurityEndpointManagementAPICommandUpload SecurityEndpointManagementAPICommand = "upload" +) -// AgentPolicyPackagePolicies1Inputs0StreamsRelease defines model for AgentPolicy.PackagePolicies.1.Inputs.0.Streams.Release. -type AgentPolicyPackagePolicies1Inputs0StreamsRelease string +// Defines values for SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsType. +const ( + SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsTypeJson SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsType = "json" + SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsTypeText SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsType = "text" +) -// AgentPolicyPackagePolicies1Inputs1 Package policy inputs (see integration documentation to know what inputs are available) -type AgentPolicyPackagePolicies1Inputs1 map[string]struct { - // Enabled enable or disable that input, (default to true) - Enabled *bool `json:"enabled,omitempty"` +// Defines values for SecurityEndpointManagementAPISortDirection. +const ( + SecurityEndpointManagementAPISortDirectionAsc SecurityEndpointManagementAPISortDirection = "asc" + SecurityEndpointManagementAPISortDirectionDesc SecurityEndpointManagementAPISortDirection = "desc" +) - // Streams Input streams (see integration documentation to know what streams are available) - Streams *map[string]struct { - // Enabled enable or disable that stream, (default to true) - Enabled *bool `json:"enabled,omitempty"` +// Defines values for SecurityEndpointManagementAPISortField. 
+const ( + EnrolledAt SecurityEndpointManagementAPISortField = "enrolled_at" + HostStatus SecurityEndpointManagementAPISortField = "host_status" + LastCheckin SecurityEndpointManagementAPISortField = "last_checkin" + MetadataAgentVersion SecurityEndpointManagementAPISortField = "metadata.agent.version" + MetadataEndpointPolicyAppliedName SecurityEndpointManagementAPISortField = "metadata.Endpoint.policy.applied.name" + MetadataEndpointPolicyAppliedStatus SecurityEndpointManagementAPISortField = "metadata.Endpoint.policy.applied.status" + MetadataHostHostname SecurityEndpointManagementAPISortField = "metadata.host.hostname" + MetadataHostIp SecurityEndpointManagementAPISortField = "metadata.host.ip" + MetadataHostOsName SecurityEndpointManagementAPISortField = "metadata.host.os.name" +) - // Vars Input/stream level variable (see integration documentation for more information) - Vars *map[string]*AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties `json:"vars,omitempty"` - } `json:"streams,omitempty"` +// Defines values for SecurityEndpointManagementAPIType. +const ( + Automated SecurityEndpointManagementAPIType = "automated" + Manual SecurityEndpointManagementAPIType = "manual" +) - // Vars Input/stream level variable (see integration documentation for more information) - Vars *map[string]*AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties `json:"vars,omitempty"` -} +// Defines values for SecurityEntityAnalyticsAPIAssetCriticalityLevel. +const ( + SecurityEntityAnalyticsAPIAssetCriticalityLevelExtremeImpact SecurityEntityAnalyticsAPIAssetCriticalityLevel = "extreme_impact" + SecurityEntityAnalyticsAPIAssetCriticalityLevelHighImpact SecurityEntityAnalyticsAPIAssetCriticalityLevel = "high_impact" + SecurityEntityAnalyticsAPIAssetCriticalityLevelLowImpact SecurityEntityAnalyticsAPIAssetCriticalityLevel = "low_impact" + SecurityEntityAnalyticsAPIAssetCriticalityLevelMediumImpact SecurityEntityAnalyticsAPIAssetCriticalityLevel = "medium_impact" +) -// AgentPolicyPackagePolicies1Inputs1StreamsVars0 defines model for . -type AgentPolicyPackagePolicies1Inputs1StreamsVars0 = bool +// Defines values for SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUpload. +const ( + SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUploadExtremeImpact SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUpload = "extreme_impact" + SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUploadHighImpact SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUpload = "high_impact" + SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUploadLowImpact SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUpload = "low_impact" + SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUploadMediumImpact SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUpload = "medium_impact" + SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUploadUnassigned SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUpload = "unassigned" +) -// AgentPolicyPackagePolicies1Inputs1StreamsVars1 defines model for . -type AgentPolicyPackagePolicies1Inputs1StreamsVars1 = string +// Defines values for SecurityEntityAnalyticsAPIEngineComponentResource. 
+const ( + SecurityEntityAnalyticsAPIEngineComponentResourceComponentTemplate SecurityEntityAnalyticsAPIEngineComponentResource = "component_template" + SecurityEntityAnalyticsAPIEngineComponentResourceEnrichPolicy SecurityEntityAnalyticsAPIEngineComponentResource = "enrich_policy" + SecurityEntityAnalyticsAPIEngineComponentResourceEntityDefinition SecurityEntityAnalyticsAPIEngineComponentResource = "entity_definition" + SecurityEntityAnalyticsAPIEngineComponentResourceEntityEngine SecurityEntityAnalyticsAPIEngineComponentResource = "entity_engine" + SecurityEntityAnalyticsAPIEngineComponentResourceIndex SecurityEntityAnalyticsAPIEngineComponentResource = "index" + SecurityEntityAnalyticsAPIEngineComponentResourceIndexTemplate SecurityEntityAnalyticsAPIEngineComponentResource = "index_template" + SecurityEntityAnalyticsAPIEngineComponentResourceIngestPipeline SecurityEntityAnalyticsAPIEngineComponentResource = "ingest_pipeline" + SecurityEntityAnalyticsAPIEngineComponentResourceTask SecurityEntityAnalyticsAPIEngineComponentResource = "task" + SecurityEntityAnalyticsAPIEngineComponentResourceTransform SecurityEntityAnalyticsAPIEngineComponentResource = "transform" +) -// AgentPolicyPackagePolicies1Inputs1StreamsVars2 defines model for . -type AgentPolicyPackagePolicies1Inputs1StreamsVars2 = float32 +// Defines values for SecurityEntityAnalyticsAPIEngineComponentStatusHealth. +const ( + SecurityEntityAnalyticsAPIEngineComponentStatusHealthGreen SecurityEntityAnalyticsAPIEngineComponentStatusHealth = "green" + SecurityEntityAnalyticsAPIEngineComponentStatusHealthRed SecurityEntityAnalyticsAPIEngineComponentStatusHealth = "red" + SecurityEntityAnalyticsAPIEngineComponentStatusHealthUnavailable SecurityEntityAnalyticsAPIEngineComponentStatusHealth = "unavailable" + SecurityEntityAnalyticsAPIEngineComponentStatusHealthUnknown SecurityEntityAnalyticsAPIEngineComponentStatusHealth = "unknown" + SecurityEntityAnalyticsAPIEngineComponentStatusHealthYellow SecurityEntityAnalyticsAPIEngineComponentStatusHealth = "yellow" +) -// AgentPolicyPackagePolicies1Inputs1StreamsVars3 defines model for . -type AgentPolicyPackagePolicies1Inputs1StreamsVars3 = []string +// Defines values for SecurityEntityAnalyticsAPIEngineDescriptorErrorAction. +const ( + Init SecurityEntityAnalyticsAPIEngineDescriptorErrorAction = "init" +) -// AgentPolicyPackagePolicies1Inputs1StreamsVars4 defines model for . -type AgentPolicyPackagePolicies1Inputs1StreamsVars4 = []float32 +// Defines values for SecurityEntityAnalyticsAPIEngineStatus. +const ( + SecurityEntityAnalyticsAPIEngineStatusError SecurityEntityAnalyticsAPIEngineStatus = "error" + SecurityEntityAnalyticsAPIEngineStatusInstalling SecurityEntityAnalyticsAPIEngineStatus = "installing" + SecurityEntityAnalyticsAPIEngineStatusStarted SecurityEntityAnalyticsAPIEngineStatus = "started" + SecurityEntityAnalyticsAPIEngineStatusStopped SecurityEntityAnalyticsAPIEngineStatus = "stopped" + SecurityEntityAnalyticsAPIEngineStatusUpdating SecurityEntityAnalyticsAPIEngineStatus = "updating" +) -// AgentPolicyPackagePolicies1Inputs1StreamsVars5 defines model for . -type AgentPolicyPackagePolicies1Inputs1StreamsVars5 struct { - Id string `json:"id"` - IsSecretRef bool `json:"isSecretRef"` -} +// Defines values for SecurityEntityAnalyticsAPIEntityRiskLevels. 
+const ( + SecurityEntityAnalyticsAPIEntityRiskLevelsCritical SecurityEntityAnalyticsAPIEntityRiskLevels = "Critical" + SecurityEntityAnalyticsAPIEntityRiskLevelsHigh SecurityEntityAnalyticsAPIEntityRiskLevels = "High" + SecurityEntityAnalyticsAPIEntityRiskLevelsLow SecurityEntityAnalyticsAPIEntityRiskLevels = "Low" + SecurityEntityAnalyticsAPIEntityRiskLevelsModerate SecurityEntityAnalyticsAPIEntityRiskLevels = "Moderate" + SecurityEntityAnalyticsAPIEntityRiskLevelsUnknown SecurityEntityAnalyticsAPIEntityRiskLevels = "Unknown" +) -// AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties defines model for AgentPolicy.PackagePolicies.1.Inputs.1.Streams.Vars.AdditionalProperties. -type AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties struct { - union json.RawMessage -} +// Defines values for SecurityEntityAnalyticsAPIEntityType. +const ( + SecurityEntityAnalyticsAPIEntityTypeGeneric SecurityEntityAnalyticsAPIEntityType = "generic" + SecurityEntityAnalyticsAPIEntityTypeHost SecurityEntityAnalyticsAPIEntityType = "host" + SecurityEntityAnalyticsAPIEntityTypeService SecurityEntityAnalyticsAPIEntityType = "service" + SecurityEntityAnalyticsAPIEntityTypeUser SecurityEntityAnalyticsAPIEntityType = "user" +) -// AgentPolicyPackagePolicies1Inputs1Vars0 defines model for . -type AgentPolicyPackagePolicies1Inputs1Vars0 = bool +// Defines values for SecurityEntityAnalyticsAPIIdField. +const ( + SecurityEntityAnalyticsAPIIdFieldEntityId SecurityEntityAnalyticsAPIIdField = "entity.id" + SecurityEntityAnalyticsAPIIdFieldHostName SecurityEntityAnalyticsAPIIdField = "host.name" + SecurityEntityAnalyticsAPIIdFieldServiceName SecurityEntityAnalyticsAPIIdField = "service.name" + SecurityEntityAnalyticsAPIIdFieldUserName SecurityEntityAnalyticsAPIIdField = "user.name" +) -// AgentPolicyPackagePolicies1Inputs1Vars1 defines model for . -type AgentPolicyPackagePolicies1Inputs1Vars1 = string +// Defines values for SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatus. +const ( + SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatusDisabled SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatus = "disabled" + SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatusError SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatus = "error" + SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatusNotInstalled SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatus = "not_installed" + SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatusStarted SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatus = "started" +) -// AgentPolicyPackagePolicies1Inputs1Vars2 defines model for . -type AgentPolicyPackagePolicies1Inputs1Vars2 = float32 +// Defines values for SecurityEntityAnalyticsAPIStoreStatus. +const ( + SecurityEntityAnalyticsAPIStoreStatusError SecurityEntityAnalyticsAPIStoreStatus = "error" + SecurityEntityAnalyticsAPIStoreStatusInstalling SecurityEntityAnalyticsAPIStoreStatus = "installing" + SecurityEntityAnalyticsAPIStoreStatusNotInstalled SecurityEntityAnalyticsAPIStoreStatus = "not_installed" + SecurityEntityAnalyticsAPIStoreStatusRunning SecurityEntityAnalyticsAPIStoreStatus = "running" + SecurityEntityAnalyticsAPIStoreStatusStopped SecurityEntityAnalyticsAPIStoreStatus = "stopped" +) -// AgentPolicyPackagePolicies1Inputs1Vars3 defines model for . -type AgentPolicyPackagePolicies1Inputs1Vars3 = []string +// Defines values for SecurityExceptionsAPIExceptionListItemEntryExistsType. 
+const ( + SecurityExceptionsAPIExceptionListItemEntryExistsTypeExists SecurityExceptionsAPIExceptionListItemEntryExistsType = "exists" +) -// AgentPolicyPackagePolicies1Inputs1Vars4 defines model for . -type AgentPolicyPackagePolicies1Inputs1Vars4 = []float32 +// Defines values for SecurityExceptionsAPIExceptionListItemEntryListType. +const ( + SecurityExceptionsAPIExceptionListItemEntryListTypeList SecurityExceptionsAPIExceptionListItemEntryListType = "list" +) -// AgentPolicyPackagePolicies1Inputs1Vars5 defines model for . -type AgentPolicyPackagePolicies1Inputs1Vars5 struct { - Id string `json:"id"` - IsSecretRef bool `json:"isSecretRef"` -} +// Defines values for SecurityExceptionsAPIExceptionListItemEntryMatchType. +const ( + SecurityExceptionsAPIExceptionListItemEntryMatchTypeMatch SecurityExceptionsAPIExceptionListItemEntryMatchType = "match" +) -// AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties defines model for AgentPolicy.PackagePolicies.1.Inputs.1.Vars.AdditionalProperties. -type AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties struct { - union json.RawMessage -} +// Defines values for SecurityExceptionsAPIExceptionListItemEntryMatchAnyType. +const ( + SecurityExceptionsAPIExceptionListItemEntryMatchAnyTypeMatchAny SecurityExceptionsAPIExceptionListItemEntryMatchAnyType = "match_any" +) -// AgentPolicy_PackagePolicies_1_Inputs defines model for AgentPolicy.PackagePolicies.1.Inputs. -type AgentPolicy_PackagePolicies_1_Inputs struct { - union json.RawMessage -} +// Defines values for SecurityExceptionsAPIExceptionListItemEntryMatchWildcardType. +const ( + SecurityExceptionsAPIExceptionListItemEntryMatchWildcardTypeWildcard SecurityExceptionsAPIExceptionListItemEntryMatchWildcardType = "wildcard" +) -// AgentPolicyPackagePolicies1Vars0 Package variable (see integration documentation for more information) -type AgentPolicyPackagePolicies1Vars0 map[string]struct { - Frozen *bool `json:"frozen,omitempty"` - Type *string `json:"type,omitempty"` - Value interface{} `json:"value"` -} +// Defines values for SecurityExceptionsAPIExceptionListItemEntryNestedType. +const ( + SecurityExceptionsAPIExceptionListItemEntryNestedTypeNested SecurityExceptionsAPIExceptionListItemEntryNestedType = "nested" +) -// AgentPolicyPackagePolicies1Vars1 Input/stream level variable (see integration documentation for more information) -type AgentPolicyPackagePolicies1Vars1 map[string]*AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties +// Defines values for SecurityExceptionsAPIExceptionListItemEntryOperator. +const ( + SecurityExceptionsAPIExceptionListItemEntryOperatorExcluded SecurityExceptionsAPIExceptionListItemEntryOperator = "excluded" + SecurityExceptionsAPIExceptionListItemEntryOperatorIncluded SecurityExceptionsAPIExceptionListItemEntryOperator = "included" +) -// AgentPolicyPackagePolicies1Vars10 defines model for . -type AgentPolicyPackagePolicies1Vars10 = bool +// Defines values for SecurityExceptionsAPIExceptionListItemType. +const ( + SecurityExceptionsAPIExceptionListItemTypeSimple SecurityExceptionsAPIExceptionListItemType = "simple" +) -// AgentPolicyPackagePolicies1Vars11 defines model for . -type AgentPolicyPackagePolicies1Vars11 = string +// Defines values for SecurityExceptionsAPIExceptionListOsType. 
+const ( + SecurityExceptionsAPIExceptionListOsTypeLinux SecurityExceptionsAPIExceptionListOsType = "linux" + SecurityExceptionsAPIExceptionListOsTypeMacos SecurityExceptionsAPIExceptionListOsType = "macos" + SecurityExceptionsAPIExceptionListOsTypeWindows SecurityExceptionsAPIExceptionListOsType = "windows" +) -// AgentPolicyPackagePolicies1Vars12 defines model for . -type AgentPolicyPackagePolicies1Vars12 = float32 +// Defines values for SecurityExceptionsAPIExceptionListType. +const ( + SecurityExceptionsAPIExceptionListTypeDetection SecurityExceptionsAPIExceptionListType = "detection" + SecurityExceptionsAPIExceptionListTypeEndpoint SecurityExceptionsAPIExceptionListType = "endpoint" + SecurityExceptionsAPIExceptionListTypeEndpointBlocklists SecurityExceptionsAPIExceptionListType = "endpoint_blocklists" + SecurityExceptionsAPIExceptionListTypeEndpointEvents SecurityExceptionsAPIExceptionListType = "endpoint_events" + SecurityExceptionsAPIExceptionListTypeEndpointHostIsolationExceptions SecurityExceptionsAPIExceptionListType = "endpoint_host_isolation_exceptions" + SecurityExceptionsAPIExceptionListTypeEndpointTrustedApps SecurityExceptionsAPIExceptionListType = "endpoint_trusted_apps" + SecurityExceptionsAPIExceptionListTypeEndpointTrustedDevices SecurityExceptionsAPIExceptionListType = "endpoint_trusted_devices" + SecurityExceptionsAPIExceptionListTypeRuleDefault SecurityExceptionsAPIExceptionListType = "rule_default" +) -// AgentPolicyPackagePolicies1Vars13 defines model for . -type AgentPolicyPackagePolicies1Vars13 = []string +// Defines values for SecurityExceptionsAPIExceptionNamespaceType. +const ( + Agnostic SecurityExceptionsAPIExceptionNamespaceType = "agnostic" + Single SecurityExceptionsAPIExceptionNamespaceType = "single" +) -// AgentPolicyPackagePolicies1Vars14 defines model for . -type AgentPolicyPackagePolicies1Vars14 = []float32 +// Defines values for SecurityExceptionsAPIListType. 
+const ( + SecurityExceptionsAPIListTypeBinary SecurityExceptionsAPIListType = "binary" + SecurityExceptionsAPIListTypeBoolean SecurityExceptionsAPIListType = "boolean" + SecurityExceptionsAPIListTypeByte SecurityExceptionsAPIListType = "byte" + SecurityExceptionsAPIListTypeDate SecurityExceptionsAPIListType = "date" + SecurityExceptionsAPIListTypeDateNanos SecurityExceptionsAPIListType = "date_nanos" + SecurityExceptionsAPIListTypeDateRange SecurityExceptionsAPIListType = "date_range" + SecurityExceptionsAPIListTypeDouble SecurityExceptionsAPIListType = "double" + SecurityExceptionsAPIListTypeDoubleRange SecurityExceptionsAPIListType = "double_range" + SecurityExceptionsAPIListTypeFloat SecurityExceptionsAPIListType = "float" + SecurityExceptionsAPIListTypeFloatRange SecurityExceptionsAPIListType = "float_range" + SecurityExceptionsAPIListTypeGeoPoint SecurityExceptionsAPIListType = "geo_point" + SecurityExceptionsAPIListTypeGeoShape SecurityExceptionsAPIListType = "geo_shape" + SecurityExceptionsAPIListTypeHalfFloat SecurityExceptionsAPIListType = "half_float" + SecurityExceptionsAPIListTypeInteger SecurityExceptionsAPIListType = "integer" + SecurityExceptionsAPIListTypeIntegerRange SecurityExceptionsAPIListType = "integer_range" + SecurityExceptionsAPIListTypeIp SecurityExceptionsAPIListType = "ip" + SecurityExceptionsAPIListTypeIpRange SecurityExceptionsAPIListType = "ip_range" + SecurityExceptionsAPIListTypeKeyword SecurityExceptionsAPIListType = "keyword" + SecurityExceptionsAPIListTypeLong SecurityExceptionsAPIListType = "long" + SecurityExceptionsAPIListTypeLongRange SecurityExceptionsAPIListType = "long_range" + SecurityExceptionsAPIListTypeShape SecurityExceptionsAPIListType = "shape" + SecurityExceptionsAPIListTypeShort SecurityExceptionsAPIListType = "short" + SecurityExceptionsAPIListTypeText SecurityExceptionsAPIListType = "text" +) -// AgentPolicyPackagePolicies1Vars15 defines model for . -type AgentPolicyPackagePolicies1Vars15 struct { - Id string `json:"id"` - IsSecretRef bool `json:"isSecretRef"` -} +// Defines values for SecurityListsAPIListType. 
+const ( + SecurityListsAPIListTypeBinary SecurityListsAPIListType = "binary" + SecurityListsAPIListTypeBoolean SecurityListsAPIListType = "boolean" + SecurityListsAPIListTypeByte SecurityListsAPIListType = "byte" + SecurityListsAPIListTypeDate SecurityListsAPIListType = "date" + SecurityListsAPIListTypeDateNanos SecurityListsAPIListType = "date_nanos" + SecurityListsAPIListTypeDateRange SecurityListsAPIListType = "date_range" + SecurityListsAPIListTypeDouble SecurityListsAPIListType = "double" + SecurityListsAPIListTypeDoubleRange SecurityListsAPIListType = "double_range" + SecurityListsAPIListTypeFloat SecurityListsAPIListType = "float" + SecurityListsAPIListTypeFloatRange SecurityListsAPIListType = "float_range" + SecurityListsAPIListTypeGeoPoint SecurityListsAPIListType = "geo_point" + SecurityListsAPIListTypeGeoShape SecurityListsAPIListType = "geo_shape" + SecurityListsAPIListTypeHalfFloat SecurityListsAPIListType = "half_float" + SecurityListsAPIListTypeInteger SecurityListsAPIListType = "integer" + SecurityListsAPIListTypeIntegerRange SecurityListsAPIListType = "integer_range" + SecurityListsAPIListTypeIp SecurityListsAPIListType = "ip" + SecurityListsAPIListTypeIpRange SecurityListsAPIListType = "ip_range" + SecurityListsAPIListTypeKeyword SecurityListsAPIListType = "keyword" + SecurityListsAPIListTypeLong SecurityListsAPIListType = "long" + SecurityListsAPIListTypeLongRange SecurityListsAPIListType = "long_range" + SecurityListsAPIListTypeShape SecurityListsAPIListType = "shape" + SecurityListsAPIListTypeShort SecurityListsAPIListType = "short" + SecurityListsAPIListTypeText SecurityListsAPIListType = "text" +) -// AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties defines model for AgentPolicy.PackagePolicies.1.Vars.1.AdditionalProperties. -type AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties struct { - union json.RawMessage -} +// Defines values for SecurityOsqueryAPISortOrderOrUndefined. +const ( + SecurityOsqueryAPISortOrderOrUndefinedAsc SecurityOsqueryAPISortOrderOrUndefined = "asc" + SecurityOsqueryAPISortOrderOrUndefinedDesc SecurityOsqueryAPISortOrderOrUndefined = "desc" +) -// AgentPolicy_PackagePolicies_1_Vars defines model for AgentPolicy.PackagePolicies.1.Vars. -type AgentPolicy_PackagePolicies_1_Vars struct { - union json.RawMessage -} +// Defines values for SecurityTimelineAPIAssociatedFilterType. +const ( + SecurityTimelineAPIAssociatedFilterTypeAll SecurityTimelineAPIAssociatedFilterType = "all" + SecurityTimelineAPIAssociatedFilterTypeDocumentAndSavedObject SecurityTimelineAPIAssociatedFilterType = "document_and_saved_object" + SecurityTimelineAPIAssociatedFilterTypeDocumentOnly SecurityTimelineAPIAssociatedFilterType = "document_only" + SecurityTimelineAPIAssociatedFilterTypeOrphan SecurityTimelineAPIAssociatedFilterType = "orphan" + SecurityTimelineAPIAssociatedFilterTypeSavedObjectOnly SecurityTimelineAPIAssociatedFilterType = "saved_object_only" +) -// AgentPolicy_PackagePolicies defines model for AgentPolicy.PackagePolicies. -type AgentPolicy_PackagePolicies struct { - union json.RawMessage -} +// Defines values for SecurityTimelineAPIDataProviderType. +const ( + SecurityTimelineAPIDataProviderTypeDefault SecurityTimelineAPIDataProviderType = "default" + SecurityTimelineAPIDataProviderTypeTemplate SecurityTimelineAPIDataProviderType = "template" +) -// AgentPolicyStatus defines model for AgentPolicy.Status. -type AgentPolicyStatus string +// Defines values for SecurityTimelineAPIRowRendererId. 
+const ( + SecurityTimelineAPIRowRendererIdAlert SecurityTimelineAPIRowRendererId = "alert" + SecurityTimelineAPIRowRendererIdAlerts SecurityTimelineAPIRowRendererId = "alerts" + SecurityTimelineAPIRowRendererIdAuditd SecurityTimelineAPIRowRendererId = "auditd" + SecurityTimelineAPIRowRendererIdAuditdFile SecurityTimelineAPIRowRendererId = "auditd_file" + SecurityTimelineAPIRowRendererIdLibrary SecurityTimelineAPIRowRendererId = "library" + SecurityTimelineAPIRowRendererIdNetflow SecurityTimelineAPIRowRendererId = "netflow" + SecurityTimelineAPIRowRendererIdPlain SecurityTimelineAPIRowRendererId = "plain" + SecurityTimelineAPIRowRendererIdRegistry SecurityTimelineAPIRowRendererId = "registry" + SecurityTimelineAPIRowRendererIdSuricata SecurityTimelineAPIRowRendererId = "suricata" + SecurityTimelineAPIRowRendererIdSystem SecurityTimelineAPIRowRendererId = "system" + SecurityTimelineAPIRowRendererIdSystemDns SecurityTimelineAPIRowRendererId = "system_dns" + SecurityTimelineAPIRowRendererIdSystemEndgameProcess SecurityTimelineAPIRowRendererId = "system_endgame_process" + SecurityTimelineAPIRowRendererIdSystemFile SecurityTimelineAPIRowRendererId = "system_file" + SecurityTimelineAPIRowRendererIdSystemFim SecurityTimelineAPIRowRendererId = "system_fim" + SecurityTimelineAPIRowRendererIdSystemSecurityEvent SecurityTimelineAPIRowRendererId = "system_security_event" + SecurityTimelineAPIRowRendererIdSystemSocket SecurityTimelineAPIRowRendererId = "system_socket" + SecurityTimelineAPIRowRendererIdThreatMatch SecurityTimelineAPIRowRendererId = "threat_match" + SecurityTimelineAPIRowRendererIdZeek SecurityTimelineAPIRowRendererId = "zeek" +) -// AgentPolicyGlobalDataTagsItem defines model for agent_policy_global_data_tags_item. -type AgentPolicyGlobalDataTagsItem struct { - Name string `json:"name"` - Value AgentPolicyGlobalDataTagsItem_Value `json:"value"` -} +// Defines values for SecurityTimelineAPISavedObjectResolveAliasPurpose. +const ( + SecurityTimelineAPISavedObjectResolveAliasPurposeSavedObjectConversion SecurityTimelineAPISavedObjectResolveAliasPurpose = "savedObjectConversion" + SecurityTimelineAPISavedObjectResolveAliasPurposeSavedObjectImport SecurityTimelineAPISavedObjectResolveAliasPurpose = "savedObjectImport" +) -// AgentPolicyGlobalDataTagsItemValue0 defines model for . -type AgentPolicyGlobalDataTagsItemValue0 = string +// Defines values for SecurityTimelineAPISavedObjectResolveOutcome. +const ( + SecurityTimelineAPISavedObjectResolveOutcomeAliasMatch SecurityTimelineAPISavedObjectResolveOutcome = "aliasMatch" + SecurityTimelineAPISavedObjectResolveOutcomeConflict SecurityTimelineAPISavedObjectResolveOutcome = "conflict" + SecurityTimelineAPISavedObjectResolveOutcomeExactMatch SecurityTimelineAPISavedObjectResolveOutcome = "exactMatch" +) -// AgentPolicyGlobalDataTagsItemValue1 defines model for . -type AgentPolicyGlobalDataTagsItemValue1 = float32 +// Defines values for SecurityTimelineAPISortFieldTimeline. +const ( + SecurityTimelineAPISortFieldTimelineCreated SecurityTimelineAPISortFieldTimeline = "created" + SecurityTimelineAPISortFieldTimelineDescription SecurityTimelineAPISortFieldTimeline = "description" + SecurityTimelineAPISortFieldTimelineTitle SecurityTimelineAPISortFieldTimeline = "title" + SecurityTimelineAPISortFieldTimelineUpdated SecurityTimelineAPISortFieldTimeline = "updated" +) -// AgentPolicyGlobalDataTagsItem_Value defines model for AgentPolicyGlobalDataTagsItem.Value. 
-type AgentPolicyGlobalDataTagsItem_Value struct { - union json.RawMessage -} +// Defines values for SecurityTimelineAPITimelineStatus. +const ( + SecurityTimelineAPITimelineStatusActive SecurityTimelineAPITimelineStatus = "active" + SecurityTimelineAPITimelineStatusDraft SecurityTimelineAPITimelineStatus = "draft" + SecurityTimelineAPITimelineStatusImmutable SecurityTimelineAPITimelineStatus = "immutable" +) -// AuthType The type of authentication to use: basic, SSL, or none. -type AuthType string +// Defines values for SecurityTimelineAPITimelineType. +const ( + SecurityTimelineAPITimelineTypeDefault SecurityTimelineAPITimelineType = "default" + SecurityTimelineAPITimelineTypeTemplate SecurityTimelineAPITimelineType = "template" +) -// BedrockConfig Defines properties for connectors when type is `.bedrock`. -type BedrockConfig struct { - // ApiUrl The Amazon Bedrock request URL. - ApiUrl string `json:"apiUrl"` +// Defines values for SyntheticsBrowserMonitorFieldsScreenshots. +const ( + Off SyntheticsBrowserMonitorFieldsScreenshots = "off" + On SyntheticsBrowserMonitorFieldsScreenshots = "on" + OnlyOnFailure SyntheticsBrowserMonitorFieldsScreenshots = "only-on-failure" +) - // DefaultModel The generative artificial intelligence model for Amazon Bedrock to use. Current support is for the Anthropic Claude models. - DefaultModel *string `json:"defaultModel,omitempty"` -} +// Defines values for SyntheticsBrowserMonitorFieldsType. +const ( + SyntheticsBrowserMonitorFieldsTypeBrowser SyntheticsBrowserMonitorFieldsType = "browser" +) -// BedrockSecrets Defines secrets for connectors when type is `.bedrock`. -type BedrockSecrets struct { - // AccessKey The AWS access key for authentication. - AccessKey string `json:"accessKey"` +// Defines values for SyntheticsHttpMonitorFieldsCheckRequestMethod. +const ( + GET SyntheticsHttpMonitorFieldsCheckRequestMethod = "GET" + HEAD SyntheticsHttpMonitorFieldsCheckRequestMethod = "HEAD" + OPTIONS SyntheticsHttpMonitorFieldsCheckRequestMethod = "OPTIONS" + POST SyntheticsHttpMonitorFieldsCheckRequestMethod = "POST" +) - // Secret The AWS secret for authentication. - Secret string `json:"secret"` -} +// Defines values for SyntheticsHttpMonitorFieldsMode. +const ( + SyntheticsHttpMonitorFieldsModeAll SyntheticsHttpMonitorFieldsMode = "all" + SyntheticsHttpMonitorFieldsModeAny SyntheticsHttpMonitorFieldsMode = "any" +) -// Ca A base64 encoded version of the certificate authority file that the connector can trust to sign and validate certificates. This option is available for all authentication types. -type Ca = string +// Defines values for SyntheticsHttpMonitorFieldsType. +const ( + Http SyntheticsHttpMonitorFieldsType = "http" +) -// CasesWebhookConfig Defines properties for connectors when type is `.cases-webhook`. -type CasesWebhookConfig struct { - // AuthType The type of authentication to use: basic, SSL, or none. - AuthType *AuthType `json:"authType,omitempty"` +// Defines values for SyntheticsIcmpMonitorFieldsType. +const ( + Icmp SyntheticsIcmpMonitorFieldsType = "icmp" +) - // Ca A base64 encoded version of the certificate authority file that the connector can trust to sign and validate certificates. This option is available for all authentication types. - Ca *Ca `json:"ca,omitempty"` +// Defines values for SyntheticsTcpMonitorFieldsType. 
+const ( + Tcp SyntheticsTcpMonitorFieldsType = "tcp" +) - // CertType If the `authType` is `webhook-authentication-ssl`, specifies whether the certificate authentication data is in a CRT and key file format or a PFX file format. - CertType *CertType `json:"certType,omitempty"` +// Defines values for AgentPolicyMonitoringEnabled. +const ( + AgentPolicyMonitoringEnabledLogs AgentPolicyMonitoringEnabled = "logs" + AgentPolicyMonitoringEnabledMetrics AgentPolicyMonitoringEnabled = "metrics" + AgentPolicyMonitoringEnabledTraces AgentPolicyMonitoringEnabled = "traces" +) - // CreateCommentJson A JSON payload sent to the create comment URL to create a case comment. You can use variables to add Kibana Cases data to the payload. The required variable is `case.comment`. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated once the Mustache variables have been placed when the REST method runs. Manually ensure that the JSON is valid, disregarding the Mustache variables, so the later validation will pass. - CreateCommentJson *string `json:"createCommentJson,omitempty"` +// Defines values for AgentPolicyPackagePolicies1Inputs0StreamsRelease. +const ( + AgentPolicyPackagePolicies1Inputs0StreamsReleaseBeta AgentPolicyPackagePolicies1Inputs0StreamsRelease = "beta" + AgentPolicyPackagePolicies1Inputs0StreamsReleaseExperimental AgentPolicyPackagePolicies1Inputs0StreamsRelease = "experimental" + AgentPolicyPackagePolicies1Inputs0StreamsReleaseGa AgentPolicyPackagePolicies1Inputs0StreamsRelease = "ga" +) - // CreateCommentMethod The REST API HTTP request method to create a case comment in the third-party system. Valid values are `patch`, `post`, and `put`. - CreateCommentMethod *CasesWebhookConfigCreateCommentMethod `json:"createCommentMethod,omitempty"` +// Defines values for AgentPolicyStatus. +const ( + AgentPolicyStatusActive AgentPolicyStatus = "active" + AgentPolicyStatusInactive AgentPolicyStatus = "inactive" +) - // CreateCommentUrl The REST API URL to create a case comment by ID in the third-party system. You can use a variable to add the external system ID to the URL. If you are using the `xpack.actions.allowedHosts setting`, add the hostname to the allowed hosts. - CreateCommentUrl *string `json:"createCommentUrl,omitempty"` +// Defines values for Aggtype. +const ( + AggtypeAvg Aggtype = "avg" + AggtypeCount Aggtype = "count" + AggtypeMax Aggtype = "max" + AggtypeMin Aggtype = "min" + AggtypeSum Aggtype = "sum" +) - // CreateIncidentJson A JSON payload sent to the create case URL to create a case. You can use variables to add case data to the payload. Required variables are `case.title` and `case.description`. Due to Mustache template variables (which is the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid to avoid future validation errors; disregard Mustache variables during your review. - CreateIncidentJson string `json:"createIncidentJson"` +// Defines values for AuthType. +const ( + WebhookAuthenticationBasic AuthType = "webhook-authentication-basic" + WebhookAuthenticationSsl AuthType = "webhook-authentication-ssl" +) - // CreateIncidentMethod The REST API HTTP request method to create a case in the third-party system. Valid values are `patch`, `post`, and `put`. 
- CreateIncidentMethod *CasesWebhookConfigCreateIncidentMethod `json:"createIncidentMethod,omitempty"` +// Defines values for CasesWebhookConfigCreateCommentMethod. +const ( + CasesWebhookConfigCreateCommentMethodPatch CasesWebhookConfigCreateCommentMethod = "patch" + CasesWebhookConfigCreateCommentMethodPost CasesWebhookConfigCreateCommentMethod = "post" + CasesWebhookConfigCreateCommentMethodPut CasesWebhookConfigCreateCommentMethod = "put" +) - // CreateIncidentResponseKey The JSON key in the create external case response that contains the case ID. - CreateIncidentResponseKey string `json:"createIncidentResponseKey"` +// Defines values for CasesWebhookConfigCreateIncidentMethod. +const ( + CasesWebhookConfigCreateIncidentMethodPatch CasesWebhookConfigCreateIncidentMethod = "patch" + CasesWebhookConfigCreateIncidentMethodPost CasesWebhookConfigCreateIncidentMethod = "post" + CasesWebhookConfigCreateIncidentMethodPut CasesWebhookConfigCreateIncidentMethod = "put" +) - // CreateIncidentUrl The REST API URL to create a case in the third-party system. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - CreateIncidentUrl string `json:"createIncidentUrl"` +// Defines values for CasesWebhookConfigUpdateIncidentMethod. +const ( + CasesWebhookConfigUpdateIncidentMethodPatch CasesWebhookConfigUpdateIncidentMethod = "patch" + CasesWebhookConfigUpdateIncidentMethodPost CasesWebhookConfigUpdateIncidentMethod = "post" + CasesWebhookConfigUpdateIncidentMethodPut CasesWebhookConfigUpdateIncidentMethod = "put" +) - // GetIncidentResponseExternalTitleKey The JSON key in get external case response that contains the case title. - GetIncidentResponseExternalTitleKey string `json:"getIncidentResponseExternalTitleKey"` +// Defines values for CertType. +const ( + SslCrtKey CertType = "ssl-crt-key" + SslPfx CertType = "ssl-pfx" +) - // GetIncidentUrl The REST API URL to get the case by ID from the third-party system. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. You can use a variable to add the external system ID to the URL. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid, disregarding the Mustache variables, so the later validation will pass. - GetIncidentUrl string `json:"getIncidentUrl"` +// Defines values for EmailConfigService. +const ( + EmailConfigServiceElasticCloud EmailConfigService = "elastic_cloud" + EmailConfigServiceExchangeServer EmailConfigService = "exchange_server" + EmailConfigServiceGmail EmailConfigService = "gmail" + EmailConfigServiceOther EmailConfigService = "other" + EmailConfigServiceOutlook365 EmailConfigService = "outlook365" + EmailConfigServiceSes EmailConfigService = "ses" +) - // HasAuth If true, a username and password for login type authentication must be provided. - HasAuth *HasAuth `json:"hasAuth,omitempty"` +// Defines values for GenaiAzureConfigApiProvider. +const ( + AzureOpenAI GenaiAzureConfigApiProvider = "Azure OpenAI" +) - // Headers A set of key-value pairs sent as headers with the request URLs for the create case, update case, get case, and create comment methods. - Headers *string `json:"headers,omitempty"` +// Defines values for GenaiOpenaiConfigApiProvider. 
+const ( + OpenAI GenaiOpenaiConfigApiProvider = "OpenAI" +) - // UpdateIncidentJson The JSON payload sent to the update case URL to update the case. You can use variables to add Kibana Cases data to the payload. Required variables are `case.title` and `case.description`. Due to Mustache template variables (which is the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when REST method runs. Manually ensure that the JSON is valid to avoid future validation errors; disregard Mustache variables during your review. - UpdateIncidentJson string `json:"updateIncidentJson"` +// Defines values for GenaiOpenaiOtherConfigApiProvider. +const ( + Other GenaiOpenaiOtherConfigApiProvider = "Other" +) - // UpdateIncidentMethod The REST API HTTP request method to update the case in the third-party system. Valid values are `patch`, `post`, and `put`. - UpdateIncidentMethod *CasesWebhookConfigUpdateIncidentMethod `json:"updateIncidentMethod,omitempty"` +// Defines values for GenaiOpenaiOtherConfigVerificationMode. +const ( + GenaiOpenaiOtherConfigVerificationModeCertificate GenaiOpenaiOtherConfigVerificationMode = "certificate" + GenaiOpenaiOtherConfigVerificationModeFull GenaiOpenaiOtherConfigVerificationMode = "full" + GenaiOpenaiOtherConfigVerificationModeNone GenaiOpenaiOtherConfigVerificationMode = "none" +) - // UpdateIncidentUrl The REST API URL to update the case by ID in the third-party system. You can use a variable to add the external system ID to the URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - UpdateIncidentUrl string `json:"updateIncidentUrl"` +// Defines values for Groupby. +const ( + GroupbyAll Groupby = "all" + GroupbyTop Groupby = "top" +) - // VerificationMode Controls the verification of certificates. Use `full` to validate that the certificate has an issue date within the `not_before` and `not_after` dates, chains to a trusted certificate authority (CA), and has a hostname or IP address that matches the names within the certificate. Use `certificate` to validate the certificate and verify that it is signed by a trusted authority; this option does not check the certificate hostname. Use `none` to skip certificate validation. - VerificationMode *VerificationMode `json:"verificationMode,omitempty"` +// Defines values for NewOutputElasticsearchPreset. +const ( + NewOutputElasticsearchPresetBalanced NewOutputElasticsearchPreset = "balanced" + NewOutputElasticsearchPresetCustom NewOutputElasticsearchPreset = "custom" + NewOutputElasticsearchPresetLatency NewOutputElasticsearchPreset = "latency" + NewOutputElasticsearchPresetScale NewOutputElasticsearchPreset = "scale" + NewOutputElasticsearchPresetThroughput NewOutputElasticsearchPreset = "throughput" +) - // ViewIncidentUrl The URL to view the case in the external system. You can use variables to add the external system ID or external system title to the URL. - ViewIncidentUrl string `json:"viewIncidentUrl"` -} +// Defines values for NewOutputElasticsearchType. +const ( + NewOutputElasticsearchTypeElasticsearch NewOutputElasticsearchType = "elasticsearch" +) -// CasesWebhookConfigCreateCommentMethod The REST API HTTP request method to create a case comment in the third-party system. Valid values are `patch`, `post`, and `put`. -type CasesWebhookConfigCreateCommentMethod string +// Defines values for NewOutputKafkaAuthType. 
+const ( + NewOutputKafkaAuthTypeKerberos NewOutputKafkaAuthType = "kerberos" + NewOutputKafkaAuthTypeNone NewOutputKafkaAuthType = "none" + NewOutputKafkaAuthTypeSsl NewOutputKafkaAuthType = "ssl" + NewOutputKafkaAuthTypeUserPass NewOutputKafkaAuthType = "user_pass" +) -// CasesWebhookConfigCreateIncidentMethod The REST API HTTP request method to create a case in the third-party system. Valid values are `patch`, `post`, and `put`. -type CasesWebhookConfigCreateIncidentMethod string +// Defines values for NewOutputKafkaCompression. +const ( + NewOutputKafkaCompressionGzip NewOutputKafkaCompression = "gzip" + NewOutputKafkaCompressionLz4 NewOutputKafkaCompression = "lz4" + NewOutputKafkaCompressionNone NewOutputKafkaCompression = "none" + NewOutputKafkaCompressionSnappy NewOutputKafkaCompression = "snappy" +) -// CasesWebhookConfigUpdateIncidentMethod The REST API HTTP request method to update the case in the third-party system. Valid values are `patch`, `post`, and `put`. -type CasesWebhookConfigUpdateIncidentMethod string +// Defines values for NewOutputKafkaPartition. +const ( + NewOutputKafkaPartitionHash NewOutputKafkaPartition = "hash" + NewOutputKafkaPartitionRandom NewOutputKafkaPartition = "random" + NewOutputKafkaPartitionRoundRobin NewOutputKafkaPartition = "round_robin" +) -// CasesWebhookSecrets defines model for cases_webhook_secrets. -type CasesWebhookSecrets struct { - // Crt If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the CRT or CERT file. - Crt *Crt `json:"crt,omitempty"` +// Defines values for NewOutputKafkaRequiredAcks. +const ( + NewOutputKafkaRequiredAcksMinus1 NewOutputKafkaRequiredAcks = -1 + NewOutputKafkaRequiredAcksN0 NewOutputKafkaRequiredAcks = 0 + NewOutputKafkaRequiredAcksN1 NewOutputKafkaRequiredAcks = 1 +) - // Key If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the KEY file. - Key *Key `json:"key,omitempty"` +// Defines values for NewOutputKafkaSaslMechanism. +const ( + NewOutputKafkaSaslMechanismPLAIN NewOutputKafkaSaslMechanism = "PLAIN" + NewOutputKafkaSaslMechanismSCRAMSHA256 NewOutputKafkaSaslMechanism = "SCRAM-SHA-256" + NewOutputKafkaSaslMechanismSCRAMSHA512 NewOutputKafkaSaslMechanism = "SCRAM-SHA-512" +) - // Password The password for HTTP basic authentication. If `hasAuth` is set to `true` and and `authType` is `webhook-authentication-basic`, this property is required. - Password *string `json:"password,omitempty"` +// Defines values for NewOutputKafkaType. +const ( + NewOutputKafkaTypeKafka NewOutputKafkaType = "kafka" +) - // Pfx If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-pfx`, it is a base64 encoded version of the PFX or P12 file. - Pfx *Pfx `json:"pfx,omitempty"` +// Defines values for NewOutputLogstashType. +const ( + NewOutputLogstashTypeLogstash NewOutputLogstashType = "logstash" +) - // User The username for HTTP basic authentication. If `hasAuth` is set to `true` and `authType` is `webhook-authentication-basic`, this property is required. - User *string `json:"user,omitempty"` -} +// Defines values for NewOutputRemoteElasticsearchPreset. 
+const ( + NewOutputRemoteElasticsearchPresetBalanced NewOutputRemoteElasticsearchPreset = "balanced" + NewOutputRemoteElasticsearchPresetCustom NewOutputRemoteElasticsearchPreset = "custom" + NewOutputRemoteElasticsearchPresetLatency NewOutputRemoteElasticsearchPreset = "latency" + NewOutputRemoteElasticsearchPresetScale NewOutputRemoteElasticsearchPreset = "scale" + NewOutputRemoteElasticsearchPresetThroughput NewOutputRemoteElasticsearchPreset = "throughput" +) -// CertType If the `authType` is `webhook-authentication-ssl`, specifies whether the certificate authentication data is in a CRT and key file format or a PFX file format. -type CertType string +// Defines values for NewOutputRemoteElasticsearchType. +const ( + NewOutputRemoteElasticsearchTypeRemoteElasticsearch NewOutputRemoteElasticsearchType = "remote_elasticsearch" +) -// ConnectorResponse defines model for connector_response. -type ConnectorResponse struct { - Config *map[string]interface{} `json:"config,omitempty"` +// Defines values for NewOutputSslVerificationMode. +const ( + NewOutputSslVerificationModeCertificate NewOutputSslVerificationMode = "certificate" + NewOutputSslVerificationModeFull NewOutputSslVerificationMode = "full" + NewOutputSslVerificationModeNone NewOutputSslVerificationMode = "none" + NewOutputSslVerificationModeStrict NewOutputSslVerificationMode = "strict" +) - // ConnectorTypeId The connector type identifier. - ConnectorTypeId string `json:"connector_type_id"` +// Defines values for OutputElasticsearchPreset. +const ( + OutputElasticsearchPresetBalanced OutputElasticsearchPreset = "balanced" + OutputElasticsearchPresetCustom OutputElasticsearchPreset = "custom" + OutputElasticsearchPresetLatency OutputElasticsearchPreset = "latency" + OutputElasticsearchPresetScale OutputElasticsearchPreset = "scale" + OutputElasticsearchPresetThroughput OutputElasticsearchPreset = "throughput" +) - // Id The identifier for the connector. - Id string `json:"id"` +// Defines values for OutputElasticsearchType. +const ( + OutputElasticsearchTypeElasticsearch OutputElasticsearchType = "elasticsearch" +) - // IsDeprecated Indicates whether the connector is deprecated. - IsDeprecated bool `json:"is_deprecated"` +// Defines values for OutputKafkaAuthType. +const ( + OutputKafkaAuthTypeKerberos OutputKafkaAuthType = "kerberos" + OutputKafkaAuthTypeNone OutputKafkaAuthType = "none" + OutputKafkaAuthTypeSsl OutputKafkaAuthType = "ssl" + OutputKafkaAuthTypeUserPass OutputKafkaAuthType = "user_pass" +) - // IsMissingSecrets Indicates whether the connector is missing secrets. - IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` +// Defines values for OutputKafkaCompression. +const ( + OutputKafkaCompressionGzip OutputKafkaCompression = "gzip" + OutputKafkaCompressionLz4 OutputKafkaCompression = "lz4" + OutputKafkaCompressionNone OutputKafkaCompression = "none" + OutputKafkaCompressionSnappy OutputKafkaCompression = "snappy" +) - // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured bool `json:"is_preconfigured"` +// Defines values for OutputKafkaPartition. +const ( + OutputKafkaPartitionHash OutputKafkaPartition = "hash" + OutputKafkaPartitionRandom OutputKafkaPartition = "random" + OutputKafkaPartitionRoundRobin OutputKafkaPartition = "round_robin" +) - // IsSystemAction Indicates whether the connector is used for system actions. 
- IsSystemAction bool `json:"is_system_action"` +// Defines values for OutputKafkaRequiredAcks. +const ( + OutputKafkaRequiredAcksMinus1 OutputKafkaRequiredAcks = -1 + OutputKafkaRequiredAcksN0 OutputKafkaRequiredAcks = 0 + OutputKafkaRequiredAcksN1 OutputKafkaRequiredAcks = 1 +) - // Name The name of the rule. - Name string `json:"name"` -} +// Defines values for OutputKafkaSaslMechanism. +const ( + OutputKafkaSaslMechanismPLAIN OutputKafkaSaslMechanism = "PLAIN" + OutputKafkaSaslMechanismSCRAMSHA256 OutputKafkaSaslMechanism = "SCRAM-SHA-256" + OutputKafkaSaslMechanismSCRAMSHA512 OutputKafkaSaslMechanism = "SCRAM-SHA-512" +) -// CreateConnectorConfig The connector configuration details. -type CreateConnectorConfig struct { - AdditionalProperties map[string]interface{} `json:"-"` - union json.RawMessage -} +// Defines values for OutputKafkaType. +const ( + OutputKafkaTypeKafka OutputKafkaType = "kafka" +) -// CreateConnectorSecrets defines model for create_connector_secrets. -type CreateConnectorSecrets struct { - AdditionalProperties map[string]interface{} `json:"-"` - union json.RawMessage -} +// Defines values for OutputLogstashType. +const ( + OutputLogstashTypeLogstash OutputLogstashType = "logstash" +) -// CreateParamResponse defines model for create_param_response. -type CreateParamResponse struct { - union json.RawMessage -} +// Defines values for OutputRemoteElasticsearchPreset. +const ( + OutputRemoteElasticsearchPresetBalanced OutputRemoteElasticsearchPreset = "balanced" + OutputRemoteElasticsearchPresetCustom OutputRemoteElasticsearchPreset = "custom" + OutputRemoteElasticsearchPresetLatency OutputRemoteElasticsearchPreset = "latency" + OutputRemoteElasticsearchPresetScale OutputRemoteElasticsearchPreset = "scale" + OutputRemoteElasticsearchPresetThroughput OutputRemoteElasticsearchPreset = "throughput" +) -// CreateParamResponse0 defines model for . -type CreateParamResponse0 = []SyntheticsPostParameterResponse +// Defines values for OutputRemoteElasticsearchType. +const ( + OutputRemoteElasticsearchTypeRemoteElasticsearch OutputRemoteElasticsearchType = "remote_elasticsearch" +) -// CrowdstrikeConfig Defines config properties for connectors when type is `.crowdstrike`. -type CrowdstrikeConfig struct { - // Url The CrowdStrike tenant URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - Url string `json:"url"` -} +// Defines values for OutputSslVerificationMode. +const ( + OutputSslVerificationModeCertificate OutputSslVerificationMode = "certificate" + OutputSslVerificationModeFull OutputSslVerificationMode = "full" + OutputSslVerificationModeNone OutputSslVerificationMode = "none" + OutputSslVerificationModeStrict OutputSslVerificationMode = "strict" +) -// CrowdstrikeSecrets Defines secrets for connectors when type is `.crowdstrike`. -type CrowdstrikeSecrets struct { - // ClientId The CrowdStrike API client identifier. - ClientId string `json:"clientId"` +// Defines values for PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0. 
+const ( + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0CspRuleTemplate PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "csp-rule-template" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Dashboard PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "dashboard" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0IndexPattern PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "index-pattern" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Lens PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "lens" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Map PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "map" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0MlModule PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "ml-module" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0OsqueryPackAsset PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "osquery-pack-asset" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0OsquerySavedQuery PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "osquery-saved-query" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Search PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "search" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0SecurityRule PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "security-rule" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Tag PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "tag" + PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0Visualization PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 = "visualization" +) - // ClientSecret The CrowdStrike API client secret to authenticate the `clientId`. - ClientSecret string `json:"clientSecret"` -} +// Defines values for PackageInfoInstallationInfoInstallSource. +const ( + PackageInfoInstallationInfoInstallSourceBundled PackageInfoInstallationInfoInstallSource = "bundled" + PackageInfoInstallationInfoInstallSourceCustom PackageInfoInstallationInfoInstallSource = "custom" + PackageInfoInstallationInfoInstallSourceRegistry PackageInfoInstallationInfoInstallSource = "registry" + PackageInfoInstallationInfoInstallSourceUpload PackageInfoInstallationInfoInstallSource = "upload" +) -// Crt If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the CRT or CERT file. -type Crt = string +// Defines values for PackageInfoInstallationInfoInstallStatus. +const ( + PackageInfoInstallationInfoInstallStatusInstallFailed PackageInfoInstallationInfoInstallStatus = "install_failed" + PackageInfoInstallationInfoInstallStatusInstalled PackageInfoInstallationInfoInstallStatus = "installed" + PackageInfoInstallationInfoInstallStatusInstalling PackageInfoInstallationInfoInstallStatus = "installing" +) -// D3securityConfig Defines properties for connectors when type is `.d3security`. -type D3securityConfig struct { - // Url The D3 Security API request URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - Url string `json:"url"` -} +// Defines values for PackageInfoInstallationInfoInstalledEsType. 
+const ( + PackageInfoInstallationInfoInstalledEsTypeComponentTemplate PackageInfoInstallationInfoInstalledEsType = "component_template" + PackageInfoInstallationInfoInstalledEsTypeDataStreamIlmPolicy PackageInfoInstallationInfoInstalledEsType = "data_stream_ilm_policy" + PackageInfoInstallationInfoInstalledEsTypeIlmPolicy PackageInfoInstallationInfoInstalledEsType = "ilm_policy" + PackageInfoInstallationInfoInstalledEsTypeIndex PackageInfoInstallationInfoInstalledEsType = "index" + PackageInfoInstallationInfoInstalledEsTypeIndexTemplate PackageInfoInstallationInfoInstalledEsType = "index_template" + PackageInfoInstallationInfoInstalledEsTypeIngestPipeline PackageInfoInstallationInfoInstalledEsType = "ingest_pipeline" + PackageInfoInstallationInfoInstalledEsTypeKnowledgeBase PackageInfoInstallationInfoInstalledEsType = "knowledge_base" + PackageInfoInstallationInfoInstalledEsTypeMlModel PackageInfoInstallationInfoInstalledEsType = "ml_model" + PackageInfoInstallationInfoInstalledEsTypeTransform PackageInfoInstallationInfoInstalledEsType = "transform" +) -// D3securitySecrets Defines secrets for connectors when type is `.d3security`. -type D3securitySecrets struct { - // Token The D3 Security token. - Token string `json:"token"` -} +// Defines values for PackageInfoInstallationInfoInstalledKibanaType0. +const ( + PackageInfoInstallationInfoInstalledKibanaType0CspRuleTemplate PackageInfoInstallationInfoInstalledKibanaType0 = "csp-rule-template" + PackageInfoInstallationInfoInstalledKibanaType0Dashboard PackageInfoInstallationInfoInstalledKibanaType0 = "dashboard" + PackageInfoInstallationInfoInstalledKibanaType0IndexPattern PackageInfoInstallationInfoInstalledKibanaType0 = "index-pattern" + PackageInfoInstallationInfoInstalledKibanaType0Lens PackageInfoInstallationInfoInstalledKibanaType0 = "lens" + PackageInfoInstallationInfoInstalledKibanaType0Map PackageInfoInstallationInfoInstalledKibanaType0 = "map" + PackageInfoInstallationInfoInstalledKibanaType0MlModule PackageInfoInstallationInfoInstalledKibanaType0 = "ml-module" + PackageInfoInstallationInfoInstalledKibanaType0OsqueryPackAsset PackageInfoInstallationInfoInstalledKibanaType0 = "osquery-pack-asset" + PackageInfoInstallationInfoInstalledKibanaType0OsquerySavedQuery PackageInfoInstallationInfoInstalledKibanaType0 = "osquery-saved-query" + PackageInfoInstallationInfoInstalledKibanaType0Search PackageInfoInstallationInfoInstalledKibanaType0 = "search" + PackageInfoInstallationInfoInstalledKibanaType0SecurityRule PackageInfoInstallationInfoInstalledKibanaType0 = "security-rule" + PackageInfoInstallationInfoInstalledKibanaType0Tag PackageInfoInstallationInfoInstalledKibanaType0 = "tag" + PackageInfoInstallationInfoInstalledKibanaType0Visualization PackageInfoInstallationInfoInstalledKibanaType0 = "visualization" +) -// DefenderConfig Defines properties for connectors when type is `.microsoft_defender_endpoint`. -type DefenderConfig struct { - // ApiUrl The URL of the Microsoft Defender for Endpoint API. If you are using the `xpack.actions.allowedHosts` setting, make sure the hostname is added to the allowed hosts. - ApiUrl string `json:"apiUrl"` +// Defines values for PackageInfoInstallationInfoVerificationStatus. 
+const ( + PackageInfoInstallationInfoVerificationStatusUnknown PackageInfoInstallationInfoVerificationStatus = "unknown" + PackageInfoInstallationInfoVerificationStatusUnverified PackageInfoInstallationInfoVerificationStatus = "unverified" + PackageInfoInstallationInfoVerificationStatusVerified PackageInfoInstallationInfoVerificationStatus = "verified" +) - // ClientId The application (client) identifier for your app in the Azure portal. - ClientId *string `json:"clientId,omitempty"` +// Defines values for PackageInfoOwnerType. +const ( + PackageInfoOwnerTypeCommunity PackageInfoOwnerType = "community" + PackageInfoOwnerTypeElastic PackageInfoOwnerType = "elastic" + PackageInfoOwnerTypePartner PackageInfoOwnerType = "partner" +) - // OAuthScope The OAuth scopes or permission sets for the Microsoft Defender for Endpoint API. - OAuthScope *string `json:"oAuthScope,omitempty"` +// Defines values for PackageInfoRelease. +const ( + PackageInfoReleaseBeta PackageInfoRelease = "beta" + PackageInfoReleaseExperimental PackageInfoRelease = "experimental" + PackageInfoReleaseGa PackageInfoRelease = "ga" +) - // OAuthServerUrl The OAuth server URL where authentication is sent and received for the Microsoft Defender for Endpoint API. - OAuthServerUrl *string `json:"oAuthServerUrl,omitempty"` +// Defines values for PackageInfoType0. +const ( + PackageInfoType0Integration PackageInfoType0 = "integration" +) - // TenantId The tenant identifier for your app in the Azure portal. - TenantId *string `json:"tenantId,omitempty"` -} +// Defines values for PackageInfoType1. +const ( + PackageInfoType1Input PackageInfoType1 = "input" +) -// DefenderSecrets Defines secrets for connectors when type is `..microsoft_defender_endpoint`. -type DefenderSecrets struct { - // ClientSecret The client secret for your app in the Azure portal. - ClientSecret string `json:"clientSecret"` -} +// Defines values for PackageInfoType2. +const ( + PackageInfoType2Content PackageInfoType2 = "content" +) -// EmailConfig Defines properties for connectors when type is `.email`. -type EmailConfig struct { - // ClientId The client identifier, which is a part of OAuth 2.0 client credentials authentication, in GUID format. If `service` is `exchange_server`, this property is required. - ClientId *string `json:"clientId,omitempty"` +// Defines values for PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0. 
+const ( + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0CspRuleTemplate PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "csp-rule-template" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Dashboard PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "dashboard" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0IndexPattern PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "index-pattern" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Lens PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "lens" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Map PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "map" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0MlModule PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "ml-module" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0OsqueryPackAsset PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "osquery-pack-asset" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0OsquerySavedQuery PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "osquery-saved-query" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Search PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "search" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0SecurityRule PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "security-rule" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Tag PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "tag" + PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0Visualization PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 = "visualization" +) - // From The from address for all emails sent by the connector. It must be specified in `user@host-name` format. - From string `json:"from"` +// Defines values for PackageListItemInstallationInfoInstallSource. +const ( + PackageListItemInstallationInfoInstallSourceBundled PackageListItemInstallationInfoInstallSource = "bundled" + PackageListItemInstallationInfoInstallSourceCustom PackageListItemInstallationInfoInstallSource = "custom" + PackageListItemInstallationInfoInstallSourceRegistry PackageListItemInstallationInfoInstallSource = "registry" + PackageListItemInstallationInfoInstallSourceUpload PackageListItemInstallationInfoInstallSource = "upload" +) - // HasAuth Specifies whether a user and password are required inside the secrets configuration. - HasAuth *bool `json:"hasAuth,omitempty"` +// Defines values for PackageListItemInstallationInfoInstallStatus. +const ( + PackageListItemInstallationInfoInstallStatusInstallFailed PackageListItemInstallationInfoInstallStatus = "install_failed" + PackageListItemInstallationInfoInstallStatusInstalled PackageListItemInstallationInfoInstallStatus = "installed" + PackageListItemInstallationInfoInstallStatusInstalling PackageListItemInstallationInfoInstallStatus = "installing" +) - // Host The host name of the service provider. If the `service` is `elastic_cloud` (for Elastic Cloud notifications) or one of Nodemailer's well-known email service providers, this property is ignored. If `service` is `other`, this property must be defined. 
- Host *string `json:"host,omitempty"` - OauthTokenUrl *string `json:"oauthTokenUrl,omitempty"` +// Defines values for PackageListItemInstallationInfoInstalledEsType. +const ( + PackageListItemInstallationInfoInstalledEsTypeComponentTemplate PackageListItemInstallationInfoInstalledEsType = "component_template" + PackageListItemInstallationInfoInstalledEsTypeDataStreamIlmPolicy PackageListItemInstallationInfoInstalledEsType = "data_stream_ilm_policy" + PackageListItemInstallationInfoInstalledEsTypeIlmPolicy PackageListItemInstallationInfoInstalledEsType = "ilm_policy" + PackageListItemInstallationInfoInstalledEsTypeIndex PackageListItemInstallationInfoInstalledEsType = "index" + PackageListItemInstallationInfoInstalledEsTypeIndexTemplate PackageListItemInstallationInfoInstalledEsType = "index_template" + PackageListItemInstallationInfoInstalledEsTypeIngestPipeline PackageListItemInstallationInfoInstalledEsType = "ingest_pipeline" + PackageListItemInstallationInfoInstalledEsTypeKnowledgeBase PackageListItemInstallationInfoInstalledEsType = "knowledge_base" + PackageListItemInstallationInfoInstalledEsTypeMlModel PackageListItemInstallationInfoInstalledEsType = "ml_model" + PackageListItemInstallationInfoInstalledEsTypeTransform PackageListItemInstallationInfoInstalledEsType = "transform" +) - // Port The port to connect to on the service provider. If the `service` is `elastic_cloud` (for Elastic Cloud notifications) or one of Nodemailer's well-known email service providers, this property is ignored. If `service` is `other`, this property must be defined. - Port *int `json:"port,omitempty"` +// Defines values for PackageListItemInstallationInfoInstalledKibanaType0. +const ( + PackageListItemInstallationInfoInstalledKibanaType0CspRuleTemplate PackageListItemInstallationInfoInstalledKibanaType0 = "csp-rule-template" + PackageListItemInstallationInfoInstalledKibanaType0Dashboard PackageListItemInstallationInfoInstalledKibanaType0 = "dashboard" + PackageListItemInstallationInfoInstalledKibanaType0IndexPattern PackageListItemInstallationInfoInstalledKibanaType0 = "index-pattern" + PackageListItemInstallationInfoInstalledKibanaType0Lens PackageListItemInstallationInfoInstalledKibanaType0 = "lens" + PackageListItemInstallationInfoInstalledKibanaType0Map PackageListItemInstallationInfoInstalledKibanaType0 = "map" + PackageListItemInstallationInfoInstalledKibanaType0MlModule PackageListItemInstallationInfoInstalledKibanaType0 = "ml-module" + PackageListItemInstallationInfoInstalledKibanaType0OsqueryPackAsset PackageListItemInstallationInfoInstalledKibanaType0 = "osquery-pack-asset" + PackageListItemInstallationInfoInstalledKibanaType0OsquerySavedQuery PackageListItemInstallationInfoInstalledKibanaType0 = "osquery-saved-query" + PackageListItemInstallationInfoInstalledKibanaType0Search PackageListItemInstallationInfoInstalledKibanaType0 = "search" + PackageListItemInstallationInfoInstalledKibanaType0SecurityRule PackageListItemInstallationInfoInstalledKibanaType0 = "security-rule" + PackageListItemInstallationInfoInstalledKibanaType0Tag PackageListItemInstallationInfoInstalledKibanaType0 = "tag" + PackageListItemInstallationInfoInstalledKibanaType0Visualization PackageListItemInstallationInfoInstalledKibanaType0 = "visualization" +) - // Secure Specifies whether the connection to the service provider will use TLS. If the `service` is `elastic_cloud` (for Elastic Cloud notifications) or one of Nodemailer's well-known email service providers, this property is ignored. 
- Secure *bool `json:"secure,omitempty"` +// Defines values for PackageListItemInstallationInfoVerificationStatus. +const ( + PackageListItemInstallationInfoVerificationStatusUnknown PackageListItemInstallationInfoVerificationStatus = "unknown" + PackageListItemInstallationInfoVerificationStatusUnverified PackageListItemInstallationInfoVerificationStatus = "unverified" + PackageListItemInstallationInfoVerificationStatusVerified PackageListItemInstallationInfoVerificationStatus = "verified" +) - // Service The name of the email service. - Service *EmailConfigService `json:"service,omitempty"` +// Defines values for PackageListItemOwnerType. +const ( + PackageListItemOwnerTypeCommunity PackageListItemOwnerType = "community" + PackageListItemOwnerTypeElastic PackageListItemOwnerType = "elastic" + PackageListItemOwnerTypePartner PackageListItemOwnerType = "partner" +) - // TenantId The tenant identifier, which is part of OAuth 2.0 client credentials authentication, in GUID format. If `service` is `exchange_server`, this property is required. - TenantId *string `json:"tenantId,omitempty"` -} +// Defines values for PackageListItemRelease. +const ( + Beta PackageListItemRelease = "beta" + Experimental PackageListItemRelease = "experimental" + Ga PackageListItemRelease = "ga" +) -// EmailConfigService The name of the email service. -type EmailConfigService string +// Defines values for PackageListItemType0. +const ( + PackageListItemType0Integration PackageListItemType0 = "integration" +) -// EmailSecrets Defines secrets for connectors when type is `.email`. -type EmailSecrets struct { - // ClientSecret The Microsoft Exchange Client secret for OAuth 2.0 client credentials authentication. It must be URL-encoded. If `service` is `exchange_server`, this property is required. - ClientSecret *string `json:"clientSecret,omitempty"` +// Defines values for PackageListItemType1. +const ( + PackageListItemType1Input PackageListItemType1 = "input" +) - // Password The password for HTTP basic authentication. If `hasAuth` is set to `true`, this property is required. - Password *string `json:"password,omitempty"` +// Defines values for PackageListItemType2. +const ( + PackageListItemType2Content PackageListItemType2 = "content" +) - // User The username for HTTP basic authentication. If `hasAuth` is set to `true`, this property is required. - User *string `json:"user,omitempty"` -} +// Defines values for ParamsEsQueryDslRuleSearchType. +const ( + EsQuery ParamsEsQueryDslRuleSearchType = "esQuery" +) -// EnrollmentApiKey defines model for enrollment_api_key. -type EnrollmentApiKey struct { - // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. - Active bool `json:"active"` +// Defines values for ParamsEsQueryEsqlRuleSearchType. +const ( + ParamsEsQueryEsqlRuleSearchTypeEsqlQuery ParamsEsQueryEsqlRuleSearchType = "esqlQuery" +) - // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. - ApiKey string `json:"api_key"` +// Defines values for ParamsEsQueryEsqlRuleThresholdComparator. +const ( + ParamsEsQueryEsqlRuleThresholdComparatorGreaterThan ParamsEsQueryEsqlRuleThresholdComparator = ">" +) - // ApiKeyId The ID of the API key in the Security API. - ApiKeyId string `json:"api_key_id"` - CreatedAt string `json:"created_at"` - Hidden *bool `json:"hidden,omitempty"` - Id string `json:"id"` +// Defines values for ParamsEsQueryKqlRuleSearchType. 
+const ( + SearchSource ParamsEsQueryKqlRuleSearchType = "searchSource" +) - // Name The name of the enrollment API key. - Name *string `json:"name,omitempty"` +// Defines values for ParamsPropertyApmAnomalyAnomalySeverityType. +const ( + ParamsPropertyApmAnomalyAnomalySeverityTypeCritical ParamsPropertyApmAnomalyAnomalySeverityType = "critical" + ParamsPropertyApmAnomalyAnomalySeverityTypeMajor ParamsPropertyApmAnomalyAnomalySeverityType = "major" + ParamsPropertyApmAnomalyAnomalySeverityTypeMinor ParamsPropertyApmAnomalyAnomalySeverityType = "minor" + ParamsPropertyApmAnomalyAnomalySeverityTypeWarning ParamsPropertyApmAnomalyAnomalySeverityType = "warning" +) - // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. - PolicyId *string `json:"policy_id,omitempty"` -} +// Defines values for ParamsPropertyApmAnomalyWindowUnit. +const ( + ParamsPropertyApmAnomalyWindowUnitD ParamsPropertyApmAnomalyWindowUnit = "d" + ParamsPropertyApmAnomalyWindowUnitH ParamsPropertyApmAnomalyWindowUnit = "h" + ParamsPropertyApmAnomalyWindowUnitM ParamsPropertyApmAnomalyWindowUnit = "m" +) -// GeminiConfig Defines properties for connectors when type is `.gemini`. -type GeminiConfig struct { - // ApiUrl The Google Gemini request URL. - ApiUrl string `json:"apiUrl"` +// Defines values for ParamsPropertyApmErrorCountGroupBy. +const ( + ParamsPropertyApmErrorCountGroupByErrorGroupingKey ParamsPropertyApmErrorCountGroupBy = "error.grouping_key" + ParamsPropertyApmErrorCountGroupByServiceEnvironment ParamsPropertyApmErrorCountGroupBy = "service.environment" + ParamsPropertyApmErrorCountGroupByServiceName ParamsPropertyApmErrorCountGroupBy = "service.name" + ParamsPropertyApmErrorCountGroupByTransactionName ParamsPropertyApmErrorCountGroupBy = "transaction.name" +) - // DefaultModel The generative artificial intelligence model for Google Gemini to use. - DefaultModel *string `json:"defaultModel,omitempty"` +// Defines values for ParamsPropertyApmErrorCountWindowUnit. +const ( + ParamsPropertyApmErrorCountWindowUnitD ParamsPropertyApmErrorCountWindowUnit = "d" + ParamsPropertyApmErrorCountWindowUnitH ParamsPropertyApmErrorCountWindowUnit = "h" + ParamsPropertyApmErrorCountWindowUnitM ParamsPropertyApmErrorCountWindowUnit = "m" +) - // GcpProjectID The Google ProjectID that has Vertex AI endpoint enabled. - GcpProjectID string `json:"gcpProjectID"` +// Defines values for ParamsPropertyApmTransactionDurationAggregationType. +const ( + ParamsPropertyApmTransactionDurationAggregationTypeAvg ParamsPropertyApmTransactionDurationAggregationType = "avg" + ParamsPropertyApmTransactionDurationAggregationTypeN95th ParamsPropertyApmTransactionDurationAggregationType = "95th" + ParamsPropertyApmTransactionDurationAggregationTypeN99th ParamsPropertyApmTransactionDurationAggregationType = "99th" +) - // GcpRegion The GCP region where the Vertex AI endpoint enabled. - GcpRegion string `json:"gcpRegion"` -} +// Defines values for ParamsPropertyApmTransactionDurationGroupBy. 
+const ( + ParamsPropertyApmTransactionDurationGroupByServiceEnvironment ParamsPropertyApmTransactionDurationGroupBy = "service.environment" + ParamsPropertyApmTransactionDurationGroupByServiceName ParamsPropertyApmTransactionDurationGroupBy = "service.name" + ParamsPropertyApmTransactionDurationGroupByTransactionName ParamsPropertyApmTransactionDurationGroupBy = "transaction.name" + ParamsPropertyApmTransactionDurationGroupByTransactionType ParamsPropertyApmTransactionDurationGroupBy = "transaction.type" +) -// GeminiSecrets Defines secrets for connectors when type is `.gemini`. -type GeminiSecrets struct { - // CredentialsJson The service account credentials JSON file. The service account should have Vertex AI user IAM role assigned to it. - CredentialsJson string `json:"credentialsJson"` -} +// Defines values for ParamsPropertyApmTransactionDurationWindowUnit. +const ( + ParamsPropertyApmTransactionDurationWindowUnitD ParamsPropertyApmTransactionDurationWindowUnit = "d" + ParamsPropertyApmTransactionDurationWindowUnitH ParamsPropertyApmTransactionDurationWindowUnit = "h" + ParamsPropertyApmTransactionDurationWindowUnitM ParamsPropertyApmTransactionDurationWindowUnit = "m" +) -// GenaiAzureConfig Defines properties for connectors when type is `.gen-ai` and the API provider is `Azure OpenAI`. -type GenaiAzureConfig struct { - // ApiProvider The OpenAI API provider. - ApiProvider GenaiAzureConfigApiProvider `json:"apiProvider"` +// Defines values for ParamsPropertyApmTransactionErrorRateGroupBy. +const ( + ParamsPropertyApmTransactionErrorRateGroupByServiceEnvironment ParamsPropertyApmTransactionErrorRateGroupBy = "service.environment" + ParamsPropertyApmTransactionErrorRateGroupByServiceName ParamsPropertyApmTransactionErrorRateGroupBy = "service.name" + ParamsPropertyApmTransactionErrorRateGroupByTransactionName ParamsPropertyApmTransactionErrorRateGroupBy = "transaction.name" + ParamsPropertyApmTransactionErrorRateGroupByTransactionType ParamsPropertyApmTransactionErrorRateGroupBy = "transaction.type" +) - // ApiUrl The OpenAI API endpoint. - ApiUrl string `json:"apiUrl"` -} +// Defines values for ParamsPropertyApmTransactionErrorRateWindowUnit. +const ( + ParamsPropertyApmTransactionErrorRateWindowUnitD ParamsPropertyApmTransactionErrorRateWindowUnit = "d" + ParamsPropertyApmTransactionErrorRateWindowUnitH ParamsPropertyApmTransactionErrorRateWindowUnit = "h" + ParamsPropertyApmTransactionErrorRateWindowUnitM ParamsPropertyApmTransactionErrorRateWindowUnit = "m" +) -// GenaiAzureConfigApiProvider The OpenAI API provider. -type GenaiAzureConfigApiProvider string +// Defines values for ParamsPropertyInfraInventoryCriteriaComparator. +const ( + ParamsPropertyInfraInventoryCriteriaComparatorBetween ParamsPropertyInfraInventoryCriteriaComparator = "between" + ParamsPropertyInfraInventoryCriteriaComparatorGreaterThan ParamsPropertyInfraInventoryCriteriaComparator = ">" + ParamsPropertyInfraInventoryCriteriaComparatorGreaterThanEqual ParamsPropertyInfraInventoryCriteriaComparator = ">=" + ParamsPropertyInfraInventoryCriteriaComparatorLessThan ParamsPropertyInfraInventoryCriteriaComparator = "<" + ParamsPropertyInfraInventoryCriteriaComparatorLessThanEqual ParamsPropertyInfraInventoryCriteriaComparator = "<=" + ParamsPropertyInfraInventoryCriteriaComparatorOutside ParamsPropertyInfraInventoryCriteriaComparator = "outside" +) -// GenaiOpenaiConfig Defines properties for connectors when type is `.gen-ai` and the API provider is `OpenAI`. 
-type GenaiOpenaiConfig struct { - // ApiProvider The OpenAI API provider. - ApiProvider GenaiOpenaiConfigApiProvider `json:"apiProvider"` +// Defines values for ParamsPropertyInfraInventoryCriteriaCustomMetricAggregation. +const ( + ParamsPropertyInfraInventoryCriteriaCustomMetricAggregationAvg ParamsPropertyInfraInventoryCriteriaCustomMetricAggregation = "avg" + ParamsPropertyInfraInventoryCriteriaCustomMetricAggregationMax ParamsPropertyInfraInventoryCriteriaCustomMetricAggregation = "max" + ParamsPropertyInfraInventoryCriteriaCustomMetricAggregationMin ParamsPropertyInfraInventoryCriteriaCustomMetricAggregation = "min" + ParamsPropertyInfraInventoryCriteriaCustomMetricAggregationRate ParamsPropertyInfraInventoryCriteriaCustomMetricAggregation = "rate" +) - // ApiUrl The OpenAI API endpoint. - ApiUrl string `json:"apiUrl"` +// Defines values for ParamsPropertyInfraInventoryCriteriaCustomMetricType. +const ( + ParamsPropertyInfraInventoryCriteriaCustomMetricTypeCustom ParamsPropertyInfraInventoryCriteriaCustomMetricType = "custom" +) - // DefaultModel The default model to use for requests. - DefaultModel *string `json:"defaultModel,omitempty"` -} +// Defines values for ParamsPropertyInfraInventoryCriteriaMetric. +const ( + ParamsPropertyInfraInventoryCriteriaMetricCount ParamsPropertyInfraInventoryCriteriaMetric = "count" + ParamsPropertyInfraInventoryCriteriaMetricCpu ParamsPropertyInfraInventoryCriteriaMetric = "cpu" + ParamsPropertyInfraInventoryCriteriaMetricCustom ParamsPropertyInfraInventoryCriteriaMetric = "custom" + ParamsPropertyInfraInventoryCriteriaMetricDiskIOReadBytes ParamsPropertyInfraInventoryCriteriaMetric = "diskIOReadBytes" + ParamsPropertyInfraInventoryCriteriaMetricDiskIOWriteBytes ParamsPropertyInfraInventoryCriteriaMetric = "diskIOWriteBytes" + ParamsPropertyInfraInventoryCriteriaMetricDiskLatency ParamsPropertyInfraInventoryCriteriaMetric = "diskLatency" + ParamsPropertyInfraInventoryCriteriaMetricLoad ParamsPropertyInfraInventoryCriteriaMetric = "load" + ParamsPropertyInfraInventoryCriteriaMetricLogRate ParamsPropertyInfraInventoryCriteriaMetric = "logRate" + ParamsPropertyInfraInventoryCriteriaMetricMemory ParamsPropertyInfraInventoryCriteriaMetric = "memory" + ParamsPropertyInfraInventoryCriteriaMetricMemoryTotal ParamsPropertyInfraInventoryCriteriaMetric = "memoryTotal" + ParamsPropertyInfraInventoryCriteriaMetricRdsActiveTransactions ParamsPropertyInfraInventoryCriteriaMetric = "rdsActiveTransactions" + ParamsPropertyInfraInventoryCriteriaMetricRdsConnections ParamsPropertyInfraInventoryCriteriaMetric = "rdsConnections" + ParamsPropertyInfraInventoryCriteriaMetricRdsLatency ParamsPropertyInfraInventoryCriteriaMetric = "rdsLatency" + ParamsPropertyInfraInventoryCriteriaMetricRdsQueriesExecuted ParamsPropertyInfraInventoryCriteriaMetric = "rdsQueriesExecuted" + ParamsPropertyInfraInventoryCriteriaMetricRx ParamsPropertyInfraInventoryCriteriaMetric = "rx" + ParamsPropertyInfraInventoryCriteriaMetricS3BucketSize ParamsPropertyInfraInventoryCriteriaMetric = "s3BucketSize" + ParamsPropertyInfraInventoryCriteriaMetricS3DownloadBytes ParamsPropertyInfraInventoryCriteriaMetric = "s3DownloadBytes" + ParamsPropertyInfraInventoryCriteriaMetricS3NumberOfObjects ParamsPropertyInfraInventoryCriteriaMetric = "s3NumberOfObjects" + ParamsPropertyInfraInventoryCriteriaMetricS3TotalRequests ParamsPropertyInfraInventoryCriteriaMetric = "s3TotalRequests" + ParamsPropertyInfraInventoryCriteriaMetricS3UploadBytes ParamsPropertyInfraInventoryCriteriaMetric = "s3UploadBytes" + 
ParamsPropertyInfraInventoryCriteriaMetricSqsMessagesDelayed ParamsPropertyInfraInventoryCriteriaMetric = "sqsMessagesDelayed" + ParamsPropertyInfraInventoryCriteriaMetricSqsMessagesEmpty ParamsPropertyInfraInventoryCriteriaMetric = "sqsMessagesEmpty" + ParamsPropertyInfraInventoryCriteriaMetricSqsMessagesSent ParamsPropertyInfraInventoryCriteriaMetric = "sqsMessagesSent" + ParamsPropertyInfraInventoryCriteriaMetricSqsMessagesVisible ParamsPropertyInfraInventoryCriteriaMetric = "sqsMessagesVisible" + ParamsPropertyInfraInventoryCriteriaMetricSqsOldestMessage ParamsPropertyInfraInventoryCriteriaMetric = "sqsOldestMessage" + ParamsPropertyInfraInventoryCriteriaMetricTx ParamsPropertyInfraInventoryCriteriaMetric = "tx" +) -// GenaiOpenaiConfigApiProvider The OpenAI API provider. -type GenaiOpenaiConfigApiProvider string +// Defines values for ParamsPropertyInfraInventoryCriteriaTimeUnit. +const ( + ParamsPropertyInfraInventoryCriteriaTimeUnitD ParamsPropertyInfraInventoryCriteriaTimeUnit = "d" + ParamsPropertyInfraInventoryCriteriaTimeUnitH ParamsPropertyInfraInventoryCriteriaTimeUnit = "h" + ParamsPropertyInfraInventoryCriteriaTimeUnitM ParamsPropertyInfraInventoryCriteriaTimeUnit = "m" + ParamsPropertyInfraInventoryCriteriaTimeUnitS ParamsPropertyInfraInventoryCriteriaTimeUnit = "s" +) -// GenaiOpenaiOtherConfig Defines properties for connectors when type is `.gen-ai` and the API provider is `Other` (OpenAI-compatible service), including optional PKI authentication. -type GenaiOpenaiOtherConfig struct { - // ApiProvider The OpenAI API provider. - ApiProvider GenaiOpenaiOtherConfigApiProvider `json:"apiProvider"` +// Defines values for ParamsPropertyInfraInventoryCriteriaWarningComparator. +const ( + ParamsPropertyInfraInventoryCriteriaWarningComparatorBetween ParamsPropertyInfraInventoryCriteriaWarningComparator = "between" + ParamsPropertyInfraInventoryCriteriaWarningComparatorGreaterThan ParamsPropertyInfraInventoryCriteriaWarningComparator = ">" + ParamsPropertyInfraInventoryCriteriaWarningComparatorGreaterThanEqual ParamsPropertyInfraInventoryCriteriaWarningComparator = ">=" + ParamsPropertyInfraInventoryCriteriaWarningComparatorLessThan ParamsPropertyInfraInventoryCriteriaWarningComparator = "<" + ParamsPropertyInfraInventoryCriteriaWarningComparatorLessThanEqual ParamsPropertyInfraInventoryCriteriaWarningComparator = "<=" + ParamsPropertyInfraInventoryCriteriaWarningComparatorOutside ParamsPropertyInfraInventoryCriteriaWarningComparator = "outside" +) - // ApiUrl The OpenAI-compatible API endpoint. - ApiUrl string `json:"apiUrl"` +// Defines values for ParamsPropertyInfraInventoryNodeType. +const ( + ParamsPropertyInfraInventoryNodeTypeAwsEC2 ParamsPropertyInfraInventoryNodeType = "awsEC2" + ParamsPropertyInfraInventoryNodeTypeAwsRDS ParamsPropertyInfraInventoryNodeType = "awsRDS" + ParamsPropertyInfraInventoryNodeTypeAwsS3 ParamsPropertyInfraInventoryNodeType = "awsS3" + ParamsPropertyInfraInventoryNodeTypeAwsSQS ParamsPropertyInfraInventoryNodeType = "awsSQS" + ParamsPropertyInfraInventoryNodeTypeContainer ParamsPropertyInfraInventoryNodeType = "container" + ParamsPropertyInfraInventoryNodeTypeHost ParamsPropertyInfraInventoryNodeType = "host" + ParamsPropertyInfraInventoryNodeTypePod ParamsPropertyInfraInventoryNodeType = "pod" +) - // CaData PEM-encoded CA certificate content. - CaData *string `json:"caData,omitempty"` +// Defines values for ParamsPropertyInfraMetricThresholdCriteria0AggType. 
+const ( + ParamsPropertyInfraMetricThresholdCriteria0AggTypeAvg ParamsPropertyInfraMetricThresholdCriteria0AggType = "avg" + ParamsPropertyInfraMetricThresholdCriteria0AggTypeCardinality ParamsPropertyInfraMetricThresholdCriteria0AggType = "cardinality" + ParamsPropertyInfraMetricThresholdCriteria0AggTypeCount ParamsPropertyInfraMetricThresholdCriteria0AggType = "count" + ParamsPropertyInfraMetricThresholdCriteria0AggTypeCustom ParamsPropertyInfraMetricThresholdCriteria0AggType = "custom" + ParamsPropertyInfraMetricThresholdCriteria0AggTypeMax ParamsPropertyInfraMetricThresholdCriteria0AggType = "max" + ParamsPropertyInfraMetricThresholdCriteria0AggTypeMin ParamsPropertyInfraMetricThresholdCriteria0AggType = "min" + ParamsPropertyInfraMetricThresholdCriteria0AggTypeP95 ParamsPropertyInfraMetricThresholdCriteria0AggType = "p95" + ParamsPropertyInfraMetricThresholdCriteria0AggTypeP99 ParamsPropertyInfraMetricThresholdCriteria0AggType = "p99" + ParamsPropertyInfraMetricThresholdCriteria0AggTypeRate ParamsPropertyInfraMetricThresholdCriteria0AggType = "rate" + ParamsPropertyInfraMetricThresholdCriteria0AggTypeSum ParamsPropertyInfraMetricThresholdCriteria0AggType = "sum" +) - // CertificateData PEM-encoded certificate content. - CertificateData *string `json:"certificateData,omitempty"` +// Defines values for ParamsPropertyInfraMetricThresholdCriteria0Comparator. +const ( + ParamsPropertyInfraMetricThresholdCriteria0ComparatorBetween ParamsPropertyInfraMetricThresholdCriteria0Comparator = "between" + ParamsPropertyInfraMetricThresholdCriteria0ComparatorGreaterThan ParamsPropertyInfraMetricThresholdCriteria0Comparator = ">" + ParamsPropertyInfraMetricThresholdCriteria0ComparatorGreaterThanEqual ParamsPropertyInfraMetricThresholdCriteria0Comparator = ">=" + ParamsPropertyInfraMetricThresholdCriteria0ComparatorLessThan ParamsPropertyInfraMetricThresholdCriteria0Comparator = "<" + ParamsPropertyInfraMetricThresholdCriteria0ComparatorLessThanEqual ParamsPropertyInfraMetricThresholdCriteria0Comparator = "<=" + ParamsPropertyInfraMetricThresholdCriteria0ComparatorOutside ParamsPropertyInfraMetricThresholdCriteria0Comparator = "outside" +) - // DefaultModel The default model to use for requests. - DefaultModel string `json:"defaultModel"` +// Defines values for ParamsPropertyInfraMetricThresholdCriteria0TimeUnit. +const ( + ParamsPropertyInfraMetricThresholdCriteria0TimeUnitD ParamsPropertyInfraMetricThresholdCriteria0TimeUnit = "d" + ParamsPropertyInfraMetricThresholdCriteria0TimeUnitH ParamsPropertyInfraMetricThresholdCriteria0TimeUnit = "h" + ParamsPropertyInfraMetricThresholdCriteria0TimeUnitM ParamsPropertyInfraMetricThresholdCriteria0TimeUnit = "m" + ParamsPropertyInfraMetricThresholdCriteria0TimeUnitS ParamsPropertyInfraMetricThresholdCriteria0TimeUnit = "s" +) - // Headers Custom headers to include in requests. - Headers *map[string]string `json:"headers,omitempty"` +// Defines values for ParamsPropertyInfraMetricThresholdCriteria0WarningComparator. 
+const ( + ParamsPropertyInfraMetricThresholdCriteria0WarningComparatorBetween ParamsPropertyInfraMetricThresholdCriteria0WarningComparator = "between" + ParamsPropertyInfraMetricThresholdCriteria0WarningComparatorGreaterThan ParamsPropertyInfraMetricThresholdCriteria0WarningComparator = ">" + ParamsPropertyInfraMetricThresholdCriteria0WarningComparatorGreaterThanEqual ParamsPropertyInfraMetricThresholdCriteria0WarningComparator = ">=" + ParamsPropertyInfraMetricThresholdCriteria0WarningComparatorLessThan ParamsPropertyInfraMetricThresholdCriteria0WarningComparator = "<" + ParamsPropertyInfraMetricThresholdCriteria0WarningComparatorLessThanEqual ParamsPropertyInfraMetricThresholdCriteria0WarningComparator = "<=" + ParamsPropertyInfraMetricThresholdCriteria0WarningComparatorOutside ParamsPropertyInfraMetricThresholdCriteria0WarningComparator = "outside" +) - // PrivateKeyData PEM-encoded private key content. - PrivateKeyData *string `json:"privateKeyData,omitempty"` +// Defines values for ParamsPropertyInfraMetricThresholdCriteria1AggType. +const ( + ParamsPropertyInfraMetricThresholdCriteria1AggTypeCount ParamsPropertyInfraMetricThresholdCriteria1AggType = "count" +) - // VerificationMode SSL verification mode for PKI authentication. - VerificationMode *GenaiOpenaiOtherConfigVerificationMode `json:"verificationMode,omitempty"` -} +// Defines values for ParamsPropertyInfraMetricThresholdCriteria1Comparator. +const ( + ParamsPropertyInfraMetricThresholdCriteria1ComparatorBetween ParamsPropertyInfraMetricThresholdCriteria1Comparator = "between" + ParamsPropertyInfraMetricThresholdCriteria1ComparatorGreaterThan ParamsPropertyInfraMetricThresholdCriteria1Comparator = ">" + ParamsPropertyInfraMetricThresholdCriteria1ComparatorGreaterThanEqual ParamsPropertyInfraMetricThresholdCriteria1Comparator = ">=" + ParamsPropertyInfraMetricThresholdCriteria1ComparatorLessThan ParamsPropertyInfraMetricThresholdCriteria1Comparator = "<" + ParamsPropertyInfraMetricThresholdCriteria1ComparatorLessThanEqual ParamsPropertyInfraMetricThresholdCriteria1Comparator = "<=" + ParamsPropertyInfraMetricThresholdCriteria1ComparatorOutside ParamsPropertyInfraMetricThresholdCriteria1Comparator = "outside" +) -// GenaiOpenaiOtherConfigApiProvider The OpenAI API provider. -type GenaiOpenaiOtherConfigApiProvider string +// Defines values for ParamsPropertyInfraMetricThresholdCriteria1TimeUnit. +const ( + ParamsPropertyInfraMetricThresholdCriteria1TimeUnitD ParamsPropertyInfraMetricThresholdCriteria1TimeUnit = "d" + ParamsPropertyInfraMetricThresholdCriteria1TimeUnitH ParamsPropertyInfraMetricThresholdCriteria1TimeUnit = "h" + ParamsPropertyInfraMetricThresholdCriteria1TimeUnitM ParamsPropertyInfraMetricThresholdCriteria1TimeUnit = "m" + ParamsPropertyInfraMetricThresholdCriteria1TimeUnitS ParamsPropertyInfraMetricThresholdCriteria1TimeUnit = "s" +) -// GenaiOpenaiOtherConfigVerificationMode SSL verification mode for PKI authentication. -type GenaiOpenaiOtherConfigVerificationMode string +// Defines values for ParamsPropertyInfraMetricThresholdCriteria1WarningComparator. 
+const ( + ParamsPropertyInfraMetricThresholdCriteria1WarningComparatorBetween ParamsPropertyInfraMetricThresholdCriteria1WarningComparator = "between" + ParamsPropertyInfraMetricThresholdCriteria1WarningComparatorGreaterThan ParamsPropertyInfraMetricThresholdCriteria1WarningComparator = ">" + ParamsPropertyInfraMetricThresholdCriteria1WarningComparatorGreaterThanEqual ParamsPropertyInfraMetricThresholdCriteria1WarningComparator = ">=" + ParamsPropertyInfraMetricThresholdCriteria1WarningComparatorLessThan ParamsPropertyInfraMetricThresholdCriteria1WarningComparator = "<" + ParamsPropertyInfraMetricThresholdCriteria1WarningComparatorLessThanEqual ParamsPropertyInfraMetricThresholdCriteria1WarningComparator = "<=" + ParamsPropertyInfraMetricThresholdCriteria1WarningComparatorOutside ParamsPropertyInfraMetricThresholdCriteria1WarningComparator = "outside" +) -// GenaiSecrets Defines secrets for connectors when type is `.gen-ai`. Supports both API key authentication (OpenAI, Azure OpenAI, and `Other`) and PKI authentication (`Other` provider only). PKI fields must be base64-encoded PEM content. -type GenaiSecrets struct { - // ApiKey The API key for authentication. For OpenAI and Azure OpenAI providers, it is required. For the `Other` provider, it is required if you do not use PKI authentication. With PKI, you can also optionally include an API key if the OpenAI-compatible service supports or requires one. - ApiKey *string `json:"apiKey,omitempty"` +// Defines values for ParamsPropertyInfraMetricThresholdCriteria2AggType. +const ( + ParamsPropertyInfraMetricThresholdCriteria2AggTypeCustom ParamsPropertyInfraMetricThresholdCriteria2AggType = "custom" +) - // CaData Base64-encoded PEM CA certificate content for PKI authentication (Other provider only). Optional. - CaData *string `json:"caData,omitempty"` +// Defines values for ParamsPropertyInfraMetricThresholdCriteria2Comparator. +const ( + ParamsPropertyInfraMetricThresholdCriteria2ComparatorBetween ParamsPropertyInfraMetricThresholdCriteria2Comparator = "between" + ParamsPropertyInfraMetricThresholdCriteria2ComparatorGreaterThan ParamsPropertyInfraMetricThresholdCriteria2Comparator = ">" + ParamsPropertyInfraMetricThresholdCriteria2ComparatorGreaterThanEqual ParamsPropertyInfraMetricThresholdCriteria2Comparator = ">=" + ParamsPropertyInfraMetricThresholdCriteria2ComparatorLessThan ParamsPropertyInfraMetricThresholdCriteria2Comparator = "<" + ParamsPropertyInfraMetricThresholdCriteria2ComparatorLessThanEqual ParamsPropertyInfraMetricThresholdCriteria2Comparator = "<=" + ParamsPropertyInfraMetricThresholdCriteria2ComparatorOutside ParamsPropertyInfraMetricThresholdCriteria2Comparator = "outside" +) - // CertificateData Base64-encoded PEM certificate content for PKI authentication (Other provider only). Required for PKI. - CertificateData *string `json:"certificateData,omitempty"` +// Defines values for ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0AggType. +const ( + Avg ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0AggType = "avg" + Cardinality ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0AggType = "cardinality" + Max ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0AggType = "max" + Min ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0AggType = "min" + Sum ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0AggType = "sum" +) + +// Defines values for ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1AggType. 
+const ( + Count ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1AggType = "count" +) + +// Defines values for ParamsPropertyInfraMetricThresholdCriteria2TimeUnit. +const ( + ParamsPropertyInfraMetricThresholdCriteria2TimeUnitD ParamsPropertyInfraMetricThresholdCriteria2TimeUnit = "d" + ParamsPropertyInfraMetricThresholdCriteria2TimeUnitH ParamsPropertyInfraMetricThresholdCriteria2TimeUnit = "h" + ParamsPropertyInfraMetricThresholdCriteria2TimeUnitM ParamsPropertyInfraMetricThresholdCriteria2TimeUnit = "m" + ParamsPropertyInfraMetricThresholdCriteria2TimeUnitS ParamsPropertyInfraMetricThresholdCriteria2TimeUnit = "s" +) + +// Defines values for ParamsPropertyInfraMetricThresholdCriteria2WarningComparator. +const ( + ParamsPropertyInfraMetricThresholdCriteria2WarningComparatorBetween ParamsPropertyInfraMetricThresholdCriteria2WarningComparator = "between" + ParamsPropertyInfraMetricThresholdCriteria2WarningComparatorGreaterThan ParamsPropertyInfraMetricThresholdCriteria2WarningComparator = ">" + ParamsPropertyInfraMetricThresholdCriteria2WarningComparatorGreaterThanEqual ParamsPropertyInfraMetricThresholdCriteria2WarningComparator = ">=" + ParamsPropertyInfraMetricThresholdCriteria2WarningComparatorLessThan ParamsPropertyInfraMetricThresholdCriteria2WarningComparator = "<" + ParamsPropertyInfraMetricThresholdCriteria2WarningComparatorLessThanEqual ParamsPropertyInfraMetricThresholdCriteria2WarningComparator = "<=" + ParamsPropertyInfraMetricThresholdCriteria2WarningComparatorOutside ParamsPropertyInfraMetricThresholdCriteria2WarningComparator = "outside" +) + +// Defines values for ParamsPropertyLogThreshold0CountComparator. +const ( + ParamsPropertyLogThreshold0CountComparatorDoesNotEqual ParamsPropertyLogThreshold0CountComparator = "does not equal" + ParamsPropertyLogThreshold0CountComparatorDoesNotMatch ParamsPropertyLogThreshold0CountComparator = "does not match" + ParamsPropertyLogThreshold0CountComparatorDoesNotMatchPhrase ParamsPropertyLogThreshold0CountComparator = "does not match phrase" + ParamsPropertyLogThreshold0CountComparatorEquals ParamsPropertyLogThreshold0CountComparator = "equals" + ParamsPropertyLogThreshold0CountComparatorLessThan ParamsPropertyLogThreshold0CountComparator = "less than" + ParamsPropertyLogThreshold0CountComparatorLessThanOrEquals ParamsPropertyLogThreshold0CountComparator = "less than or equals" + ParamsPropertyLogThreshold0CountComparatorMatches ParamsPropertyLogThreshold0CountComparator = "matches" + ParamsPropertyLogThreshold0CountComparatorMatchesPhrase ParamsPropertyLogThreshold0CountComparator = "matches phrase" + ParamsPropertyLogThreshold0CountComparatorMoreThan ParamsPropertyLogThreshold0CountComparator = "more than" + ParamsPropertyLogThreshold0CountComparatorMoreThanOrEquals ParamsPropertyLogThreshold0CountComparator = "more than or equals" +) + +// Defines values for ParamsPropertyLogThreshold0CriteriaComparator. 
+const ( + ParamsPropertyLogThreshold0CriteriaComparatorDoesNotEqual ParamsPropertyLogThreshold0CriteriaComparator = "does not equal" + ParamsPropertyLogThreshold0CriteriaComparatorDoesNotMatch ParamsPropertyLogThreshold0CriteriaComparator = "does not match" + ParamsPropertyLogThreshold0CriteriaComparatorDoesNotMatchPhrase ParamsPropertyLogThreshold0CriteriaComparator = "does not match phrase" + ParamsPropertyLogThreshold0CriteriaComparatorEquals ParamsPropertyLogThreshold0CriteriaComparator = "equals" + ParamsPropertyLogThreshold0CriteriaComparatorLessThan ParamsPropertyLogThreshold0CriteriaComparator = "less than" + ParamsPropertyLogThreshold0CriteriaComparatorLessThanOrEquals ParamsPropertyLogThreshold0CriteriaComparator = "less than or equals" + ParamsPropertyLogThreshold0CriteriaComparatorMatches ParamsPropertyLogThreshold0CriteriaComparator = "matches" + ParamsPropertyLogThreshold0CriteriaComparatorMatchesPhrase ParamsPropertyLogThreshold0CriteriaComparator = "matches phrase" + ParamsPropertyLogThreshold0CriteriaComparatorMoreThan ParamsPropertyLogThreshold0CriteriaComparator = "more than" + ParamsPropertyLogThreshold0CriteriaComparatorMoreThanOrEquals ParamsPropertyLogThreshold0CriteriaComparator = "more than or equals" +) + +// Defines values for ParamsPropertyLogThreshold0LogViewType. +const ( + ParamsPropertyLogThreshold0LogViewTypeLogViewReference ParamsPropertyLogThreshold0LogViewType = "log-view-reference" +) + +// Defines values for ParamsPropertyLogThreshold0TimeUnit. +const ( + ParamsPropertyLogThreshold0TimeUnitD ParamsPropertyLogThreshold0TimeUnit = "d" + ParamsPropertyLogThreshold0TimeUnitH ParamsPropertyLogThreshold0TimeUnit = "h" + ParamsPropertyLogThreshold0TimeUnitM ParamsPropertyLogThreshold0TimeUnit = "m" + ParamsPropertyLogThreshold0TimeUnitS ParamsPropertyLogThreshold0TimeUnit = "s" +) + +// Defines values for ParamsPropertyLogThreshold1CountComparator. +const ( + ParamsPropertyLogThreshold1CountComparatorDoesNotEqual ParamsPropertyLogThreshold1CountComparator = "does not equal" + ParamsPropertyLogThreshold1CountComparatorDoesNotMatch ParamsPropertyLogThreshold1CountComparator = "does not match" + ParamsPropertyLogThreshold1CountComparatorDoesNotMatchPhrase ParamsPropertyLogThreshold1CountComparator = "does not match phrase" + ParamsPropertyLogThreshold1CountComparatorEquals ParamsPropertyLogThreshold1CountComparator = "equals" + ParamsPropertyLogThreshold1CountComparatorLessThan ParamsPropertyLogThreshold1CountComparator = "less than" + ParamsPropertyLogThreshold1CountComparatorLessThanOrEquals ParamsPropertyLogThreshold1CountComparator = "less than or equals" + ParamsPropertyLogThreshold1CountComparatorMatches ParamsPropertyLogThreshold1CountComparator = "matches" + ParamsPropertyLogThreshold1CountComparatorMatchesPhrase ParamsPropertyLogThreshold1CountComparator = "matches phrase" + ParamsPropertyLogThreshold1CountComparatorMoreThan ParamsPropertyLogThreshold1CountComparator = "more than" + ParamsPropertyLogThreshold1CountComparatorMoreThanOrEquals ParamsPropertyLogThreshold1CountComparator = "more than or equals" +) + +// Defines values for ParamsPropertyLogThreshold1CriteriaComparator. 
+const ( + ParamsPropertyLogThreshold1CriteriaComparatorDoesNotEqual ParamsPropertyLogThreshold1CriteriaComparator = "does not equal" + ParamsPropertyLogThreshold1CriteriaComparatorDoesNotMatch ParamsPropertyLogThreshold1CriteriaComparator = "does not match" + ParamsPropertyLogThreshold1CriteriaComparatorDoesNotMatchPhrase ParamsPropertyLogThreshold1CriteriaComparator = "does not match phrase" + ParamsPropertyLogThreshold1CriteriaComparatorEquals ParamsPropertyLogThreshold1CriteriaComparator = "equals" + ParamsPropertyLogThreshold1CriteriaComparatorLessThan ParamsPropertyLogThreshold1CriteriaComparator = "less than" + ParamsPropertyLogThreshold1CriteriaComparatorLessThanOrEquals ParamsPropertyLogThreshold1CriteriaComparator = "less than or equals" + ParamsPropertyLogThreshold1CriteriaComparatorMatches ParamsPropertyLogThreshold1CriteriaComparator = "matches" + ParamsPropertyLogThreshold1CriteriaComparatorMatchesPhrase ParamsPropertyLogThreshold1CriteriaComparator = "matches phrase" + ParamsPropertyLogThreshold1CriteriaComparatorMoreThan ParamsPropertyLogThreshold1CriteriaComparator = "more than" + ParamsPropertyLogThreshold1CriteriaComparatorMoreThanOrEquals ParamsPropertyLogThreshold1CriteriaComparator = "more than or equals" +) + +// Defines values for ParamsPropertyLogThreshold1LogViewType. +const ( + ParamsPropertyLogThreshold1LogViewTypeLogViewReference ParamsPropertyLogThreshold1LogViewType = "log-view-reference" +) + +// Defines values for ParamsPropertyLogThreshold1TimeUnit. +const ( + ParamsPropertyLogThreshold1TimeUnitD ParamsPropertyLogThreshold1TimeUnit = "d" + ParamsPropertyLogThreshold1TimeUnitH ParamsPropertyLogThreshold1TimeUnit = "h" + ParamsPropertyLogThreshold1TimeUnitM ParamsPropertyLogThreshold1TimeUnit = "m" + ParamsPropertyLogThreshold1TimeUnitS ParamsPropertyLogThreshold1TimeUnit = "s" +) + +// Defines values for RunAcknowledgeResolvePagerdutyEventAction. +const ( + Acknowledge RunAcknowledgeResolvePagerdutyEventAction = "acknowledge" + Resolve RunAcknowledgeResolvePagerdutyEventAction = "resolve" +) + +// Defines values for RunAddeventSubAction. +const ( + AddEvent RunAddeventSubAction = "addEvent" +) + +// Defines values for RunClosealertSubAction. +const ( + CloseAlert RunClosealertSubAction = "closeAlert" +) + +// Defines values for RunCloseincidentSubAction. +const ( + CloseIncident RunCloseincidentSubAction = "closeIncident" +) + +// Defines values for RunCreatealertSubAction. +const ( + CreateAlert RunCreatealertSubAction = "createAlert" +) + +// Defines values for RunCreatealertSubActionParamsPriority. +const ( + P1 RunCreatealertSubActionParamsPriority = "P1" + P2 RunCreatealertSubActionParamsPriority = "P2" + P3 RunCreatealertSubActionParamsPriority = "P3" + P4 RunCreatealertSubActionParamsPriority = "P4" + P5 RunCreatealertSubActionParamsPriority = "P5" +) + +// Defines values for RunCreatealertSubActionParamsRespondersType. +const ( + RunCreatealertSubActionParamsRespondersTypeEscalation RunCreatealertSubActionParamsRespondersType = "escalation" + RunCreatealertSubActionParamsRespondersTypeSchedule RunCreatealertSubActionParamsRespondersType = "schedule" + RunCreatealertSubActionParamsRespondersTypeTeam RunCreatealertSubActionParamsRespondersType = "team" + RunCreatealertSubActionParamsRespondersTypeUser RunCreatealertSubActionParamsRespondersType = "user" +) + +// Defines values for RunCreatealertSubActionParamsVisibleToType. 
+const ( + RunCreatealertSubActionParamsVisibleToTypeTeam RunCreatealertSubActionParamsVisibleToType = "team" + RunCreatealertSubActionParamsVisibleToTypeUser RunCreatealertSubActionParamsVisibleToType = "user" +) + +// Defines values for RunFieldsbyissuetypeSubAction. +const ( + FieldsByIssueType RunFieldsbyissuetypeSubAction = "fieldsByIssueType" +) + +// Defines values for RunGetagentdetailsSubAction. +const ( + GetAgentDetails RunGetagentdetailsSubAction = "getAgentDetails" +) + +// Defines values for RunGetagentsSubAction. +const ( + GetAgents RunGetagentsSubAction = "getAgents" +) + +// Defines values for RunGetchoicesSubAction. +const ( + GetChoices RunGetchoicesSubAction = "getChoices" +) + +// Defines values for RunGetfieldsSubAction. +const ( + GetFields RunGetfieldsSubAction = "getFields" +) + +// Defines values for RunGetincidentSubAction. +const ( + GetIncident RunGetincidentSubAction = "getIncident" +) + +// Defines values for RunIssueSubAction. +const ( + Issue RunIssueSubAction = "issue" +) + +// Defines values for RunIssuesSubAction. +const ( + Issues RunIssuesSubAction = "issues" +) + +// Defines values for RunIssuetypesSubAction. +const ( + IssueTypes RunIssuetypesSubAction = "issueTypes" +) + +// Defines values for RunMessageServerlogLevel. +const ( + RunMessageServerlogLevelDebug RunMessageServerlogLevel = "debug" + RunMessageServerlogLevelError RunMessageServerlogLevel = "error" + RunMessageServerlogLevelFatal RunMessageServerlogLevel = "fatal" + RunMessageServerlogLevelInfo RunMessageServerlogLevel = "info" + RunMessageServerlogLevelTrace RunMessageServerlogLevel = "trace" + RunMessageServerlogLevelWarn RunMessageServerlogLevel = "warn" +) + +// Defines values for RunPostmessageSubAction. +const ( + PostMessage RunPostmessageSubAction = "postMessage" +) + +// Defines values for RunPushtoserviceSubAction. +const ( + PushToService RunPushtoserviceSubAction = "pushToService" +) + +// Defines values for RunTriggerPagerdutyEventAction. +const ( + Trigger RunTriggerPagerdutyEventAction = "trigger" +) + +// Defines values for RunTriggerPagerdutySeverity. +const ( + RunTriggerPagerdutySeverityCritical RunTriggerPagerdutySeverity = "critical" + RunTriggerPagerdutySeverityError RunTriggerPagerdutySeverity = "error" + RunTriggerPagerdutySeverityInfo RunTriggerPagerdutySeverity = "info" + RunTriggerPagerdutySeverityWarning RunTriggerPagerdutySeverity = "warning" +) + +// Defines values for RunValidchannelidSubAction. +const ( + ValidChannelId RunValidchannelidSubAction = "validChannelId" +) + +// Defines values for ServerHostSslClientAuth. +const ( + ServerHostSslClientAuthNone ServerHostSslClientAuth = "none" + ServerHostSslClientAuthOptional ServerHostSslClientAuth = "optional" + ServerHostSslClientAuthRequired ServerHostSslClientAuth = "required" +) + +// Defines values for SwimlaneConfigConnectorType. +const ( + SwimlaneConfigConnectorTypeAlerts SwimlaneConfigConnectorType = "alerts" + SwimlaneConfigConnectorTypeAll SwimlaneConfigConnectorType = "all" + SwimlaneConfigConnectorTypeCases SwimlaneConfigConnectorType = "cases" +) + +// Defines values for Thresholdcomparator. 
+const ( + ThresholdcomparatorBetween Thresholdcomparator = "between" + ThresholdcomparatorGreaterThan Thresholdcomparator = ">" + ThresholdcomparatorGreaterThanEqual Thresholdcomparator = ">=" + ThresholdcomparatorLessThan Thresholdcomparator = "<" + ThresholdcomparatorLessThanEqual Thresholdcomparator = "<=" + ThresholdcomparatorNotBetween Thresholdcomparator = "notBetween" +) + +// Defines values for Timewindowunit. +const ( + TimewindowunitD Timewindowunit = "d" + TimewindowunitH Timewindowunit = "h" + TimewindowunitM Timewindowunit = "m" + TimewindowunitS Timewindowunit = "s" +) + +// Defines values for UpdateOutputElasticsearchPreset. +const ( + UpdateOutputElasticsearchPresetBalanced UpdateOutputElasticsearchPreset = "balanced" + UpdateOutputElasticsearchPresetCustom UpdateOutputElasticsearchPreset = "custom" + UpdateOutputElasticsearchPresetLatency UpdateOutputElasticsearchPreset = "latency" + UpdateOutputElasticsearchPresetScale UpdateOutputElasticsearchPreset = "scale" + UpdateOutputElasticsearchPresetThroughput UpdateOutputElasticsearchPreset = "throughput" +) + +// Defines values for UpdateOutputElasticsearchType. +const ( + Elasticsearch UpdateOutputElasticsearchType = "elasticsearch" +) + +// Defines values for UpdateOutputKafkaAuthType. +const ( + UpdateOutputKafkaAuthTypeKerberos UpdateOutputKafkaAuthType = "kerberos" + UpdateOutputKafkaAuthTypeNone UpdateOutputKafkaAuthType = "none" + UpdateOutputKafkaAuthTypeSsl UpdateOutputKafkaAuthType = "ssl" + UpdateOutputKafkaAuthTypeUserPass UpdateOutputKafkaAuthType = "user_pass" +) + +// Defines values for UpdateOutputKafkaCompression. +const ( + UpdateOutputKafkaCompressionGzip UpdateOutputKafkaCompression = "gzip" + UpdateOutputKafkaCompressionLz4 UpdateOutputKafkaCompression = "lz4" + UpdateOutputKafkaCompressionNone UpdateOutputKafkaCompression = "none" + UpdateOutputKafkaCompressionSnappy UpdateOutputKafkaCompression = "snappy" +) + +// Defines values for UpdateOutputKafkaPartition. +const ( + Hash UpdateOutputKafkaPartition = "hash" + Random UpdateOutputKafkaPartition = "random" + RoundRobin UpdateOutputKafkaPartition = "round_robin" +) + +// Defines values for UpdateOutputKafkaRequiredAcks. +const ( + UpdateOutputKafkaRequiredAcksMinus1 UpdateOutputKafkaRequiredAcks = -1 + UpdateOutputKafkaRequiredAcksN0 UpdateOutputKafkaRequiredAcks = 0 + UpdateOutputKafkaRequiredAcksN1 UpdateOutputKafkaRequiredAcks = 1 +) + +// Defines values for UpdateOutputKafkaSaslMechanism. +const ( + PLAIN UpdateOutputKafkaSaslMechanism = "PLAIN" + SCRAMSHA256 UpdateOutputKafkaSaslMechanism = "SCRAM-SHA-256" + SCRAMSHA512 UpdateOutputKafkaSaslMechanism = "SCRAM-SHA-512" +) + +// Defines values for UpdateOutputKafkaType. +const ( + Kafka UpdateOutputKafkaType = "kafka" +) + +// Defines values for UpdateOutputLogstashType. +const ( + Logstash UpdateOutputLogstashType = "logstash" +) + +// Defines values for UpdateOutputRemoteElasticsearchPreset. +const ( + Balanced UpdateOutputRemoteElasticsearchPreset = "balanced" + Custom UpdateOutputRemoteElasticsearchPreset = "custom" + Latency UpdateOutputRemoteElasticsearchPreset = "latency" + Scale UpdateOutputRemoteElasticsearchPreset = "scale" + Throughput UpdateOutputRemoteElasticsearchPreset = "throughput" +) + +// Defines values for UpdateOutputRemoteElasticsearchType. +const ( + RemoteElasticsearch UpdateOutputRemoteElasticsearchType = "remote_elasticsearch" +) + +// Defines values for UpdateOutputSslVerificationMode. 
+const ( + UpdateOutputSslVerificationModeCertificate UpdateOutputSslVerificationMode = "certificate" + UpdateOutputSslVerificationModeFull UpdateOutputSslVerificationMode = "full" + UpdateOutputSslVerificationModeNone UpdateOutputSslVerificationMode = "none" + UpdateOutputSslVerificationModeStrict UpdateOutputSslVerificationMode = "strict" +) + +// Defines values for VerificationMode. +const ( + VerificationModeCertificate VerificationMode = "certificate" + VerificationModeFull VerificationMode = "full" + VerificationModeNone VerificationMode = "none" +) + +// Defines values for WebhookConfigMethod. +const ( + WebhookConfigMethodPost WebhookConfigMethod = "post" + WebhookConfigMethodPut WebhookConfigMethod = "put" +) + +// Defines values for APMUIElasticApiVersion. +const ( + APMUIElasticApiVersionN20231031 APMUIElasticApiVersion = "2023-10-31" +) + +// Defines values for CasesSeverity. +const ( + CasesSeverityCritical CasesSeverity = "critical" + CasesSeverityHigh CasesSeverity = "high" + CasesSeverityLow CasesSeverity = "low" + CasesSeverityMedium CasesSeverity = "medium" +) + +// Defines values for CasesSortField. +const ( + CasesSortFieldCategory CasesSortField = "category" + CasesSortFieldClosedAt CasesSortField = "closedAt" + CasesSortFieldCreatedAt CasesSortField = "createdAt" + CasesSortFieldSeverity CasesSortField = "severity" + CasesSortFieldStatus CasesSortField = "status" + CasesSortFieldTitle CasesSortField = "title" + CasesSortFieldUpdatedAt CasesSortField = "updatedAt" +) + +// Defines values for CasesSortOrder. +const ( + CasesSortOrderAsc CasesSortOrder = "asc" + CasesSortOrderDesc CasesSortOrder = "desc" +) + +// Defines values for CasesStatus. +const ( + CasesStatusClosed CasesStatus = "closed" + CasesStatusInProgress CasesStatus = "in-progress" + CasesStatusOpen CasesStatus = "open" +) + +// Defines values for PostAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore. +const ( + PostAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStoreAppState PostAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore = "appState" + PostAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStoreGlobalState PostAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore = "globalState" +) + +// Defines values for PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays. +const ( + PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDaysN1 PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 1 + PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDaysN2 PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 2 + PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDaysN3 PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 3 + PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDaysN4 PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 4 + PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDaysN5 PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 5 + PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDaysN6 PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 6 + PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDaysN7 PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 7 +) + +// Defines values for PostAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen. 
+const ( + PostAlertingRuleIdJSONBodyActionsFrequencyNotifyWhenOnActionGroupChange PostAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen = "onActionGroupChange" + PostAlertingRuleIdJSONBodyActionsFrequencyNotifyWhenOnActiveAlert PostAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen = "onActiveAlert" + PostAlertingRuleIdJSONBodyActionsFrequencyNotifyWhenOnThrottleInterval PostAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen = "onThrottleInterval" +) + +// Defines values for PostAlertingRuleIdJSONBodyNotifyWhen. +const ( + PostAlertingRuleIdJSONBodyNotifyWhenOnActionGroupChange PostAlertingRuleIdJSONBodyNotifyWhen = "onActionGroupChange" + PostAlertingRuleIdJSONBodyNotifyWhenOnActiveAlert PostAlertingRuleIdJSONBodyNotifyWhen = "onActiveAlert" + PostAlertingRuleIdJSONBodyNotifyWhenOnThrottleInterval PostAlertingRuleIdJSONBodyNotifyWhen = "onThrottleInterval" +) + +// Defines values for PutAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore. +const ( + PutAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStoreAppState PutAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore = "appState" + PutAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStoreGlobalState PutAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore = "globalState" +) + +// Defines values for PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays. +const ( + N1 PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 1 + N2 PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 2 + N3 PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 3 + N4 PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 4 + N5 PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 5 + N6 PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 6 + N7 PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays = 7 +) + +// Defines values for PutAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen. +const ( + PutAlertingRuleIdJSONBodyActionsFrequencyNotifyWhenOnActionGroupChange PutAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen = "onActionGroupChange" + PutAlertingRuleIdJSONBodyActionsFrequencyNotifyWhenOnActiveAlert PutAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen = "onActiveAlert" + PutAlertingRuleIdJSONBodyActionsFrequencyNotifyWhenOnThrottleInterval PutAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen = "onThrottleInterval" +) + +// Defines values for PutAlertingRuleIdJSONBodyNotifyWhen. +const ( + OnActionGroupChange PutAlertingRuleIdJSONBodyNotifyWhen = "onActionGroupChange" + OnActiveAlert PutAlertingRuleIdJSONBodyNotifyWhen = "onActiveAlert" + OnThrottleInterval PutAlertingRuleIdJSONBodyNotifyWhen = "onThrottleInterval" +) + +// Defines values for GetAlertingRulesFindParamsDefaultSearchOperator. +const ( + AND GetAlertingRulesFindParamsDefaultSearchOperator = "AND" + OR GetAlertingRulesFindParamsDefaultSearchOperator = "OR" +) + +// Defines values for GetAlertingRulesFindParamsSortOrder. +const ( + GetAlertingRulesFindParamsSortOrderAsc GetAlertingRulesFindParamsSortOrder = "asc" + GetAlertingRulesFindParamsSortOrderDesc GetAlertingRulesFindParamsSortOrder = "desc" +) + +// Defines values for CreateAgentKeyParamsElasticApiVersion. +const ( + CreateAgentKeyParamsElasticApiVersionN20231031 CreateAgentKeyParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for SaveApmServerSchemaParamsElasticApiVersion. 
+const ( + SaveApmServerSchemaParamsElasticApiVersionN20231031 SaveApmServerSchemaParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for CreateAnnotationParamsElasticApiVersion. +const ( + CreateAnnotationParamsElasticApiVersionN20231031 CreateAnnotationParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for GetAnnotationParamsElasticApiVersion. +const ( + GetAnnotationParamsElasticApiVersionN20231031 GetAnnotationParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for DeleteAgentConfigurationParamsElasticApiVersion. +const ( + DeleteAgentConfigurationParamsElasticApiVersionN20231031 DeleteAgentConfigurationParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for GetAgentConfigurationsParamsElasticApiVersion. +const ( + GetAgentConfigurationsParamsElasticApiVersionN20231031 GetAgentConfigurationsParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for CreateUpdateAgentConfigurationParamsElasticApiVersion. +const ( + CreateUpdateAgentConfigurationParamsElasticApiVersionN20231031 CreateUpdateAgentConfigurationParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for GetAgentNameForServiceParamsElasticApiVersion. +const ( + GetAgentNameForServiceParamsElasticApiVersionN20231031 GetAgentNameForServiceParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for GetEnvironmentsForServiceParamsElasticApiVersion. +const ( + GetEnvironmentsForServiceParamsElasticApiVersionN20231031 GetEnvironmentsForServiceParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for SearchSingleConfigurationParamsElasticApiVersion. +const ( + SearchSingleConfigurationParamsElasticApiVersionN20231031 SearchSingleConfigurationParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for GetSingleAgentConfigurationParamsElasticApiVersion. +const ( + GetSingleAgentConfigurationParamsElasticApiVersionN20231031 GetSingleAgentConfigurationParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for GetSourceMapsParamsElasticApiVersion. +const ( + GetSourceMapsParamsElasticApiVersionN20231031 GetSourceMapsParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for UploadSourceMapParamsElasticApiVersion. +const ( + UploadSourceMapParamsElasticApiVersionN20231031 UploadSourceMapParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for DeleteSourceMapParamsElasticApiVersion. +const ( + N20231031 DeleteSourceMapParamsElasticApiVersion = "2023-10-31" +) + +// Defines values for DeleteAssetCriticalityRecordParamsRefresh. +const ( + DeleteAssetCriticalityRecordParamsRefreshWaitFor DeleteAssetCriticalityRecordParamsRefresh = "wait_for" +) + +// Defines values for CreateAssetCriticalityRecordJSONBodyRefresh. +const ( + CreateAssetCriticalityRecordJSONBodyRefreshWaitFor CreateAssetCriticalityRecordJSONBodyRefresh = "wait_for" +) + +// Defines values for FindAssetCriticalityRecordsParamsSortField. +const ( + CriticalityLevel FindAssetCriticalityRecordsParamsSortField = "criticality_level" + IdField FindAssetCriticalityRecordsParamsSortField = "id_field" + IdValue FindAssetCriticalityRecordsParamsSortField = "id_value" + Timestamp FindAssetCriticalityRecordsParamsSortField = "@timestamp" +) + +// Defines values for FindAssetCriticalityRecordsParamsSortDirection. 
+const ( + FindAssetCriticalityRecordsParamsSortDirectionAsc FindAssetCriticalityRecordsParamsSortDirection = "asc" + FindAssetCriticalityRecordsParamsSortDirectionDesc FindAssetCriticalityRecordsParamsSortDirection = "desc" +) + +// Defines values for FindCasesDefaultSpaceParamsSeverity. +const ( + FindCasesDefaultSpaceParamsSeverityCritical FindCasesDefaultSpaceParamsSeverity = "critical" + FindCasesDefaultSpaceParamsSeverityHigh FindCasesDefaultSpaceParamsSeverity = "high" + FindCasesDefaultSpaceParamsSeverityLow FindCasesDefaultSpaceParamsSeverity = "low" + FindCasesDefaultSpaceParamsSeverityMedium FindCasesDefaultSpaceParamsSeverity = "medium" +) + +// Defines values for FindCasesDefaultSpaceParamsSortField. +const ( + FindCasesDefaultSpaceParamsSortFieldCategory FindCasesDefaultSpaceParamsSortField = "category" + FindCasesDefaultSpaceParamsSortFieldClosedAt FindCasesDefaultSpaceParamsSortField = "closedAt" + FindCasesDefaultSpaceParamsSortFieldCreatedAt FindCasesDefaultSpaceParamsSortField = "createdAt" + FindCasesDefaultSpaceParamsSortFieldSeverity FindCasesDefaultSpaceParamsSortField = "severity" + FindCasesDefaultSpaceParamsSortFieldStatus FindCasesDefaultSpaceParamsSortField = "status" + FindCasesDefaultSpaceParamsSortFieldTitle FindCasesDefaultSpaceParamsSortField = "title" + FindCasesDefaultSpaceParamsSortFieldUpdatedAt FindCasesDefaultSpaceParamsSortField = "updatedAt" +) + +// Defines values for FindCasesDefaultSpaceParamsSortOrder. +const ( + FindCasesDefaultSpaceParamsSortOrderAsc FindCasesDefaultSpaceParamsSortOrder = "asc" + FindCasesDefaultSpaceParamsSortOrderDesc FindCasesDefaultSpaceParamsSortOrder = "desc" +) + +// Defines values for FindCasesDefaultSpaceParamsStatus. +const ( + FindCasesDefaultSpaceParamsStatusClosed FindCasesDefaultSpaceParamsStatus = "closed" + FindCasesDefaultSpaceParamsStatusInProgress FindCasesDefaultSpaceParamsStatus = "in-progress" + FindCasesDefaultSpaceParamsStatusOpen FindCasesDefaultSpaceParamsStatus = "open" +) + +// Defines values for FindCaseCommentsDefaultSpaceParamsSortOrder. +const ( + FindCaseCommentsDefaultSpaceParamsSortOrderAsc FindCaseCommentsDefaultSpaceParamsSortOrder = "asc" + FindCaseCommentsDefaultSpaceParamsSortOrderDesc FindCaseCommentsDefaultSpaceParamsSortOrder = "desc" +) + +// Defines values for FindCaseActivityDefaultSpaceParamsSortOrder. +const ( + FindCaseActivityDefaultSpaceParamsSortOrderAsc FindCaseActivityDefaultSpaceParamsSortOrder = "asc" + FindCaseActivityDefaultSpaceParamsSortOrderDesc FindCaseActivityDefaultSpaceParamsSortOrder = "desc" +) + +// Defines values for FindCaseActivityDefaultSpaceParamsTypes. 
+const ( + FindCaseActivityDefaultSpaceParamsTypesAction FindCaseActivityDefaultSpaceParamsTypes = "action" + FindCaseActivityDefaultSpaceParamsTypesAlert FindCaseActivityDefaultSpaceParamsTypes = "alert" + FindCaseActivityDefaultSpaceParamsTypesAssignees FindCaseActivityDefaultSpaceParamsTypes = "assignees" + FindCaseActivityDefaultSpaceParamsTypesAttachment FindCaseActivityDefaultSpaceParamsTypes = "attachment" + FindCaseActivityDefaultSpaceParamsTypesComment FindCaseActivityDefaultSpaceParamsTypes = "comment" + FindCaseActivityDefaultSpaceParamsTypesConnector FindCaseActivityDefaultSpaceParamsTypes = "connector" + FindCaseActivityDefaultSpaceParamsTypesCreateCase FindCaseActivityDefaultSpaceParamsTypes = "create_case" + FindCaseActivityDefaultSpaceParamsTypesDescription FindCaseActivityDefaultSpaceParamsTypes = "description" + FindCaseActivityDefaultSpaceParamsTypesPushed FindCaseActivityDefaultSpaceParamsTypes = "pushed" + FindCaseActivityDefaultSpaceParamsTypesSettings FindCaseActivityDefaultSpaceParamsTypes = "settings" + FindCaseActivityDefaultSpaceParamsTypesSeverity FindCaseActivityDefaultSpaceParamsTypes = "severity" + FindCaseActivityDefaultSpaceParamsTypesStatus FindCaseActivityDefaultSpaceParamsTypes = "status" + FindCaseActivityDefaultSpaceParamsTypesTags FindCaseActivityDefaultSpaceParamsTypes = "tags" + FindCaseActivityDefaultSpaceParamsTypesTitle FindCaseActivityDefaultSpaceParamsTypes = "title" + FindCaseActivityDefaultSpaceParamsTypesUser FindCaseActivityDefaultSpaceParamsTypes = "user" +) + +// Defines values for RulePreviewJSONBody0Type. +const ( + RulePreviewJSONBody0TypeEql RulePreviewJSONBody0Type = "eql" +) + +// Defines values for RulePreviewJSONBody1Type. +const ( + Query RulePreviewJSONBody1Type = "query" +) + +// Defines values for RulePreviewJSONBody2Type. +const ( + SavedQuery RulePreviewJSONBody2Type = "saved_query" +) + +// Defines values for RulePreviewJSONBody3Type. +const ( + RulePreviewJSONBody3TypeThreshold RulePreviewJSONBody3Type = "threshold" +) + +// Defines values for RulePreviewJSONBody4Type. +const ( + ThreatMatch RulePreviewJSONBody4Type = "threat_match" +) + +// Defines values for RulePreviewJSONBody5Type. +const ( + MachineLearning RulePreviewJSONBody5Type = "machine_learning" +) + +// Defines values for RulePreviewJSONBody6Type. +const ( + NewTerms RulePreviewJSONBody6Type = "new_terms" +) + +// Defines values for RulePreviewJSONBody7Type. +const ( + Esql RulePreviewJSONBody7Type = "esql" +) + +// Defines values for FindEndpointListItemsParamsSortOrder. +const ( + FindEndpointListItemsParamsSortOrderAsc FindEndpointListItemsParamsSortOrder = "asc" + FindEndpointListItemsParamsSortOrderDesc FindEndpointListItemsParamsSortOrder = "desc" +) + +// Defines values for ListEntitiesParamsSortOrder. +const ( + ListEntitiesParamsSortOrderAsc ListEntitiesParamsSortOrder = "asc" + ListEntitiesParamsSortOrderDesc ListEntitiesParamsSortOrder = "desc" +) + +// Defines values for DuplicateExceptionListParamsIncludeExpiredExceptions. +const ( + DuplicateExceptionListParamsIncludeExpiredExceptionsFalse DuplicateExceptionListParamsIncludeExpiredExceptions = "false" + DuplicateExceptionListParamsIncludeExpiredExceptionsTrue DuplicateExceptionListParamsIncludeExpiredExceptions = "true" +) + +// Defines values for ExportExceptionListParamsIncludeExpiredExceptions. 
+const ( + ExportExceptionListParamsIncludeExpiredExceptionsFalse ExportExceptionListParamsIncludeExpiredExceptions = "false" + ExportExceptionListParamsIncludeExpiredExceptionsTrue ExportExceptionListParamsIncludeExpiredExceptions = "true" +) + +// Defines values for FindExceptionListsParamsSortOrder. +const ( + FindExceptionListsParamsSortOrderAsc FindExceptionListsParamsSortOrder = "asc" + FindExceptionListsParamsSortOrderDesc FindExceptionListsParamsSortOrder = "desc" +) + +// Defines values for FindExceptionListItemsParamsSortOrder. +const ( + FindExceptionListItemsParamsSortOrderAsc FindExceptionListItemsParamsSortOrder = "asc" + FindExceptionListItemsParamsSortOrderDesc FindExceptionListItemsParamsSortOrder = "desc" +) + +// Defines values for GetFleetAgentPoliciesParamsSortOrder. +const ( + GetFleetAgentPoliciesParamsSortOrderAsc GetFleetAgentPoliciesParamsSortOrder = "asc" + GetFleetAgentPoliciesParamsSortOrderDesc GetFleetAgentPoliciesParamsSortOrder = "desc" +) + +// Defines values for GetFleetAgentPoliciesParamsFormat. +const ( + GetFleetAgentPoliciesParamsFormatLegacy GetFleetAgentPoliciesParamsFormat = "legacy" + GetFleetAgentPoliciesParamsFormatSimplified GetFleetAgentPoliciesParamsFormat = "simplified" +) + +// Defines values for PostFleetAgentPoliciesJSONBodyMonitoringEnabled. +const ( + PostFleetAgentPoliciesJSONBodyMonitoringEnabledLogs PostFleetAgentPoliciesJSONBodyMonitoringEnabled = "logs" + PostFleetAgentPoliciesJSONBodyMonitoringEnabledMetrics PostFleetAgentPoliciesJSONBodyMonitoringEnabled = "metrics" + PostFleetAgentPoliciesJSONBodyMonitoringEnabledTraces PostFleetAgentPoliciesJSONBodyMonitoringEnabled = "traces" +) + +// Defines values for PostFleetAgentPoliciesBulkGetParamsFormat. +const ( + PostFleetAgentPoliciesBulkGetParamsFormatLegacy PostFleetAgentPoliciesBulkGetParamsFormat = "legacy" + PostFleetAgentPoliciesBulkGetParamsFormatSimplified PostFleetAgentPoliciesBulkGetParamsFormat = "simplified" +) + +// Defines values for GetFleetAgentPoliciesAgentpolicyidParamsFormat. +const ( + GetFleetAgentPoliciesAgentpolicyidParamsFormatLegacy GetFleetAgentPoliciesAgentpolicyidParamsFormat = "legacy" + GetFleetAgentPoliciesAgentpolicyidParamsFormatSimplified GetFleetAgentPoliciesAgentpolicyidParamsFormat = "simplified" +) + +// Defines values for PutFleetAgentPoliciesAgentpolicyidParamsFormat. +const ( + PutFleetAgentPoliciesAgentpolicyidParamsFormatLegacy PutFleetAgentPoliciesAgentpolicyidParamsFormat = "legacy" + PutFleetAgentPoliciesAgentpolicyidParamsFormatSimplified PutFleetAgentPoliciesAgentpolicyidParamsFormat = "simplified" +) + +// Defines values for PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled. +const ( + PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabledLogs PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled = "logs" + PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabledMetrics PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled = "metrics" + PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabledTraces PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled = "traces" +) + +// Defines values for PostFleetAgentPoliciesAgentpolicyidCopyParamsFormat. +const ( + PostFleetAgentPoliciesAgentpolicyidCopyParamsFormatLegacy PostFleetAgentPoliciesAgentpolicyidCopyParamsFormat = "legacy" + PostFleetAgentPoliciesAgentpolicyidCopyParamsFormatSimplified PostFleetAgentPoliciesAgentpolicyidCopyParamsFormat = "simplified" +) + +// Defines values for GetFleetAgentsParamsSortOrder. 
+const ( + GetFleetAgentsParamsSortOrderAsc GetFleetAgentsParamsSortOrder = "asc" + GetFleetAgentsParamsSortOrderDesc GetFleetAgentsParamsSortOrder = "desc" +) + +// Defines values for PostFleetAgentsBulkRequestDiagnosticsJSONBodyAdditionalMetrics. +const ( + PostFleetAgentsBulkRequestDiagnosticsJSONBodyAdditionalMetricsCPU PostFleetAgentsBulkRequestDiagnosticsJSONBodyAdditionalMetrics = "CPU" +) + +// Defines values for PostFleetAgentsAgentidActionsJSONBodyAction0Type. +const ( + POLICYREASSIGN PostFleetAgentsAgentidActionsJSONBodyAction0Type = "POLICY_REASSIGN" + UNENROLL PostFleetAgentsAgentidActionsJSONBodyAction0Type = "UNENROLL" + UPGRADE PostFleetAgentsAgentidActionsJSONBodyAction0Type = "UPGRADE" +) + +// Defines values for PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevel. +const ( + PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevelDebug PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevel = "debug" + PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevelError PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevel = "error" + PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevelInfo PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevel = "info" + PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevelWarning PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevel = "warning" +) + +// Defines values for PostFleetAgentsAgentidActionsJSONBodyAction1Type. +const ( + SETTINGS PostFleetAgentsAgentidActionsJSONBodyAction1Type = "SETTINGS" +) + +// Defines values for PostFleetAgentsAgentidRequestDiagnosticsJSONBodyAdditionalMetrics. +const ( + PostFleetAgentsAgentidRequestDiagnosticsJSONBodyAdditionalMetricsCPU PostFleetAgentsAgentidRequestDiagnosticsJSONBodyAdditionalMetrics = "CPU" +) + +// Defines values for PostFleetEpmCustomIntegrationsJSONBodyDatasetsType. +const ( + PostFleetEpmCustomIntegrationsJSONBodyDatasetsTypeLogs PostFleetEpmCustomIntegrationsJSONBodyDatasetsType = "logs" + PostFleetEpmCustomIntegrationsJSONBodyDatasetsTypeMetrics PostFleetEpmCustomIntegrationsJSONBodyDatasetsType = "metrics" + PostFleetEpmCustomIntegrationsJSONBodyDatasetsTypeProfiling PostFleetEpmCustomIntegrationsJSONBodyDatasetsType = "profiling" + PostFleetEpmCustomIntegrationsJSONBodyDatasetsTypeSynthetics PostFleetEpmCustomIntegrationsJSONBodyDatasetsType = "synthetics" + PostFleetEpmCustomIntegrationsJSONBodyDatasetsTypeTraces PostFleetEpmCustomIntegrationsJSONBodyDatasetsType = "traces" +) + +// Defines values for GetFleetEpmDataStreamsParamsType. +const ( + GetFleetEpmDataStreamsParamsTypeLogs GetFleetEpmDataStreamsParamsType = "logs" + GetFleetEpmDataStreamsParamsTypeMetrics GetFleetEpmDataStreamsParamsType = "metrics" + GetFleetEpmDataStreamsParamsTypeProfiling GetFleetEpmDataStreamsParamsType = "profiling" + GetFleetEpmDataStreamsParamsTypeSynthetics GetFleetEpmDataStreamsParamsType = "synthetics" + GetFleetEpmDataStreamsParamsTypeTraces GetFleetEpmDataStreamsParamsType = "traces" +) + +// Defines values for GetFleetEpmDataStreamsParamsSortOrder. +const ( + GetFleetEpmDataStreamsParamsSortOrderAsc GetFleetEpmDataStreamsParamsSortOrder = "asc" + GetFleetEpmDataStreamsParamsSortOrderDesc GetFleetEpmDataStreamsParamsSortOrder = "desc" +) + +// Defines values for GetFleetEpmPackagesInstalledParamsDataStreamType. 
+const ( + GetFleetEpmPackagesInstalledParamsDataStreamTypeLogs GetFleetEpmPackagesInstalledParamsDataStreamType = "logs" + GetFleetEpmPackagesInstalledParamsDataStreamTypeMetrics GetFleetEpmPackagesInstalledParamsDataStreamType = "metrics" + GetFleetEpmPackagesInstalledParamsDataStreamTypeProfiling GetFleetEpmPackagesInstalledParamsDataStreamType = "profiling" + GetFleetEpmPackagesInstalledParamsDataStreamTypeSynthetics GetFleetEpmPackagesInstalledParamsDataStreamType = "synthetics" + GetFleetEpmPackagesInstalledParamsDataStreamTypeTraces GetFleetEpmPackagesInstalledParamsDataStreamType = "traces" +) + +// Defines values for GetFleetEpmPackagesInstalledParamsSortOrder. +const ( + GetFleetEpmPackagesInstalledParamsSortOrderAsc GetFleetEpmPackagesInstalledParamsSortOrder = "asc" + GetFleetEpmPackagesInstalledParamsSortOrderDesc GetFleetEpmPackagesInstalledParamsSortOrder = "desc" +) + +// Defines values for GetFleetEpmTemplatesPkgnamePkgversionInputsParamsFormat. +const ( + Json GetFleetEpmTemplatesPkgnamePkgversionInputsParamsFormat = "json" + Yaml GetFleetEpmTemplatesPkgnamePkgversionInputsParamsFormat = "yaml" + Yml GetFleetEpmTemplatesPkgnamePkgversionInputsParamsFormat = "yml" +) + +// Defines values for PostFleetFleetServerHostsJSONBodySslClientAuth. +const ( + PostFleetFleetServerHostsJSONBodySslClientAuthNone PostFleetFleetServerHostsJSONBodySslClientAuth = "none" + PostFleetFleetServerHostsJSONBodySslClientAuthOptional PostFleetFleetServerHostsJSONBodySslClientAuth = "optional" + PostFleetFleetServerHostsJSONBodySslClientAuthRequired PostFleetFleetServerHostsJSONBodySslClientAuth = "required" +) + +// Defines values for PutFleetFleetServerHostsItemidJSONBodySslClientAuth. +const ( + PutFleetFleetServerHostsItemidJSONBodySslClientAuthNone PutFleetFleetServerHostsItemidJSONBodySslClientAuth = "none" + PutFleetFleetServerHostsItemidJSONBodySslClientAuthOptional PutFleetFleetServerHostsItemidJSONBodySslClientAuth = "optional" + PutFleetFleetServerHostsItemidJSONBodySslClientAuthRequired PutFleetFleetServerHostsItemidJSONBodySslClientAuth = "required" +) + +// Defines values for GetFleetPackagePoliciesParamsSortOrder. +const ( + GetFleetPackagePoliciesParamsSortOrderAsc GetFleetPackagePoliciesParamsSortOrder = "asc" + GetFleetPackagePoliciesParamsSortOrderDesc GetFleetPackagePoliciesParamsSortOrder = "desc" +) + +// Defines values for GetFleetPackagePoliciesParamsFormat. +const ( + GetFleetPackagePoliciesParamsFormatLegacy GetFleetPackagePoliciesParamsFormat = "legacy" + GetFleetPackagePoliciesParamsFormatSimplified GetFleetPackagePoliciesParamsFormat = "simplified" +) + +// Defines values for PostFleetPackagePoliciesParamsFormat. +const ( + PostFleetPackagePoliciesParamsFormatLegacy PostFleetPackagePoliciesParamsFormat = "legacy" + PostFleetPackagePoliciesParamsFormatSimplified PostFleetPackagePoliciesParamsFormat = "simplified" +) + +// Defines values for PostFleetPackagePoliciesBulkGetParamsFormat. +const ( + PostFleetPackagePoliciesBulkGetParamsFormatLegacy PostFleetPackagePoliciesBulkGetParamsFormat = "legacy" + PostFleetPackagePoliciesBulkGetParamsFormatSimplified PostFleetPackagePoliciesBulkGetParamsFormat = "simplified" +) + +// Defines values for GetFleetPackagePoliciesPackagepolicyidParamsFormat. 
+const ( + GetFleetPackagePoliciesPackagepolicyidParamsFormatLegacy GetFleetPackagePoliciesPackagepolicyidParamsFormat = "legacy" + GetFleetPackagePoliciesPackagepolicyidParamsFormatSimplified GetFleetPackagePoliciesPackagepolicyidParamsFormat = "simplified" +) + +// Defines values for PutFleetPackagePoliciesPackagepolicyidParamsFormat. +const ( + PutFleetPackagePoliciesPackagepolicyidParamsFormatLegacy PutFleetPackagePoliciesPackagepolicyidParamsFormat = "legacy" + PutFleetPackagePoliciesPackagepolicyidParamsFormatSimplified PutFleetPackagePoliciesPackagepolicyidParamsFormat = "simplified" +) + +// Defines values for FindListsParamsSortOrder. +const ( + FindListsParamsSortOrderAsc FindListsParamsSortOrder = "asc" + FindListsParamsSortOrderDesc FindListsParamsSortOrder = "desc" +) + +// Defines values for DeleteListItemParamsRefresh. +const ( + DeleteListItemParamsRefreshFalse DeleteListItemParamsRefresh = "false" + DeleteListItemParamsRefreshTrue DeleteListItemParamsRefresh = "true" + DeleteListItemParamsRefreshWaitFor DeleteListItemParamsRefresh = "wait_for" +) + +// Defines values for PatchListItemJSONBodyRefresh. +const ( + PatchListItemJSONBodyRefreshFalse PatchListItemJSONBodyRefresh = "false" + PatchListItemJSONBodyRefreshTrue PatchListItemJSONBodyRefresh = "true" + PatchListItemJSONBodyRefreshWaitFor PatchListItemJSONBodyRefresh = "wait_for" +) + +// Defines values for CreateListItemJSONBodyRefresh. +const ( + CreateListItemJSONBodyRefreshFalse CreateListItemJSONBodyRefresh = "false" + CreateListItemJSONBodyRefreshTrue CreateListItemJSONBodyRefresh = "true" + CreateListItemJSONBodyRefreshWaitFor CreateListItemJSONBodyRefresh = "wait_for" +) + +// Defines values for FindListItemsParamsSortOrder. +const ( + FindListItemsParamsSortOrderAsc FindListItemsParamsSortOrder = "asc" + FindListItemsParamsSortOrderDesc FindListItemsParamsSortOrder = "desc" +) + +// Defines values for ImportListItemsParamsRefresh. +const ( + ImportListItemsParamsRefreshFalse ImportListItemsParamsRefresh = "false" + ImportListItemsParamsRefreshTrue ImportListItemsParamsRefresh = "true" + ImportListItemsParamsRefreshWaitFor ImportListItemsParamsRefresh = "wait_for" +) + +// Defines values for PostSecurityRoleQueryJSONBodySortDirection. +const ( + PostSecurityRoleQueryJSONBodySortDirectionAsc PostSecurityRoleQueryJSONBodySortDirection = "asc" + PostSecurityRoleQueryJSONBodySortDirectionDesc PostSecurityRoleQueryJSONBodySortDirection = "desc" +) + +// Defines values for PostSecuritySessionInvalidateJSONBodyMatch. +const ( + PostSecuritySessionInvalidateJSONBodyMatchAll PostSecuritySessionInvalidateJSONBodyMatch = "all" + PostSecuritySessionInvalidateJSONBodyMatchQuery PostSecuritySessionInvalidateJSONBodyMatch = "query" +) + +// Defines values for GetSpacesSpaceParamsPurpose. +const ( + Any GetSpacesSpaceParamsPurpose = "any" + CopySavedObjectsIntoSpace GetSpacesSpaceParamsPurpose = "copySavedObjectsIntoSpace" + ShareSavedObjectsIntoSpace GetSpacesSpaceParamsPurpose = "shareSavedObjectsIntoSpace" +) + +// Defines values for GetSpacesSpaceParamsIncludeAuthorizedPurposes0. +const ( + GetSpacesSpaceParamsIncludeAuthorizedPurposes0False GetSpacesSpaceParamsIncludeAuthorizedPurposes0 = false +) + +// Defines values for PostSpacesSpaceJSONBodySolution. 
+const ( + PostSpacesSpaceJSONBodySolutionClassic PostSpacesSpaceJSONBodySolution = "classic" + PostSpacesSpaceJSONBodySolutionEs PostSpacesSpaceJSONBodySolution = "es" + PostSpacesSpaceJSONBodySolutionOblt PostSpacesSpaceJSONBodySolution = "oblt" + PostSpacesSpaceJSONBodySolutionSecurity PostSpacesSpaceJSONBodySolution = "security" +) + +// Defines values for PutSpacesSpaceIdJSONBodySolution. +const ( + PutSpacesSpaceIdJSONBodySolutionClassic PutSpacesSpaceIdJSONBodySolution = "classic" + PutSpacesSpaceIdJSONBodySolutionEs PutSpacesSpaceIdJSONBodySolution = "es" + PutSpacesSpaceIdJSONBodySolutionOblt PutSpacesSpaceIdJSONBodySolution = "oblt" + PutSpacesSpaceIdJSONBodySolutionSecurity PutSpacesSpaceIdJSONBodySolution = "security" +) + +// Defines values for PutStreamsNameJSONBody00StreamIngestProcessingSteps00Action. +const ( + PutStreamsNameJSONBody00StreamIngestProcessingSteps00ActionGrok PutStreamsNameJSONBody00StreamIngestProcessingSteps00Action = "grok" +) + +// Defines values for PutStreamsNameJSONBody00StreamIngestProcessingSteps01Action. +const ( + PutStreamsNameJSONBody00StreamIngestProcessingSteps01ActionDissect PutStreamsNameJSONBody00StreamIngestProcessingSteps01Action = "dissect" +) + +// Defines values for PutStreamsNameJSONBody00StreamIngestProcessingSteps02Action. +const ( + PutStreamsNameJSONBody00StreamIngestProcessingSteps02ActionDate PutStreamsNameJSONBody00StreamIngestProcessingSteps02Action = "date" +) + +// Defines values for PutStreamsNameJSONBody00StreamIngestProcessingSteps03Action. +const ( + PutStreamsNameJSONBody00StreamIngestProcessingSteps03ActionRename PutStreamsNameJSONBody00StreamIngestProcessingSteps03Action = "rename" +) + +// Defines values for PutStreamsNameJSONBody00StreamIngestProcessingSteps04Action. +const ( + PutStreamsNameJSONBody00StreamIngestProcessingSteps04ActionSet PutStreamsNameJSONBody00StreamIngestProcessingSteps04Action = "set" +) + +// Defines values for PutStreamsNameJSONBody00StreamIngestProcessingSteps05Action. +const ( + PutStreamsNameJSONBody00StreamIngestProcessingSteps05ActionAppend PutStreamsNameJSONBody00StreamIngestProcessingSteps05Action = "append" +) + +// Defines values for PutStreamsNameJSONBody00StreamIngestProcessingSteps06Action. +const ( + PutStreamsNameJSONBody00StreamIngestProcessingSteps06ActionManualIngestPipeline PutStreamsNameJSONBody00StreamIngestProcessingSteps06Action = "manual_ingest_pipeline" +) + +// Defines values for PutStreamsNameJSONBody01StreamIngestProcessingSteps00Action. +const ( + PutStreamsNameJSONBody01StreamIngestProcessingSteps00ActionGrok PutStreamsNameJSONBody01StreamIngestProcessingSteps00Action = "grok" +) + +// Defines values for PutStreamsNameJSONBody01StreamIngestProcessingSteps01Action. +const ( + PutStreamsNameJSONBody01StreamIngestProcessingSteps01ActionDissect PutStreamsNameJSONBody01StreamIngestProcessingSteps01Action = "dissect" +) + +// Defines values for PutStreamsNameJSONBody01StreamIngestProcessingSteps02Action. +const ( + PutStreamsNameJSONBody01StreamIngestProcessingSteps02ActionDate PutStreamsNameJSONBody01StreamIngestProcessingSteps02Action = "date" +) + +// Defines values for PutStreamsNameJSONBody01StreamIngestProcessingSteps03Action. +const ( + PutStreamsNameJSONBody01StreamIngestProcessingSteps03ActionRename PutStreamsNameJSONBody01StreamIngestProcessingSteps03Action = "rename" +) + +// Defines values for PutStreamsNameJSONBody01StreamIngestProcessingSteps04Action. 
+const ( + PutStreamsNameJSONBody01StreamIngestProcessingSteps04ActionSet PutStreamsNameJSONBody01StreamIngestProcessingSteps04Action = "set" +) + +// Defines values for PutStreamsNameJSONBody01StreamIngestProcessingSteps05Action. +const ( + PutStreamsNameJSONBody01StreamIngestProcessingSteps05ActionAppend PutStreamsNameJSONBody01StreamIngestProcessingSteps05Action = "append" +) + +// Defines values for PutStreamsNameJSONBody01StreamIngestProcessingSteps06Action. +const ( + PutStreamsNameJSONBody01StreamIngestProcessingSteps06ActionManualIngestPipeline PutStreamsNameJSONBody01StreamIngestProcessingSteps06Action = "manual_ingest_pipeline" +) + +// Defines values for PostStreamsNameForkJSONBodyStatus. +const ( + PostStreamsNameForkJSONBodyStatusDisabled PostStreamsNameForkJSONBodyStatus = "disabled" + PostStreamsNameForkJSONBodyStatusEnabled PostStreamsNameForkJSONBodyStatus = "enabled" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Action. +const ( + PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00ActionGrok PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Action = "grok" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Action. +const ( + PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01ActionDissect PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Action = "dissect" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Action. +const ( + PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02ActionDate PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Action = "date" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Action. +const ( + PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03ActionRename PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Action = "rename" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Action. +const ( + PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04ActionSet PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Action = "set" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Action. +const ( + PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05ActionAppend PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Action = "append" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Action. +const ( + PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06ActionManualIngestPipeline PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Action = "manual_ingest_pipeline" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest0WiredRoutingStatus. +const ( + PutStreamsNameIngestJSONBodyIngest0WiredRoutingStatusDisabled PutStreamsNameIngestJSONBodyIngest0WiredRoutingStatus = "disabled" + PutStreamsNameIngestJSONBodyIngest0WiredRoutingStatusEnabled PutStreamsNameIngestJSONBodyIngest0WiredRoutingStatus = "enabled" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Action. +const ( + PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00ActionGrok PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Action = "grok" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Action. +const ( + PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01ActionDissect PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Action = "dissect" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Action. 
+const ( + PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02ActionDate PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Action = "date" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Action. +const ( + PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03ActionRename PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Action = "rename" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Action. +const ( + PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04ActionSet PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Action = "set" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Action. +const ( + PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05ActionAppend PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Action = "append" +) + +// Defines values for PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Action. +const ( + PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06ActionManualIngestPipeline PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Action = "manual_ingest_pipeline" +) + +// Defines values for GetSyntheticMonitorsParamsMonitorTypes0. +const ( + GetSyntheticMonitorsParamsMonitorTypes0Browser GetSyntheticMonitorsParamsMonitorTypes0 = "browser" + GetSyntheticMonitorsParamsMonitorTypes0Http GetSyntheticMonitorsParamsMonitorTypes0 = "http" + GetSyntheticMonitorsParamsMonitorTypes0Icmp GetSyntheticMonitorsParamsMonitorTypes0 = "icmp" + GetSyntheticMonitorsParamsMonitorTypes0Tcp GetSyntheticMonitorsParamsMonitorTypes0 = "tcp" +) + +// Defines values for GetSyntheticMonitorsParamsSortField. +const ( + GetSyntheticMonitorsParamsSortFieldCreatedAt GetSyntheticMonitorsParamsSortField = "createdAt" + GetSyntheticMonitorsParamsSortFieldName GetSyntheticMonitorsParamsSortField = "name" + GetSyntheticMonitorsParamsSortFieldStatus GetSyntheticMonitorsParamsSortField = "status" + GetSyntheticMonitorsParamsSortFieldUpdatedAt GetSyntheticMonitorsParamsSortField = "updatedAt" +) + +// Defines values for GetSyntheticMonitorsParamsSortOrder. +const ( + GetSyntheticMonitorsParamsSortOrderAsc GetSyntheticMonitorsParamsSortOrder = "asc" + GetSyntheticMonitorsParamsSortOrderDesc GetSyntheticMonitorsParamsSortOrder = "desc" +) + +// Defines values for GetSyntheticMonitorsParamsUseLogicalAndFor. +const ( + Locations GetSyntheticMonitorsParamsUseLogicalAndFor = "locations" + Tags GetSyntheticMonitorsParamsUseLogicalAndFor = "tags" +) + +// Defines values for ImportTimelinesJSONBodyIsImmutable. +const ( + ImportTimelinesJSONBodyIsImmutableFalse ImportTimelinesJSONBodyIsImmutable = "false" + ImportTimelinesJSONBodyIsImmutableTrue ImportTimelinesJSONBodyIsImmutable = "true" +) + +// Defines values for GetTimelinesParamsOnlyUserFavorite. +const ( + False GetTimelinesParamsOnlyUserFavorite = "false" + True GetTimelinesParamsOnlyUserFavorite = "true" +) + +// Defines values for GetTimelinesParamsSortOrder. +const ( + GetTimelinesParamsSortOrderAsc GetTimelinesParamsSortOrder = "asc" + GetTimelinesParamsSortOrderDesc GetTimelinesParamsSortOrder = "desc" +) + +// Defines values for FindSlosOpParamsSortBy. +const ( + ErrorBudgetConsumed FindSlosOpParamsSortBy = "error_budget_consumed" + ErrorBudgetRemaining FindSlosOpParamsSortBy = "error_budget_remaining" + SliValue FindSlosOpParamsSortBy = "sli_value" + Status FindSlosOpParamsSortBy = "status" +) + +// Defines values for FindSlosOpParamsSortDirection. 
+const ( + Asc FindSlosOpParamsSortDirection = "asc" + Desc FindSlosOpParamsSortDirection = "desc" +) + +// APMUI400Response defines model for APM_UI_400_response. +type APMUI400Response struct { + // Error Error type + Error *string `json:"error,omitempty"` + + // Message Error message + Message *string `json:"message,omitempty"` + + // StatusCode Error status code + StatusCode *float32 `json:"statusCode,omitempty"` +} + +// APMUI401Response defines model for APM_UI_401_response. +type APMUI401Response struct { + // Error Error type + Error *string `json:"error,omitempty"` + + // Message Error message + Message *string `json:"message,omitempty"` + + // StatusCode Error status code + StatusCode *float32 `json:"statusCode,omitempty"` +} + +// APMUI403Response defines model for APM_UI_403_response. +type APMUI403Response struct { + // Error Error type + Error *string `json:"error,omitempty"` + + // Message Error message + Message *string `json:"message,omitempty"` + + // StatusCode Error status code + StatusCode *float32 `json:"statusCode,omitempty"` +} + +// APMUI404Response defines model for APM_UI_404_response. +type APMUI404Response struct { + // Error Error type + Error *string `json:"error,omitempty"` + + // Message Error message + Message *string `json:"message,omitempty"` + + // StatusCode Error status code + StatusCode *float32 `json:"statusCode,omitempty"` +} + +// APMUI500Response defines model for APM_UI_500_response. +type APMUI500Response struct { + // Error Error type + Error *string `json:"error,omitempty"` + + // Message Error message + Message *string `json:"message,omitempty"` + + // StatusCode Error status code + StatusCode *float32 `json:"statusCode,omitempty"` +} + +// APMUI501Response defines model for APM_UI_501_response. +type APMUI501Response struct { + // Error Error type + Error *string `json:"error,omitempty"` + + // Message Error message + Message *string `json:"message,omitempty"` + + // StatusCode Error status code + StatusCode *float32 `json:"statusCode,omitempty"` +} + +// APMUIAgentConfigurationIntakeObject defines model for APM_UI_agent_configuration_intake_object. +type APMUIAgentConfigurationIntakeObject struct { + // AgentName The agent name is used by the UI to determine which settings to display. + AgentName *string `json:"agent_name,omitempty"` + + // Service Service + Service APMUIServiceObject `json:"service"` + + // Settings Agent configuration settings + Settings APMUISettingsObject `json:"settings"` +} + +// APMUIAgentConfigurationObject Agent configuration +type APMUIAgentConfigurationObject struct { + // Timestamp Timestamp + Timestamp float32 `json:"@timestamp"` + + // AgentName Agent name + AgentName *string `json:"agent_name,omitempty"` + + // AppliedByAgent Applied by agent + AppliedByAgent *bool `json:"applied_by_agent,omitempty"` + + // Etag `etag` is sent by the APM agent to indicate the `etag` of the last successfully applied configuration. If the `etag` matches an existing configuration its `applied_by_agent` property will be set to `true`. Every time a configuration is edited `applied_by_agent` is reset to `false`. + Etag string `json:"etag"` + + // Service Service + Service APMUIServiceObject `json:"service"` + + // Settings Agent configuration settings + Settings APMUISettingsObject `json:"settings"` +} + +// APMUIAgentConfigurationsResponse defines model for APM_UI_agent_configurations_response. 
+type APMUIAgentConfigurationsResponse struct { + // Configurations Agent configuration + Configurations *[]APMUIAgentConfigurationObject `json:"configurations,omitempty"` +} + +// APMUIAgentKeysObject defines model for APM_UI_agent_keys_object. +type APMUIAgentKeysObject struct { + // Name The name of the APM agent key. + Name string `json:"name"` + + // Privileges The APM agent key privileges. It can take one or more of the following values: + // * `event:write`, which is required for ingesting APM agent events. * `config_agent:read`, which is required for APM agents to read agent configuration remotely. + Privileges []APMUIAgentKeysObjectPrivileges `json:"privileges"` +} + +// APMUIAgentKeysObjectPrivileges defines model for APMUIAgentKeysObject.Privileges. +type APMUIAgentKeysObjectPrivileges string + +// APMUIAgentKeysResponse defines model for APM_UI_agent_keys_response. +type APMUIAgentKeysResponse struct { + // AgentKey Agent key + AgentKey *struct { + ApiKey string `json:"api_key"` + Encoded string `json:"encoded"` + Expiration *int64 `json:"expiration,omitempty"` + Id string `json:"id"` + Name string `json:"name"` + } `json:"agentKey,omitempty"` +} + +// APMUIAnnotationSearchResponse defines model for APM_UI_annotation_search_response. +type APMUIAnnotationSearchResponse struct { + // Annotations Annotations + Annotations *[]struct { + Timestamp *float32 `json:"@timestamp,omitempty"` + Id *string `json:"id,omitempty"` + Text *string `json:"text,omitempty"` + Type *APMUIAnnotationSearchResponseAnnotationsType `json:"type,omitempty"` + } `json:"annotations,omitempty"` +} + +// APMUIAnnotationSearchResponseAnnotationsType defines model for APMUIAnnotationSearchResponse.Annotations.Type. +type APMUIAnnotationSearchResponseAnnotationsType string + +// APMUIBaseSourceMapObject defines model for APM_UI_base_source_map_object. +type APMUIBaseSourceMapObject struct { + // CompressionAlgorithm Compression Algorithm + CompressionAlgorithm *string `json:"compressionAlgorithm,omitempty"` + + // Created Created date + Created *string `json:"created,omitempty"` + + // DecodedSha256 Decoded SHA-256 + DecodedSha256 *string `json:"decodedSha256,omitempty"` + + // DecodedSize Decoded size + DecodedSize *float32 `json:"decodedSize,omitempty"` + + // EncodedSha256 Encoded SHA-256 + EncodedSha256 *string `json:"encodedSha256,omitempty"` + + // EncodedSize Encoded size + EncodedSize *float32 `json:"encodedSize,omitempty"` + + // EncryptionAlgorithm Encryption Algorithm + EncryptionAlgorithm *string `json:"encryptionAlgorithm,omitempty"` + + // Id Identifier + Id *string `json:"id,omitempty"` + + // Identifier Identifier + Identifier *string `json:"identifier,omitempty"` + + // PackageName Package name + PackageName *string `json:"packageName,omitempty"` + + // RelativeUrl Relative URL + RelativeUrl *string `json:"relative_url,omitempty"` + + // Type Type + Type *string `json:"type,omitempty"` +} + +// APMUICreateAnnotationObject defines model for APM_UI_create_annotation_object. +type APMUICreateAnnotationObject struct { + // Timestamp The date and time of the annotation. It must be in ISO 8601 format. + Timestamp string `json:"@timestamp"` + + // Message The message displayed in the annotation. It defaults to `service.version`. + Message *string `json:"message,omitempty"` + + // Service The service that identifies the configuration to create or update. + Service struct { + // Environment The environment of the service. 
+ Environment *string `json:"environment,omitempty"` + + // Version The version of the service. + Version string `json:"version"` + } `json:"service"` + + // Tags Tags are used by the Applications UI to distinguish APM annotations from other annotations. Tags may have additional functionality in future releases. It defaults to `[apm]`. While you can add additional tags, you cannot remove the `apm` tag. + Tags *[]string `json:"tags,omitempty"` +} + +// APMUICreateAnnotationResponse defines model for APM_UI_create_annotation_response. +type APMUICreateAnnotationResponse struct { + // UnderscoreId Identifier + UnderscoreId *string `json:"_id,omitempty"` + + // UnderscoreIndex Index + UnderscoreIndex *string `json:"_index,omitempty"` + + // UnderscoreSource Response + UnderscoreSource *struct { + Timestamp *string `json:"@timestamp,omitempty"` + Annotation *struct { + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"annotation,omitempty"` + Event *struct { + Created *string `json:"created,omitempty"` + } `json:"event,omitempty"` + Message *string `json:"message,omitempty"` + Service *struct { + Environment *string `json:"environment,omitempty"` + Name *string `json:"name,omitempty"` + Version *string `json:"version,omitempty"` + } `json:"service,omitempty"` + Tags *[]string `json:"tags,omitempty"` + } `json:"_source,omitempty"` +} + +// APMUIDeleteAgentConfigurationsResponse defines model for APM_UI_delete_agent_configurations_response. +type APMUIDeleteAgentConfigurationsResponse struct { + // Result Result + Result *string `json:"result,omitempty"` +} + +// APMUIDeleteServiceObject Service +type APMUIDeleteServiceObject struct { + // Service Service + Service APMUIServiceObject `json:"service"` +} + +// APMUISearchAgentConfigurationObject defines model for APM_UI_search_agent_configuration_object. +type APMUISearchAgentConfigurationObject struct { + // Etag If etags match then `applied_by_agent` field will be set to `true` + Etag *string `json:"etag,omitempty"` + + // MarkAsAppliedByAgent `markAsAppliedByAgent=true` means "force setting it to true regardless of etag". + // This is needed for Jaeger agent that doesn't have etags + MarkAsAppliedByAgent *bool `json:"mark_as_applied_by_agent,omitempty"` + + // Service Service + Service APMUIServiceObject `json:"service"` +} + +// APMUISearchAgentConfigurationResponse defines model for APM_UI_search_agent_configuration_response. +type APMUISearchAgentConfigurationResponse struct { + // UnderscoreId Identifier + UnderscoreId *string `json:"_id,omitempty"` + + // UnderscoreIndex Index + UnderscoreIndex *string `json:"_index,omitempty"` + + // UnderscoreScore Score + UnderscoreScore *float32 `json:"_score,omitempty"` + + // UnderscoreSource Agent configuration + UnderscoreSource *APMUIAgentConfigurationObject `json:"_source,omitempty"` +} + +// APMUIServiceAgentNameResponse defines model for APM_UI_service_agent_name_response. +type APMUIServiceAgentNameResponse struct { + // AgentName Agent name + AgentName *string `json:"agentName,omitempty"` +} + +// APMUIServiceEnvironmentObject defines model for APM_UI_service_environment_object. +type APMUIServiceEnvironmentObject struct { + // AlreadyConfigured Already configured + AlreadyConfigured *bool `json:"alreadyConfigured,omitempty"` + + // Name Service environment name + Name *string `json:"name,omitempty"` +} + +// APMUIServiceEnvironmentsResponse defines model for APM_UI_service_environments_response. 
+type APMUIServiceEnvironmentsResponse struct { + // Environments Service environment list + Environments *[]APMUIServiceEnvironmentObject `json:"environments,omitempty"` +} + +// APMUIServiceObject Service +type APMUIServiceObject struct { + // Environment The environment of the service. + Environment *string `json:"environment,omitempty"` + + // Name The name of the service. + Name *string `json:"name,omitempty"` +} + +// APMUISettingsObject Agent configuration settings +type APMUISettingsObject map[string]string + +// APMUISingleAgentConfigurationResponse defines model for APM_UI_single_agent_configuration_response. +type APMUISingleAgentConfigurationResponse struct { + // Timestamp Timestamp + Timestamp float32 `json:"@timestamp"` + + // AgentName Agent name + AgentName *string `json:"agent_name,omitempty"` + + // AppliedByAgent Applied by agent + AppliedByAgent *bool `json:"applied_by_agent,omitempty"` + + // Etag `etag` is sent by the APM agent to indicate the `etag` of the last successfully applied configuration. If the `etag` matches an existing configuration its `applied_by_agent` property will be set to `true`. Every time a configuration is edited `applied_by_agent` is reset to `false`. + Etag string `json:"etag"` + Id string `json:"id"` + + // Service Service + Service APMUIServiceObject `json:"service"` + + // Settings Agent configuration settings + Settings APMUISettingsObject `json:"settings"` +} + +// APMUISourceMapsResponse defines model for APM_UI_source_maps_response. +type APMUISourceMapsResponse struct { + // Artifacts Artifacts + Artifacts *[]struct { + Body *struct { + BundleFilepath *string `json:"bundleFilepath,omitempty"` + ServiceName *string `json:"serviceName,omitempty"` + ServiceVersion *string `json:"serviceVersion,omitempty"` + SourceMap *struct { + File *string `json:"file,omitempty"` + Mappings *string `json:"mappings,omitempty"` + SourceRoot *string `json:"sourceRoot,omitempty"` + Sources *[]string `json:"sources,omitempty"` + SourcesContent *[]string `json:"sourcesContent,omitempty"` + Version *float32 `json:"version,omitempty"` + } `json:"sourceMap,omitempty"` + } `json:"body,omitempty"` + + // CompressionAlgorithm Compression Algorithm + CompressionAlgorithm *string `json:"compressionAlgorithm,omitempty"` + + // Created Created date + Created *string `json:"created,omitempty"` + + // DecodedSha256 Decoded SHA-256 + DecodedSha256 *string `json:"decodedSha256,omitempty"` + + // DecodedSize Decoded size + DecodedSize *float32 `json:"decodedSize,omitempty"` + + // EncodedSha256 Encoded SHA-256 + EncodedSha256 *string `json:"encodedSha256,omitempty"` + + // EncodedSize Encoded size + EncodedSize *float32 `json:"encodedSize,omitempty"` + + // EncryptionAlgorithm Encryption Algorithm + EncryptionAlgorithm *string `json:"encryptionAlgorithm,omitempty"` + + // Id Identifier + Id *string `json:"id,omitempty"` + + // Identifier Identifier + Identifier *string `json:"identifier,omitempty"` + + // PackageName Package name + PackageName *string `json:"packageName,omitempty"` + + // RelativeUrl Relative URL + RelativeUrl *string `json:"relative_url,omitempty"` + + // Type Type + Type *string `json:"type,omitempty"` + } `json:"artifacts,omitempty"` +} + +// APMUIUploadSourceMapObject defines model for APM_UI_upload_source_map_object. +type APMUIUploadSourceMapObject struct { + // BundleFilepath The absolute path of the final bundle as used in the web application. 
+ BundleFilepath string `json:"bundle_filepath"` + + // ServiceName The name of the service that the service map should apply to. + ServiceName string `json:"service_name"` + + // ServiceVersion The version of the service that the service map should apply to. + ServiceVersion string `json:"service_version"` + + // Sourcemap The source map. It can be a string or file upload. It must follow the + // [source map format specification](https://tc39.es/ecma426/). + Sourcemap openapi_types.File `json:"sourcemap"` +} + +// APMUIUploadSourceMapsResponse defines model for APM_UI_upload_source_maps_response. +type APMUIUploadSourceMapsResponse struct { + Body *string `json:"body,omitempty"` + + // CompressionAlgorithm Compression Algorithm + CompressionAlgorithm *string `json:"compressionAlgorithm,omitempty"` + + // Created Created date + Created *string `json:"created,omitempty"` + + // DecodedSha256 Decoded SHA-256 + DecodedSha256 *string `json:"decodedSha256,omitempty"` + + // DecodedSize Decoded size + DecodedSize *float32 `json:"decodedSize,omitempty"` + + // EncodedSha256 Encoded SHA-256 + EncodedSha256 *string `json:"encodedSha256,omitempty"` + + // EncodedSize Encoded size + EncodedSize *float32 `json:"encodedSize,omitempty"` + + // EncryptionAlgorithm Encryption Algorithm + EncryptionAlgorithm *string `json:"encryptionAlgorithm,omitempty"` + + // Id Identifier + Id *string `json:"id,omitempty"` + + // Identifier Identifier + Identifier *string `json:"identifier,omitempty"` + + // PackageName Package name + PackageName *string `json:"packageName,omitempty"` + + // RelativeUrl Relative URL + RelativeUrl *string `json:"relative_url,omitempty"` + + // Type Type + Type *string `json:"type,omitempty"` +} + +// Alerting401Response defines model for Alerting_401_response. +type Alerting401Response struct { + Error *Alerting401ResponseError `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *Alerting401ResponseStatusCode `json:"statusCode,omitempty"` +} + +// Alerting401ResponseError defines model for Alerting401Response.Error. +type Alerting401ResponseError string + +// Alerting401ResponseStatusCode defines model for Alerting401Response.StatusCode. +type Alerting401ResponseStatusCode int + +// AlertingFieldmapProperties defines model for Alerting_fieldmap_properties. +type AlertingFieldmapProperties struct { + // Array Indicates whether the field is an array. + Array *bool `json:"array,omitempty"` + + // Dynamic Indicates whether it is a dynamic field mapping. + Dynamic *bool `json:"dynamic,omitempty"` + + // Format Indicates the format of the field. For example, if the `type` is `date_range`, the `format` can be `epoch_millis||strict_date_optional_time`. + Format *string `json:"format,omitempty"` + + // IgnoreAbove Specifies the maximum length of a string field. Longer strings are not indexed or stored. + IgnoreAbove *int `json:"ignore_above,omitempty"` + + // Index Indicates whether field values are indexed. + Index *bool `json:"index,omitempty"` + + // Path TBD + Path *string `json:"path,omitempty"` + + // Properties Details about the object properties. This property is applicable when `type` is `object`. + Properties *map[string]struct { + // Type The data type for each object property. + Type *string `json:"type,omitempty"` + } `json:"properties,omitempty"` + + // Required Indicates whether the field is required. + Required *bool `json:"required,omitempty"` + + // ScalingFactor The scaling factor to use when encoding values. 
This property is applicable when `type` is `scaled_float`. Values will be multiplied by this factor at index time and rounded to the closest long value. + ScalingFactor *int `json:"scaling_factor,omitempty"` + + // Type Specifies the data type for the field. + Type *string `json:"type,omitempty"` +} + +// Cases4xxResponse defines model for Cases_4xx_response. +type Cases4xxResponse struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *int `json:"statusCode,omitempty"` +} + +// CasesActions defines model for Cases_actions. +type CasesActions string + +// CasesAddAlertCommentRequestProperties Defines properties for case comment requests when type is alert. +type CasesAddAlertCommentRequestProperties struct { + // AlertId The alert identifiers. It is required only when `type` is `alert`. You can use an array of strings to add multiple alerts to a case, provided that they all relate to the same rule; `index` must also be an array with the same length or number of elements. Adding multiple alerts in this manner is recommended rather than calling the API multiple times. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. + AlertId CasesAlertIdentifiers `json:"alertId"` + + // Index The alert indices. It is required only when `type` is `alert`. If you are adding multiple alerts to a case, use an array of strings; the position of each index name in the array must match the position of the corresponding alert identifier in the `alertId` array. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. + Index CasesAlertIndices `json:"index"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner CasesOwner `json:"owner"` + + // Rule The rule that is associated with the alerts. It is required only when `type` is `alert`. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. + Rule CasesRule `json:"rule"` + + // Type The type of comment. + Type CasesAddAlertCommentRequestPropertiesType `json:"type"` +} + +// CasesAddAlertCommentRequestPropertiesType The type of comment. +type CasesAddAlertCommentRequestPropertiesType string + +// CasesAddCaseCommentRequest The add comment to case API request body varies depending on whether you are adding an alert or a comment. +type CasesAddCaseCommentRequest struct { + union json.RawMessage +} + +// CasesAddCaseFileRequest Defines the file that will be attached to the case. Optional parameters will be generated automatically from the file metadata if not defined. +type CasesAddCaseFileRequest struct { + // File The file being attached to the case. + File openapi_types.File `json:"file"` + + // Filename The desired name of the file being attached to the case, it can be different than the name of the file in the filesystem. **This should not include the file extension.** + Filename *string `json:"filename,omitempty"` +} + +// CasesAddUserCommentRequestProperties Defines properties for case comment requests when type is user. 
+type CasesAddUserCommentRequestProperties struct { + // Comment The new comment. It is required only when `type` is `user`. + Comment string `json:"comment"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner CasesOwner `json:"owner"` + + // Type The type of comment. + Type CasesAddUserCommentRequestPropertiesType `json:"type"` +} + +// CasesAddUserCommentRequestPropertiesType The type of comment. +type CasesAddUserCommentRequestPropertiesType string + +// CasesAlertCommentResponseProperties defines model for Cases_alert_comment_response_properties. +type CasesAlertCommentResponseProperties struct { + AlertId *[]string `json:"alertId,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + CreatedBy *struct { + Email *string `json:"email,omitempty"` + FullName *string `json:"full_name,omitempty"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username,omitempty"` + } `json:"created_by,omitempty"` + Id *string `json:"id,omitempty"` + Index *[]string `json:"index,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner *CasesOwner `json:"owner,omitempty"` + PushedAt *time.Time `json:"pushed_at,omitempty"` + PushedBy *struct { + Email *string `json:"email,omitempty"` + FullName *string `json:"full_name,omitempty"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username,omitempty"` + } `json:"pushed_by,omitempty"` + Rule *struct { + // Id The rule identifier. + Id *string `json:"id,omitempty"` + + // Name The rule name. + Name *string `json:"name,omitempty"` + } `json:"rule,omitempty"` + Type CasesAlertCommentResponsePropertiesType `json:"type"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` + UpdatedBy *struct { + Email *string `json:"email,omitempty"` + FullName *string `json:"full_name,omitempty"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username,omitempty"` + } `json:"updated_by,omitempty"` + Version *string `json:"version,omitempty"` +} + +// CasesAlertCommentResponsePropertiesType defines model for CasesAlertCommentResponseProperties.Type. +type CasesAlertCommentResponsePropertiesType string + +// CasesAlertIdentifiers The alert identifiers. It is required only when `type` is `alert`. You can use an array of strings to add multiple alerts to a case, provided that they all relate to the same rule; `index` must also be an array with the same length or number of elements. Adding multiple alerts in this manner is recommended rather than calling the API multiple times. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. +type CasesAlertIdentifiers struct { + union json.RawMessage +} + +// CasesAlertIdentifiers0 defines model for . +type CasesAlertIdentifiers0 = string + +// CasesAlertIdentifiers1 defines model for . +type CasesAlertIdentifiers1 = []string + +// CasesAlertIndices The alert indices. It is required only when `type` is `alert`. If you are adding multiple alerts to a case, use an array of strings; the position of each index name in the array must match the position of the corresponding alert identifier in the `alertId` array. This functionality is in technical preview and may be changed or removed in a future release. 
Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. +type CasesAlertIndices struct { + union json.RawMessage +} + +// CasesAlertIndices0 defines model for . +type CasesAlertIndices0 = string + +// CasesAlertIndices1 defines model for . +type CasesAlertIndices1 = []string + +// CasesAlertResponseProperties defines model for Cases_alert_response_properties. +type CasesAlertResponseProperties struct { + AttachedAt *time.Time `json:"attached_at,omitempty"` + + // Id The alert identifier. + Id *string `json:"id,omitempty"` + + // Index The alert index. + Index *string `json:"index,omitempty"` +} + +// CasesAssignees An array containing users that are assigned to the case. +type CasesAssignees = []struct { + // Uid A unique identifier for the user profile. These identifiers can be found by using the suggest user profile API. + Uid string `json:"uid"` +} + +// CasesCaseCategories defines model for Cases_case_categories. +type CasesCaseCategories = []CasesCaseCategory + +// CasesCaseCategory A word or phrase that categorizes the case. +type CasesCaseCategory = string + +// CasesCaseDescription The description for the case. +type CasesCaseDescription = string + +// CasesCaseResponseClosedByProperties defines model for Cases_case_response_closed_by_properties. +type CasesCaseResponseClosedByProperties struct { + Email *string `json:"email,omitempty"` + FullName *string `json:"full_name,omitempty"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username,omitempty"` +} + +// CasesCaseResponseCreatedByProperties defines model for Cases_case_response_created_by_properties. +type CasesCaseResponseCreatedByProperties struct { + Email *string `json:"email,omitempty"` + FullName *string `json:"full_name,omitempty"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username,omitempty"` +} + +// CasesCaseResponseProperties defines model for Cases_case_response_properties. +type CasesCaseResponseProperties struct { + // Assignees An array containing users that are assigned to the case. + Assignees *CasesAssignees `json:"assignees,omitempty"` + + // Category The case category. + Category *string `json:"category,omitempty"` + ClosedAt *time.Time `json:"closed_at,omitempty"` + ClosedBy *CasesCaseResponseClosedByProperties `json:"closed_by,omitempty"` + + // Comments An array of comment objects for the case. + Comments []CasesCaseResponseProperties_Comments_Item `json:"comments"` + Connector CasesCaseResponseProperties_Connector `json:"connector"` + CreatedAt time.Time `json:"created_at"` + CreatedBy CasesCaseResponseCreatedByProperties `json:"created_by"` + + // CustomFields Custom field values for the case. + CustomFields *[]struct { + // Key The unique identifier for the custom field. The key value must exist in the case configuration settings. + Key *string `json:"key,omitempty"` + + // Type The custom field type. It must match the type specified in the case configuration settings. + Type *CasesCaseResponsePropertiesCustomFieldsType `json:"type,omitempty"` + + // Value The custom field value. If the custom field is required, it cannot be explicitly set to null. However, for cases that existed when the required custom field was added, the default value stored in Elasticsearch is `undefined`. The value returned in the API and user interface in this case is `null`. 
+ Value *CasesCaseResponseProperties_CustomFields_Value `json:"value,omitempty"` + } `json:"customFields,omitempty"` + Description string `json:"description"` + + // Duration The elapsed time from the creation of the case to its closure (in seconds). If the case has not been closed, the duration is set to null. If the case was closed after less than half a second, the duration is rounded down to zero. + Duration *int `json:"duration,omitempty"` + ExternalService *CasesExternalService `json:"external_service,omitempty"` + Id string `json:"id"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner CasesOwner `json:"owner"` + + // Settings An object that contains the case settings. + Settings CasesSettings `json:"settings"` + + // Severity The severity of the case. + Severity CasesCaseSeverity `json:"severity"` + + // Status The status of the case. + Status CasesCaseStatus `json:"status"` + Tags []string `json:"tags"` + Title string `json:"title"` + TotalAlerts int `json:"totalAlerts"` + TotalComment int `json:"totalComment"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` + UpdatedBy *CasesCaseResponseUpdatedByProperties `json:"updated_by,omitempty"` + Version string `json:"version"` +} + +// CasesCaseResponseProperties_Comments_Item defines model for Cases_case_response_properties.comments.Item. +type CasesCaseResponseProperties_Comments_Item struct { + union json.RawMessage +} + +// CasesCaseResponseProperties_Connector defines model for CasesCaseResponseProperties.Connector. +type CasesCaseResponseProperties_Connector struct { + union json.RawMessage +} + +// CasesCaseResponsePropertiesCustomFieldsType The custom field type. It must match the type specified in the case configuration settings. +type CasesCaseResponsePropertiesCustomFieldsType string + +// CasesCaseResponsePropertiesCustomFieldsValue0 defines model for . +type CasesCaseResponsePropertiesCustomFieldsValue0 = string + +// CasesCaseResponsePropertiesCustomFieldsValue1 defines model for . +type CasesCaseResponsePropertiesCustomFieldsValue1 = bool + +// CasesCaseResponseProperties_CustomFields_Value The custom field value. If the custom field is required, it cannot be explicitly set to null. However, for cases that existed when the required custom field was added, the default value stored in Elasticsearch is `undefined`. The value returned in the API and user interface in this case is `null`. +type CasesCaseResponseProperties_CustomFields_Value struct { + union json.RawMessage +} + +// CasesCaseResponsePushedByProperties defines model for Cases_case_response_pushed_by_properties. +type CasesCaseResponsePushedByProperties struct { + Email *string `json:"email,omitempty"` + FullName *string `json:"full_name,omitempty"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username,omitempty"` +} + +// CasesCaseResponseUpdatedByProperties defines model for Cases_case_response_updated_by_properties. +type CasesCaseResponseUpdatedByProperties struct { + Email *string `json:"email,omitempty"` + FullName *string `json:"full_name,omitempty"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username,omitempty"` +} + +// CasesCaseSeverity The severity of the case. +type CasesCaseSeverity string + +// CasesCaseStatus The status of the case. +type CasesCaseStatus string + +// CasesCaseTags The words and phrases that help categorize cases. It can be an empty array. 
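+// An illustrative sketch (not generated from the OpenAPI document) of decoding a
+// CasesCaseResponseProperties payload, as returned by the get case API, and reading
+// a few of its required fields; the raw JSON variable is hypothetical.
+//
+//	var caseResp CasesCaseResponseProperties
+//	if err := json.Unmarshal(raw, &caseResp); err != nil {
+//		// handle the decode error
+//	}
+//	fmt.Println(caseResp.Title, caseResp.Status, caseResp.TotalComment)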
+type CasesCaseTags = []string + +// CasesCaseTitle A title for the case. +type CasesCaseTitle = string + +// CasesClosureTypes Indicates whether a case is automatically closed when it is pushed to external systems (`close-by-pushing`) or not automatically closed (`close-by-user`). +type CasesClosureTypes string + +// CasesConnectorPropertiesCasesWebhook Defines properties for connectors when type is `.cases-webhook`. +type CasesConnectorPropertiesCasesWebhook struct { + Fields *string `json:"fields,omitempty"` + + // Id The identifier for the connector. To retrieve connector IDs, use the find connectors API. + Id string `json:"id"` + + // Name The name of the connector. + Name string `json:"name"` + + // Type The type of connector. + Type CasesConnectorPropertiesCasesWebhookType `json:"type"` +} + +// CasesConnectorPropertiesCasesWebhookType The type of connector. +type CasesConnectorPropertiesCasesWebhookType string + +// CasesConnectorPropertiesJira Defines properties for connectors when type is `.jira`. +type CasesConnectorPropertiesJira struct { + // Fields An object containing the connector fields. If you want to omit any individual field, specify null as its value. + Fields struct { + // IssueType The type of issue. + IssueType *string `json:"issueType,omitempty"` + + // Parent The key of the parent issue, when the issue type is sub-task. + Parent *string `json:"parent,omitempty"` + + // Priority The priority of the issue. + Priority *string `json:"priority,omitempty"` + } `json:"fields"` + + // Id The identifier for the connector. To retrieve connector IDs, use the find connectors API. + Id string `json:"id"` + + // Name The name of the connector. + Name string `json:"name"` + + // Type The type of connector. + Type CasesConnectorPropertiesJiraType `json:"type"` +} + +// CasesConnectorPropertiesJiraType The type of connector. +type CasesConnectorPropertiesJiraType string + +// CasesConnectorPropertiesNone Defines properties for connectors when type is `.none`. +type CasesConnectorPropertiesNone struct { + // Fields An object containing the connector fields. To create a case without a connector, specify null. To update a case to remove the connector, specify null. + Fields *string `json:"fields,omitempty"` + + // Id The identifier for the connector. To create a case without a connector, use `none`. To update a case to remove the connector, specify `none`. + Id string `json:"id"` + + // Name The name of the connector. To create a case without a connector, use `none`. To update a case to remove the connector, specify `none`. + Name string `json:"name"` + + // Type The type of connector. To create a case without a connector, use `.none`. To update a case to remove the connector, specify `.none`. + Type CasesConnectorPropertiesNoneType `json:"type"` +} + +// CasesConnectorPropertiesNoneType The type of connector. To create a case without a connector, use `.none`. To update a case to remove the connector, specify `.none`. +type CasesConnectorPropertiesNoneType string + +// CasesConnectorPropertiesResilient Defines properties for connectors when type is `.resilient`. +type CasesConnectorPropertiesResilient struct { + // Fields An object containing the connector fields. If you want to omit any individual field, specify null as its value. + Fields *struct { + // IssueTypes The type of incident. + IssueTypes []string `json:"issueTypes"` + + // SeverityCode The severity code of the incident. 
+ SeverityCode string `json:"severityCode"` + } `json:"fields,omitempty"` + + // Id The identifier for the connector. + Id string `json:"id"` + + // Name The name of the connector. + Name string `json:"name"` + + // Type The type of connector. + Type CasesConnectorPropertiesResilientType `json:"type"` +} + +// CasesConnectorPropertiesResilientType The type of connector. +type CasesConnectorPropertiesResilientType string + +// CasesConnectorPropertiesServicenow Defines properties for connectors when type is `.servicenow`. +type CasesConnectorPropertiesServicenow struct { + // Fields An object containing the connector fields. If you want to omit any individual field, specify null as its value. + Fields struct { + // Category The category of the incident. + Category *string `json:"category,omitempty"` + + // Impact The effect an incident had on business. + Impact *string `json:"impact,omitempty"` + + // Severity The severity of the incident. + Severity *string `json:"severity,omitempty"` + + // Subcategory The subcategory of the incident. + Subcategory *string `json:"subcategory,omitempty"` + + // Urgency The extent to which the incident resolution can be delayed. + Urgency *string `json:"urgency,omitempty"` + } `json:"fields"` + + // Id The identifier for the connector. To retrieve connector IDs, use the find connectors API. + Id string `json:"id"` + + // Name The name of the connector. + Name string `json:"name"` + + // Type The type of connector. + Type CasesConnectorPropertiesServicenowType `json:"type"` +} + +// CasesConnectorPropertiesServicenowType The type of connector. +type CasesConnectorPropertiesServicenowType string + +// CasesConnectorPropertiesServicenowSir Defines properties for connectors when type is `.servicenow-sir`. +type CasesConnectorPropertiesServicenowSir struct { + // Fields An object containing the connector fields. If you want to omit any individual field, specify null as its value. + Fields struct { + // Category The category of the incident. + Category *string `json:"category,omitempty"` + + // DestIp Indicates whether cases will send a comma-separated list of destination IPs. + DestIp *bool `json:"destIp,omitempty"` + + // MalwareHash Indicates whether cases will send a comma-separated list of malware hashes. + MalwareHash *bool `json:"malwareHash,omitempty"` + + // MalwareUrl Indicates whether cases will send a comma-separated list of malware URLs. + MalwareUrl *bool `json:"malwareUrl,omitempty"` + + // Priority The priority of the issue. + Priority *string `json:"priority,omitempty"` + + // SourceIp Indicates whether cases will send a comma-separated list of source IPs. + SourceIp *bool `json:"sourceIp,omitempty"` + + // Subcategory The subcategory of the incident. + Subcategory *string `json:"subcategory,omitempty"` + } `json:"fields"` + + // Id The identifier for the connector. To retrieve connector IDs, use the find connectors API. + Id string `json:"id"` + + // Name The name of the connector. + Name string `json:"name"` + + // Type The type of connector. + Type CasesConnectorPropertiesServicenowSirType `json:"type"` +} + +// CasesConnectorPropertiesServicenowSirType The type of connector. +type CasesConnectorPropertiesServicenowSirType string + +// CasesConnectorPropertiesSwimlane Defines properties for connectors when type is `.swimlane`. +type CasesConnectorPropertiesSwimlane struct { + // Fields An object containing the connector fields. If you want to omit any individual field, specify null as its value. 
+ Fields struct { + // CaseId The case identifier for Swimlane connectors. + CaseId *string `json:"caseId,omitempty"` + } `json:"fields"` + + // Id The identifier for the connector. To retrieve connector IDs, use the find connectors API. + Id string `json:"id"` + + // Name The name of the connector. + Name string `json:"name"` + + // Type The type of connector. + Type CasesConnectorPropertiesSwimlaneType `json:"type"` +} + +// CasesConnectorPropertiesSwimlaneType The type of connector. +type CasesConnectorPropertiesSwimlaneType string + +// CasesConnectorTypes The type of connector. +type CasesConnectorTypes string + +// CasesCreateCaseRequest The create case API request body varies depending on the type of connector. +type CasesCreateCaseRequest struct { + // Assignees An array containing users that are assigned to the case. + Assignees *CasesAssignees `json:"assignees,omitempty"` + + // Category A word or phrase that categorizes the case. + Category *CasesCaseCategory `json:"category,omitempty"` + Connector CasesCreateCaseRequest_Connector `json:"connector"` + + // CustomFields Custom field values for a case. Any optional custom fields that are not specified in the request are set to null. + CustomFields *[]struct { + // Key The unique identifier for the custom field. The key value must exist in the case configuration settings. + Key string `json:"key"` + + // Type The custom field type. It must match the type specified in the case configuration settings. + Type CasesCreateCaseRequestCustomFieldsType `json:"type"` + + // Value The custom field value. If the custom field is required, it cannot be explicitly set to null. However, for cases that existed when the required custom field was added, the default value stored in Elasticsearch is `undefined`. The value returned in the API and user interface in this case is `null`. + Value CasesCreateCaseRequest_CustomFields_Value `json:"value"` + } `json:"customFields,omitempty"` + + // Description The description for the case. + Description CasesCaseDescription `json:"description"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner CasesOwner `json:"owner"` + + // Settings An object that contains the case settings. + Settings CasesSettings `json:"settings"` + + // Severity The severity of the case. + Severity *CasesCaseSeverity `json:"severity,omitempty"` + + // Tags The words and phrases that help categorize cases. It can be an empty array. + Tags CasesCaseTags `json:"tags"` + + // Title A title for the case. + Title CasesCaseTitle `json:"title"` +} + +// CasesCreateCaseRequest_Connector defines model for CasesCreateCaseRequest.Connector. +type CasesCreateCaseRequest_Connector struct { + union json.RawMessage +} + +// CasesCreateCaseRequestCustomFieldsType The custom field type. It must match the type specified in the case configuration settings. +type CasesCreateCaseRequestCustomFieldsType string + +// CasesCreateCaseRequestCustomFieldsValue0 defines model for . +type CasesCreateCaseRequestCustomFieldsValue0 = string + +// CasesCreateCaseRequestCustomFieldsValue1 defines model for . +type CasesCreateCaseRequestCustomFieldsValue1 = bool + +// CasesCreateCaseRequest_CustomFields_Value The custom field value. If the custom field is required, it cannot be explicitly set to null. However, for cases that existed when the required custom field was added, the default value stored in Elasticsearch is `undefined`. The value returned in the API and user interface in this case is `null`. 
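+// A minimal illustrative sketch (not part of the generated code) of filling in the
+// simple fields of the CasesCreateCaseRequest struct defined above. The required
+// Connector field is a union type and is left at its zero value here; a real request
+// would still need it populated, and the "cases" owner literal is an assumption
+// taken from the field descriptions.
+//
+//	newCase := CasesCreateCaseRequest{
+//		Title:       CasesCaseTitle("Suspicious login burst"),
+//		Description: CasesCaseDescription("Multiple failed logins followed by a success."),
+//		Owner:       CasesOwner("cases"),
+//		Settings:    CasesSettings{SyncAlerts: true},
+//		Tags:        CasesCaseTags{"security", "triage"},
+//	}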
+type CasesCreateCaseRequest_CustomFields_Value struct { + union json.RawMessage +} + +// CasesExternalService defines model for Cases_external_service. +type CasesExternalService struct { + ConnectorId *string `json:"connector_id,omitempty"` + ConnectorName *string `json:"connector_name,omitempty"` + ExternalId *string `json:"external_id,omitempty"` + ExternalTitle *string `json:"external_title,omitempty"` + ExternalUrl *string `json:"external_url,omitempty"` + PushedAt *time.Time `json:"pushed_at,omitempty"` + PushedBy *struct { + Email *string `json:"email,omitempty"` + FullName *string `json:"full_name,omitempty"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username,omitempty"` + } `json:"pushed_by,omitempty"` +} + +// CasesOwner The application that owns the cases: Stack Management, Observability, or Elastic Security. +type CasesOwner string + +// CasesOwners defines model for Cases_owners. +type CasesOwners = []CasesOwner + +// CasesPayloadAlertComment defines model for Cases_payload_alert_comment. +type CasesPayloadAlertComment struct { + Comment *struct { + AlertId *CasesPayloadAlertComment_Comment_AlertId `json:"alertId,omitempty"` + Index *CasesPayloadAlertComment_Comment_Index `json:"index,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner *CasesOwner `json:"owner,omitempty"` + Rule *struct { + // Id The rule identifier. + Id *string `json:"id,omitempty"` + + // Name The rule name. + Name *string `json:"name,omitempty"` + } `json:"rule,omitempty"` + Type *CasesPayloadAlertCommentCommentType `json:"type,omitempty"` + } `json:"comment,omitempty"` +} + +// CasesPayloadAlertCommentCommentAlertId0 defines model for . +type CasesPayloadAlertCommentCommentAlertId0 = string + +// CasesPayloadAlertCommentCommentAlertId1 defines model for . +type CasesPayloadAlertCommentCommentAlertId1 = []string + +// CasesPayloadAlertComment_Comment_AlertId defines model for CasesPayloadAlertComment.Comment.AlertId. +type CasesPayloadAlertComment_Comment_AlertId struct { + union json.RawMessage +} + +// CasesPayloadAlertCommentCommentIndex0 defines model for . +type CasesPayloadAlertCommentCommentIndex0 = string + +// CasesPayloadAlertCommentCommentIndex1 defines model for . +type CasesPayloadAlertCommentCommentIndex1 = []string + +// CasesPayloadAlertComment_Comment_Index defines model for CasesPayloadAlertComment.Comment.Index. +type CasesPayloadAlertComment_Comment_Index struct { + union json.RawMessage +} + +// CasesPayloadAlertCommentCommentType defines model for CasesPayloadAlertComment.Comment.Type. +type CasesPayloadAlertCommentCommentType string + +// CasesPayloadAssignees defines model for Cases_payload_assignees. +type CasesPayloadAssignees struct { + // Assignees An array containing users that are assigned to the case. + Assignees *CasesAssignees `json:"assignees,omitempty"` +} + +// CasesPayloadConnector defines model for Cases_payload_connector. +type CasesPayloadConnector struct { + Connector *struct { + // Fields An object containing the connector fields. To create a case without a connector, specify null. If you want to omit any individual field, specify null as its value. + Fields *struct { + // CaseId The case identifier for Swimlane connectors. + CaseId *string `json:"caseId,omitempty"` + + // Category The category of the incident for ServiceNow ITSM and ServiceNow SecOps connectors. 
+ Category *string `json:"category,omitempty"` + + // DestIp Indicates whether cases will send a comma-separated list of destination IPs for ServiceNow SecOps connectors. + DestIp *bool `json:"destIp,omitempty"` + + // Impact The effect an incident had on business for ServiceNow ITSM connectors. + Impact *string `json:"impact,omitempty"` + + // IssueType The type of issue for Jira connectors. + IssueType *string `json:"issueType,omitempty"` + + // IssueTypes The type of incident for IBM Resilient connectors. + IssueTypes *[]string `json:"issueTypes,omitempty"` + + // MalwareHash Indicates whether cases will send a comma-separated list of malware hashes for ServiceNow SecOps connectors. + MalwareHash *bool `json:"malwareHash,omitempty"` + + // MalwareUrl Indicates whether cases will send a comma-separated list of malware URLs for ServiceNow SecOps connectors. + MalwareUrl *bool `json:"malwareUrl,omitempty"` + + // Parent The key of the parent issue, when the issue type is sub-task for Jira connectors. + Parent *string `json:"parent,omitempty"` + + // Priority The priority of the issue for Jira and ServiceNow SecOps connectors. + Priority *string `json:"priority,omitempty"` + + // Severity The severity of the incident for ServiceNow ITSM connectors. + Severity *string `json:"severity,omitempty"` + + // SeverityCode The severity code of the incident for IBM Resilient connectors. + SeverityCode *string `json:"severityCode,omitempty"` + + // SourceIp Indicates whether cases will send a comma-separated list of source IPs for ServiceNow SecOps connectors. + SourceIp *bool `json:"sourceIp,omitempty"` + + // Subcategory The subcategory of the incident for ServiceNow ITSM connectors. + Subcategory *string `json:"subcategory,omitempty"` + + // Urgency The extent to which the incident resolution can be delayed for ServiceNow ITSM connectors. + Urgency *string `json:"urgency,omitempty"` + } `json:"fields,omitempty"` + + // Id The identifier for the connector. To create a case without a connector, use `none`. + Id *string `json:"id,omitempty"` + + // Name The name of the connector. To create a case without a connector, use `none`. + Name *string `json:"name,omitempty"` + + // Type The type of connector. + Type *CasesConnectorTypes `json:"type,omitempty"` + } `json:"connector,omitempty"` +} + +// CasesPayloadCreateCase defines model for Cases_payload_create_case. +type CasesPayloadCreateCase struct { + // Assignees An array containing users that are assigned to the case. + Assignees *CasesAssignees `json:"assignees,omitempty"` + Connector *struct { + // Fields An object containing the connector fields. To create a case without a connector, specify null. If you want to omit any individual field, specify null as its value. + Fields *struct { + // CaseId The case identifier for Swimlane connectors. + CaseId *string `json:"caseId,omitempty"` + + // Category The category of the incident for ServiceNow ITSM and ServiceNow SecOps connectors. + Category *string `json:"category,omitempty"` + + // DestIp Indicates whether cases will send a comma-separated list of destination IPs for ServiceNow SecOps connectors. + DestIp *bool `json:"destIp,omitempty"` + + // Impact The effect an incident had on business for ServiceNow ITSM connectors. + Impact *string `json:"impact,omitempty"` + + // IssueType The type of issue for Jira connectors. + IssueType *string `json:"issueType,omitempty"` + + // IssueTypes The type of incident for IBM Resilient connectors. 
+ IssueTypes *[]string `json:"issueTypes,omitempty"` + + // MalwareHash Indicates whether cases will send a comma-separated list of malware hashes for ServiceNow SecOps connectors. + MalwareHash *bool `json:"malwareHash,omitempty"` + + // MalwareUrl Indicates whether cases will send a comma-separated list of malware URLs for ServiceNow SecOps connectors. + MalwareUrl *bool `json:"malwareUrl,omitempty"` + + // Parent The key of the parent issue, when the issue type is sub-task for Jira connectors. + Parent *string `json:"parent,omitempty"` + + // Priority The priority of the issue for Jira and ServiceNow SecOps connectors. + Priority *string `json:"priority,omitempty"` + + // Severity The severity of the incident for ServiceNow ITSM connectors. + Severity *string `json:"severity,omitempty"` + + // SeverityCode The severity code of the incident for IBM Resilient connectors. + SeverityCode *string `json:"severityCode,omitempty"` + + // SourceIp Indicates whether cases will send a comma-separated list of source IPs for ServiceNow SecOps connectors. + SourceIp *bool `json:"sourceIp,omitempty"` + + // Subcategory The subcategory of the incident for ServiceNow ITSM connectors. + Subcategory *string `json:"subcategory,omitempty"` + + // Urgency The extent to which the incident resolution can be delayed for ServiceNow ITSM connectors. + Urgency *string `json:"urgency,omitempty"` + } `json:"fields,omitempty"` + + // Id The identifier for the connector. To create a case without a connector, use `none`. + Id *string `json:"id,omitempty"` + + // Name The name of the connector. To create a case without a connector, use `none`. + Name *string `json:"name,omitempty"` + + // Type The type of connector. + Type *CasesConnectorTypes `json:"type,omitempty"` + } `json:"connector,omitempty"` + Description *string `json:"description,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner *CasesOwner `json:"owner,omitempty"` + + // Settings An object that contains the case settings. + Settings *CasesSettings `json:"settings,omitempty"` + + // Severity The severity of the case. + Severity *CasesCaseSeverity `json:"severity,omitempty"` + + // Status The status of the case. + Status *CasesCaseStatus `json:"status,omitempty"` + Tags *[]string `json:"tags,omitempty"` + Title *string `json:"title,omitempty"` +} + +// CasesPayloadDelete If the `action` is `delete` and the `type` is `delete_case`, the payload is nullable. +type CasesPayloadDelete = map[string]interface{} + +// CasesPayloadDescription defines model for Cases_payload_description. +type CasesPayloadDescription struct { + Description *string `json:"description,omitempty"` +} + +// CasesPayloadPushed defines model for Cases_payload_pushed. +type CasesPayloadPushed struct { + ExternalService *CasesExternalService `json:"externalService,omitempty"` +} + +// CasesPayloadSettings defines model for Cases_payload_settings. +type CasesPayloadSettings struct { + // Settings An object that contains the case settings. + Settings *CasesSettings `json:"settings,omitempty"` +} + +// CasesPayloadSeverity defines model for Cases_payload_severity. +type CasesPayloadSeverity struct { + // Severity The severity of the case. + Severity *CasesCaseSeverity `json:"severity,omitempty"` +} + +// CasesPayloadStatus defines model for Cases_payload_status. +type CasesPayloadStatus struct { + // Status The status of the case. 
+ Status *CasesCaseStatus `json:"status,omitempty"` +} + +// CasesPayloadTags defines model for Cases_payload_tags. +type CasesPayloadTags struct { + Tags *[]string `json:"tags,omitempty"` +} + +// CasesPayloadTitle defines model for Cases_payload_title. +type CasesPayloadTitle struct { + Title *string `json:"title,omitempty"` +} + +// CasesPayloadUserComment defines model for Cases_payload_user_comment. +type CasesPayloadUserComment struct { + Comment *struct { + Comment *string `json:"comment,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner *CasesOwner `json:"owner,omitempty"` + Type *CasesPayloadUserCommentCommentType `json:"type,omitempty"` + } `json:"comment,omitempty"` +} + +// CasesPayloadUserCommentCommentType defines model for CasesPayloadUserComment.Comment.Type. +type CasesPayloadUserCommentCommentType string + +// CasesRule The rule that is associated with the alerts. It is required only when `type` is `alert`. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. +type CasesRule struct { + // Id The rule identifier. + Id *string `json:"id,omitempty"` + + // Name The rule name. + Name *string `json:"name,omitempty"` +} + +// CasesSearchFieldsType The fields to perform the `simple_query_string` parsed query against. +type CasesSearchFieldsType string + +// CasesSearchFieldsTypeArray defines model for Cases_searchFieldsTypeArray. +type CasesSearchFieldsTypeArray = []CasesSearchFieldsType + +// CasesSetCaseConfigurationRequest External connection details, such as the closure type and default connector for cases. +type CasesSetCaseConfigurationRequest struct { + // ClosureType Indicates whether a case is automatically closed when it is pushed to external systems (`close-by-pushing`) or not automatically closed (`close-by-user`). + ClosureType CasesClosureTypes `json:"closure_type"` + + // Connector An object that contains the connector configuration. + Connector struct { + // Fields The fields specified in the case configuration are not used and are not propagated to individual cases, therefore it is recommended to set it to `null`. + Fields *map[string]interface{} `json:"fields,omitempty"` + + // Id The identifier for the connector. If you do not want a default connector, use `none`. To retrieve connector IDs, use the find connectors API. + Id string `json:"id"` + + // Name The name of the connector. If you do not want a default connector, use `none`. To retrieve connector names, use the find connectors API. + Name string `json:"name"` + + // Type The type of connector. + Type CasesConnectorTypes `json:"type"` + } `json:"connector"` + + // CustomFields Custom fields case configuration. + CustomFields *[]struct { + // DefaultValue A default value for the custom field. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. + DefaultValue *CasesSetCaseConfigurationRequest_CustomFields_DefaultValue `json:"defaultValue,omitempty"` + + // Key A unique key for the custom field. Must be lower case and composed only of a-z, 0-9, '_', and '-' characters. It is used in API calls to refer to a specific custom field. + Key string `json:"key"` + + // Label The custom field label that is displayed in the case. 
+ Label string `json:"label"` + + // Required Indicates whether the field is required. If `false`, the custom field can be set to null or omitted when a case is created or updated. + Required bool `json:"required"` + + // Type The type of the custom field. + Type CasesSetCaseConfigurationRequestCustomFieldsType `json:"type"` + } `json:"customFields,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner CasesOwner `json:"owner"` + Templates *CasesTemplates `json:"templates,omitempty"` +} + +// CasesSetCaseConfigurationRequestCustomFieldsDefaultValue0 defines model for . +type CasesSetCaseConfigurationRequestCustomFieldsDefaultValue0 = string + +// CasesSetCaseConfigurationRequestCustomFieldsDefaultValue1 defines model for . +type CasesSetCaseConfigurationRequestCustomFieldsDefaultValue1 = bool + +// CasesSetCaseConfigurationRequest_CustomFields_DefaultValue A default value for the custom field. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. +type CasesSetCaseConfigurationRequest_CustomFields_DefaultValue struct { + union json.RawMessage +} + +// CasesSetCaseConfigurationRequestCustomFieldsType The type of the custom field. +type CasesSetCaseConfigurationRequestCustomFieldsType string + +// CasesSettings An object that contains the case settings. +type CasesSettings struct { + // SyncAlerts Turns alert syncing on or off. + SyncAlerts bool `json:"syncAlerts"` +} + +// CasesString defines model for Cases_string. +type CasesString = string + +// CasesStringArray defines model for Cases_string_array. +type CasesStringArray = []CasesString + +// CasesTemplateTags The words and phrases that help categorize templates. It can be an empty array. +type CasesTemplateTags = []string + +// CasesTemplates defines model for Cases_templates. +type CasesTemplates = []struct { + CaseFields *struct { + // Assignees An array containing users that are assigned to the case. + Assignees *CasesAssignees `json:"assignees,omitempty"` + + // Category A word or phrase that categorizes the case. + Category *CasesCaseCategory `json:"category,omitempty"` + Connector *struct { + // Fields The fields specified in the case configuration are not used and are not propagated to individual cases, therefore it is recommended to set it to `null`. + Fields *map[string]interface{} `json:"fields,omitempty"` + + // Id The identifier for the connector. If you do not want a default connector, use `none`. To retrieve connector IDs, use the find connectors API. + Id *string `json:"id,omitempty"` + + // Name The name of the connector. If you do not want a default connector, use `none`. To retrieve connector names, use the find connectors API. + Name *string `json:"name,omitempty"` + + // Type The type of connector. + Type *CasesConnectorTypes `json:"type,omitempty"` + } `json:"connector,omitempty"` + + // CustomFields Custom field values in the template. + CustomFields *[]struct { + // Key The unique key for the custom field. + Key *string `json:"key,omitempty"` + + // Type The type of the custom field. + Type *CasesTemplatesCaseFieldsCustomFieldsType `json:"type,omitempty"` + + // Value The default value for the custom field when a case uses the template. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. 
+ Value *CasesTemplates_CaseFields_CustomFields_Value `json:"value,omitempty"` + } `json:"customFields,omitempty"` + + // Description The description for the case. + Description *CasesCaseDescription `json:"description,omitempty"` + + // Settings An object that contains the case settings. + Settings *CasesSettings `json:"settings,omitempty"` + + // Severity The severity of the case. + Severity *CasesCaseSeverity `json:"severity,omitempty"` + + // Tags The words and phrases that help categorize cases. It can be an empty array. + Tags *CasesCaseTags `json:"tags,omitempty"` + + // Title A title for the case. + Title *CasesCaseTitle `json:"title,omitempty"` + } `json:"caseFields,omitempty"` + + // Description A description for the template. + Description *string `json:"description,omitempty"` + + // Key A unique key for the template. Must be lower case and composed only of a-z, 0-9, '_', and '-' characters. It is used in API calls to refer to a specific template. + Key *string `json:"key,omitempty"` + + // Name The name of the template. + Name *string `json:"name,omitempty"` + + // Tags The words and phrases that help categorize templates. It can be an empty array. + Tags *CasesTemplateTags `json:"tags,omitempty"` +} + +// CasesTemplatesCaseFieldsCustomFieldsType The type of the custom field. +type CasesTemplatesCaseFieldsCustomFieldsType string + +// CasesTemplatesCaseFieldsCustomFieldsValue0 defines model for . +type CasesTemplatesCaseFieldsCustomFieldsValue0 = string + +// CasesTemplatesCaseFieldsCustomFieldsValue1 defines model for . +type CasesTemplatesCaseFieldsCustomFieldsValue1 = bool + +// CasesTemplates_CaseFields_CustomFields_Value The default value for the custom field when a case uses the template. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. +type CasesTemplates_CaseFields_CustomFields_Value struct { + union json.RawMessage +} + +// CasesUpdateAlertCommentRequestProperties Defines properties for case comment requests when type is alert. +type CasesUpdateAlertCommentRequestProperties struct { + // AlertId The alert identifiers. It is required only when `type` is `alert`. You can use an array of strings to add multiple alerts to a case, provided that they all relate to the same rule; `index` must also be an array with the same length or number of elements. Adding multiple alerts in this manner is recommended rather than calling the API multiple times. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. + AlertId CasesAlertIdentifiers `json:"alertId"` + + // Id The identifier for the comment. To retrieve comment IDs, use the get comments API. + Id string `json:"id"` + + // Index The alert indices. It is required only when `type` is `alert`. If you are adding multiple alerts to a case, use an array of strings; the position of each index name in the array must match the position of the corresponding alert identifier in the `alertId` array. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. + Index CasesAlertIndices `json:"index"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. 
+ Owner CasesOwner `json:"owner"` + + // Rule The rule that is associated with the alerts. It is required only when `type` is `alert`. This functionality is in technical preview and may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview are not subject to the support SLA of official GA features. + Rule CasesRule `json:"rule"` + + // Type The type of comment. + Type CasesUpdateAlertCommentRequestPropertiesType `json:"type"` + + // Version The current comment version. To retrieve version values, use the get comments API. + Version string `json:"version"` +} + +// CasesUpdateAlertCommentRequestPropertiesType The type of comment. +type CasesUpdateAlertCommentRequestPropertiesType string + +// CasesUpdateCaseCommentRequest The update case comment API request body varies depending on whether you are updating an alert or a comment. +type CasesUpdateCaseCommentRequest struct { + union json.RawMessage +} + +// CasesUpdateCaseConfigurationRequest You can update settings such as the closure type, custom fields, templates, and the default connector for cases. +type CasesUpdateCaseConfigurationRequest struct { + // ClosureType Indicates whether a case is automatically closed when it is pushed to external systems (`close-by-pushing`) or not automatically closed (`close-by-user`). + ClosureType *CasesClosureTypes `json:"closure_type,omitempty"` + + // Connector An object that contains the connector configuration. + Connector *struct { + // Fields The fields specified in the case configuration are not used and are not propagated to individual cases, therefore it is recommended to set it to `null`. + Fields *map[string]interface{} `json:"fields,omitempty"` + + // Id The identifier for the connector. If you do not want a default connector, use `none`. To retrieve connector IDs, use the find connectors API. + Id string `json:"id"` + + // Name The name of the connector. If you do not want a default connector, use `none`. To retrieve connector names, use the find connectors API. + Name string `json:"name"` + + // Type The type of connector. + Type CasesConnectorTypes `json:"type"` + } `json:"connector,omitempty"` + + // CustomFields Custom fields case configuration. + CustomFields *[]struct { + // DefaultValue A default value for the custom field. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. + DefaultValue *CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue `json:"defaultValue,omitempty"` + + // Key A unique key for the custom field. Must be lower case and composed only of a-z, 0-9, '_', and '-' characters. It is used in API calls to refer to a specific custom field. + Key string `json:"key"` + + // Label The custom field label that is displayed in the case. + Label string `json:"label"` + + // Required Indicates whether the field is required. If `false`, the custom field can be set to null or omitted when a case is created or updated. + Required bool `json:"required"` + + // Type The type of the custom field. + Type CasesUpdateCaseConfigurationRequestCustomFieldsType `json:"type"` + } `json:"customFields,omitempty"` + Templates *CasesTemplates `json:"templates,omitempty"` + + // Version The version of the connector. To retrieve the version value, use the get configuration API. + Version string `json:"version"` +} + +// CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0 defines model for . 
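+// An illustrative sketch (not generated from the spec) of the update case
+// configuration request defined above: only the closure type is changed, and the
+// version string is a hypothetical value that would come from the get configuration
+// API. The "close-by-user" literal is taken from the CasesClosureTypes description.
+//
+//	closure := CasesClosureTypes("close-by-user")
+//	updateCfg := CasesUpdateCaseConfigurationRequest{
+//		ClosureType: &closure,
+//		Version:     "WzEyMywxXQ==",
+//	}
+//	body, _ := json.Marshal(updateCfg)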
+type CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0 = string + +// CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1 defines model for . +type CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1 = bool + +// CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue A default value for the custom field. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. +type CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue struct { + union json.RawMessage +} + +// CasesUpdateCaseConfigurationRequestCustomFieldsType The type of the custom field. +type CasesUpdateCaseConfigurationRequestCustomFieldsType string + +// CasesUpdateCaseRequest The update case API request body varies depending on the type of connector. +type CasesUpdateCaseRequest struct { + // Cases An array containing one or more case objects. + Cases []struct { + // Assignees An array containing users that are assigned to the case. + Assignees *CasesAssignees `json:"assignees,omitempty"` + + // Category A word or phrase that categorizes the case. + Category *CasesCaseCategory `json:"category,omitempty"` + Connector *CasesUpdateCaseRequest_Cases_Connector `json:"connector,omitempty"` + + // CustomFields Custom field values for a case. Any optional custom fields that are not specified in the request are set to null. + CustomFields *[]struct { + // Key The unique identifier for the custom field. The key value must exist in the case configuration settings. + Key string `json:"key"` + + // Type The custom field type. It must match the type specified in the case configuration settings. + Type CasesUpdateCaseRequestCasesCustomFieldsType `json:"type"` + + // Value The custom field value. If the custom field is required, it cannot be explicitly set to null. However, for cases that existed when the required custom field was added, the default value stored in Elasticsearch is `undefined`. The value returned in the API and user interface in this case is `null`. + Value CasesUpdateCaseRequest_Cases_CustomFields_Value `json:"value"` + } `json:"customFields,omitempty"` + + // Description The description for the case. + Description *CasesCaseDescription `json:"description,omitempty"` + + // Id The identifier for the case. + Id string `json:"id"` + + // Settings An object that contains the case settings. + Settings *CasesSettings `json:"settings,omitempty"` + + // Severity The severity of the case. + Severity *CasesCaseSeverity `json:"severity,omitempty"` + + // Status The status of the case. + Status *CasesCaseStatus `json:"status,omitempty"` + + // Tags The words and phrases that help categorize cases. It can be an empty array. + Tags *CasesCaseTags `json:"tags,omitempty"` + + // Title A title for the case. + Title *CasesCaseTitle `json:"title,omitempty"` + + // Version The current version of the case. To determine this value, use the get case or find cases APIs. + Version string `json:"version"` + } `json:"cases"` +} + +// CasesUpdateCaseRequest_Cases_Connector defines model for CasesUpdateCaseRequest.Cases.Connector. +type CasesUpdateCaseRequest_Cases_Connector struct { + union json.RawMessage +} + +// CasesUpdateCaseRequestCasesCustomFieldsType The custom field type. It must match the type specified in the case configuration settings. +type CasesUpdateCaseRequestCasesCustomFieldsType string + +// CasesUpdateCaseRequestCasesCustomFieldsValue0 defines model for . 
+type CasesUpdateCaseRequestCasesCustomFieldsValue0 = string + +// CasesUpdateCaseRequestCasesCustomFieldsValue1 defines model for . +type CasesUpdateCaseRequestCasesCustomFieldsValue1 = bool + +// CasesUpdateCaseRequest_Cases_CustomFields_Value The custom field value. If the custom field is required, it cannot be explicitly set to null. However, for cases that existed when the required custom field was added, the default value stored in Elasticsearch is `undefined`. The value returned in the API and user interface in this case is `null`. +type CasesUpdateCaseRequest_Cases_CustomFields_Value struct { + union json.RawMessage +} + +// CasesUpdateUserCommentRequestProperties Defines properties for case comment requests when type is user. +type CasesUpdateUserCommentRequestProperties struct { + // Comment The new comment. It is required only when `type` is `user`. + Comment string `json:"comment"` + + // Id The identifier for the comment. To retrieve comment IDs, use the get comments API. + Id string `json:"id"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner CasesOwner `json:"owner"` + + // Type The type of comment. + Type CasesUpdateUserCommentRequestPropertiesType `json:"type"` + + // Version The current comment version. To retrieve version values, use the get comments API. + Version string `json:"version"` +} + +// CasesUpdateUserCommentRequestPropertiesType The type of comment. +type CasesUpdateUserCommentRequestPropertiesType string + +// CasesUserActionsFindResponseProperties defines model for Cases_user_actions_find_response_properties. +type CasesUserActionsFindResponseProperties struct { + Action CasesActions `json:"action"` + CommentId *string `json:"comment_id,omitempty"` + CreatedAt time.Time `json:"created_at"` + CreatedBy struct { + Email *string `json:"email,omitempty"` + FullName *string `json:"full_name,omitempty"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username,omitempty"` + } `json:"created_by"` + Id string `json:"id"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner CasesOwner `json:"owner"` + Payload CasesUserActionsFindResponseProperties_Payload `json:"payload"` + + // Type The type of action. + Type CasesUserActionsFindResponsePropertiesType `json:"type"` + Version string `json:"version"` +} + +// CasesUserActionsFindResponseProperties_Payload defines model for CasesUserActionsFindResponseProperties.Payload. +type CasesUserActionsFindResponseProperties_Payload struct { + union json.RawMessage +} + +// CasesUserActionsFindResponsePropertiesType The type of action. +type CasesUserActionsFindResponsePropertiesType string + +// CasesUserCommentResponseProperties defines model for Cases_user_comment_response_properties. +type CasesUserCommentResponseProperties struct { + Comment *string `json:"comment,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + CreatedBy *CasesCaseResponseCreatedByProperties `json:"created_by,omitempty"` + Id *string `json:"id,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. 
+ Owner *CasesOwner `json:"owner,omitempty"` + PushedAt *time.Time `json:"pushed_at,omitempty"` + PushedBy *CasesCaseResponsePushedByProperties `json:"pushed_by,omitempty"` + Type CasesUserCommentResponsePropertiesType `json:"type"` + UpdatedAt *time.Time `json:"updated_at,omitempty"` + UpdatedBy *CasesCaseResponseUpdatedByProperties `json:"updated_by,omitempty"` + Version *string `json:"version,omitempty"` +} + +// CasesUserCommentResponsePropertiesType defines model for CasesUserCommentResponseProperties.Type. +type CasesUserCommentResponsePropertiesType string + +// DataViews400Response defines model for Data_views_400_response. +type DataViews400Response struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode float32 `json:"statusCode"` +} + +// DataViews404Response defines model for Data_views_404_response. +type DataViews404Response struct { + Error *DataViews404ResponseError `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *DataViews404ResponseStatusCode `json:"statusCode,omitempty"` +} + +// DataViews404ResponseError defines model for DataViews404Response.Error. +type DataViews404ResponseError string + +// DataViews404ResponseStatusCode defines model for DataViews404Response.StatusCode. +type DataViews404ResponseStatusCode int + +// DataViewsAllownoindex Allows the data view saved object to exist before the data is available. +type DataViewsAllownoindex = bool + +// DataViewsCreateDataViewRequestObject defines model for Data_views_create_data_view_request_object. +type DataViewsCreateDataViewRequestObject struct { + // DataView The data view object. + DataView DataViewsCreateDataViewRequestObjectInner `json:"data_view"` + + // Override Override an existing data view if a data view with the provided title already exists. + Override *bool `json:"override,omitempty"` +} + +// DataViewsCreateDataViewRequestObjectInner The data view object. +type DataViewsCreateDataViewRequestObjectInner struct { + // AllowNoIndex Allows the data view saved object to exist before the data is available. + AllowNoIndex *DataViewsAllownoindex `json:"allowNoIndex,omitempty"` + FieldAttrs *map[string]DataViewsFieldattrs `json:"fieldAttrs,omitempty"` + + // FieldFormats A map of field formats by field name. + FieldFormats *DataViewsFieldformats `json:"fieldFormats,omitempty"` + Fields *map[string]interface{} `json:"fields,omitempty"` + Id *string `json:"id,omitempty"` + + // Name The data view name. + Name *string `json:"name,omitempty"` + + // Namespaces An array of space identifiers for sharing the data view between multiple spaces. + Namespaces *DataViewsNamespaces `json:"namespaces,omitempty"` + RuntimeFieldMap *map[string]DataViewsRuntimefieldmap `json:"runtimeFieldMap,omitempty"` + + // SourceFilters The array of field names you want to filter out in Discover. + SourceFilters *DataViewsSourcefilters `json:"sourceFilters,omitempty"` + + // TimeFieldName The timestamp field name, which you use for time-based data views. + TimeFieldName *DataViewsTimefieldname `json:"timeFieldName,omitempty"` + + // Title Comma-separated list of data streams, indices, and aliases that you want to search. Supports wildcards (`*`). + Title DataViewsTitle `json:"title"` + + // Type When set to `rollup`, identifies the rollup data views. + Type *DataViewsType `json:"type,omitempty"` + + // TypeMeta When you use rollup indices, contains the field list for the rollup data view API endpoints. 
+ TypeMeta *DataViewsTypemeta `json:"typeMeta,omitempty"` + Version *string `json:"version,omitempty"` +} + +// DataViewsDataViewResponseObject defines model for Data_views_data_view_response_object. +type DataViewsDataViewResponseObject struct { + DataView *DataViewsDataViewResponseObjectInner `json:"data_view,omitempty"` +} + +// DataViewsDataViewResponseObjectInner defines model for Data_views_data_view_response_object_inner. +type DataViewsDataViewResponseObjectInner struct { + // AllowNoIndex Allows the data view saved object to exist before the data is available. + AllowNoIndex *DataViewsAllownoindex `json:"allowNoIndex,omitempty"` + FieldAttrs *map[string]DataViewsFieldattrs `json:"fieldAttrs,omitempty"` + + // FieldFormats A map of field formats by field name. + FieldFormats *DataViewsFieldformats `json:"fieldFormats,omitempty"` + Fields *map[string]interface{} `json:"fields,omitempty"` + Id *string `json:"id,omitempty"` + + // Name The data view name. + Name *string `json:"name,omitempty"` + + // Namespaces An array of space identifiers for sharing the data view between multiple spaces. + Namespaces *DataViewsNamespaces `json:"namespaces,omitempty"` + RuntimeFieldMap *map[string]DataViewsRuntimefieldmap `json:"runtimeFieldMap,omitempty"` + + // SourceFilters The array of field names you want to filter out in Discover. + SourceFilters *DataViewsSourcefilters `json:"sourceFilters,omitempty"` + + // TimeFieldName The timestamp field name, which you use for time-based data views. + TimeFieldName *DataViewsTimefieldname `json:"timeFieldName,omitempty"` + + // Title Comma-separated list of data streams, indices, and aliases that you want to search. Supports wildcards (`*`). + Title *DataViewsTitle `json:"title,omitempty"` + + // TypeMeta When you use rollup indices, contains the field list for the rollup data view API endpoints. + TypeMeta *DataViewsTypemetaResponse `json:"typeMeta,omitempty"` + Version *string `json:"version,omitempty"` +} + +// DataViewsFieldattrs A map of field attributes by field name. +type DataViewsFieldattrs struct { + // Count Popularity count for the field. + Count *int `json:"count,omitempty"` + + // CustomDescription Custom description for the field. + CustomDescription *string `json:"customDescription,omitempty"` + + // CustomLabel Custom label for the field. + CustomLabel *string `json:"customLabel,omitempty"` +} + +// DataViewsFieldformat defines model for Data_views_fieldformat. +type DataViewsFieldformat struct { + Id *string `json:"id,omitempty"` + Params *DataViewsFieldformatParams `json:"params,omitempty"` +} + +// DataViewsFieldformatParams defines model for Data_views_fieldformat_params. 
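+// An illustrative sketch (not generated from the spec) of building the create data
+// view request body defined above; the "logs-*" pattern and "@timestamp" field name
+// are placeholder values.
+//
+//	timeField := DataViewsTimefieldname("@timestamp")
+//	createReq := DataViewsCreateDataViewRequestObject{
+//		DataView: DataViewsCreateDataViewRequestObjectInner{
+//			Title:         DataViewsTitle("logs-*"),
+//			TimeFieldName: &timeField,
+//		},
+//	}
+//	payload, _ := json.Marshal(createReq)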
+type DataViewsFieldformatParams struct { + Colors *[]DataViewsFieldformatParamsColor `json:"colors,omitempty"` + FieldLength *int `json:"fieldLength,omitempty"` + FieldType *string `json:"fieldType,omitempty"` + Height *int `json:"height,omitempty"` + IncludeSpaceWithSuffix *bool `json:"includeSpaceWithSuffix,omitempty"` + InputFormat *string `json:"inputFormat,omitempty"` + LabelTemplate *string `json:"labelTemplate,omitempty"` + LookupEntries *[]DataViewsFieldformatParamsLookup `json:"lookupEntries,omitempty"` + OutputFormat *string `json:"outputFormat,omitempty"` + OutputPrecision *int `json:"outputPrecision,omitempty"` + Pattern *string `json:"pattern,omitempty"` + Timezone *string `json:"timezone,omitempty"` + Transform *string `json:"transform,omitempty"` + Type *string `json:"type,omitempty"` + UnknownKeyValue *string `json:"unknownKeyValue,omitempty"` + UrlTemplate *string `json:"urlTemplate,omitempty"` + UseShortSuffix *bool `json:"useShortSuffix,omitempty"` + Width *int `json:"width,omitempty"` +} + +// DataViewsFieldformatParamsColor defines model for Data_views_fieldformat_params_color. +type DataViewsFieldformatParamsColor struct { + Background *string `json:"background,omitempty"` + Range *string `json:"range,omitempty"` + Regex *string `json:"regex,omitempty"` + Text *string `json:"text,omitempty"` +} + +// DataViewsFieldformatParamsLookup defines model for Data_views_fieldformat_params_lookup. +type DataViewsFieldformatParamsLookup struct { + Key *string `json:"key,omitempty"` + Value *string `json:"value,omitempty"` +} + +// DataViewsFieldformats A map of field formats by field name. +type DataViewsFieldformats map[string]DataViewsFieldformat + +// DataViewsNamespaces An array of space identifiers for sharing the data view between multiple spaces. +type DataViewsNamespaces = []string + +// DataViewsRuntimefieldmap A map of runtime field definitions by field name. +type DataViewsRuntimefieldmap struct { + Script DataViewsRuntimefieldmapScript `json:"script"` + + // Type Mapping type of the runtime field. + Type string `json:"type"` +} + +// DataViewsRuntimefieldmapScript defines model for Data_views_runtimefieldmap_script. +type DataViewsRuntimefieldmapScript struct { + // Source Script for the runtime field. + Source *string `json:"source,omitempty"` +} + +// DataViewsSourcefilterItem defines model for Data_views_sourcefilter_item. +type DataViewsSourcefilterItem struct { + Value string `json:"value"` +} + +// DataViewsSourcefilters The array of field names you want to filter out in Discover. +type DataViewsSourcefilters = []DataViewsSourcefilterItem + +// DataViewsSwapDataViewRequestObject defines model for Data_views_swap_data_view_request_object. +type DataViewsSwapDataViewRequestObject struct { + // Delete Deletes referenced saved object if all references are removed. + Delete *bool `json:"delete,omitempty"` + + // ForId Limit the affected saved objects to one or more by identifier. + ForId *DataViewsSwapDataViewRequestObject_ForId `json:"forId,omitempty"` + + // ForType Limit the affected saved objects by type. + ForType *string `json:"forType,omitempty"` + + // FromId The saved object reference to change. + FromId string `json:"fromId"` + + // FromType Specify the type of the saved object reference to alter. The default value is `index-pattern` for data views. + FromType *string `json:"fromType,omitempty"` + + // ToId New saved object reference value to replace the old value. 
+ ToId string `json:"toId"` +} + +// DataViewsSwapDataViewRequestObjectForId0 defines model for . +type DataViewsSwapDataViewRequestObjectForId0 = string + +// DataViewsSwapDataViewRequestObjectForId1 defines model for . +type DataViewsSwapDataViewRequestObjectForId1 = []string + +// DataViewsSwapDataViewRequestObject_ForId Limit the affected saved objects to one or more by identifier. +type DataViewsSwapDataViewRequestObject_ForId struct { + union json.RawMessage +} + +// DataViewsTimefieldname The timestamp field name, which you use for time-based data views. +type DataViewsTimefieldname = string + +// DataViewsTitle Comma-separated list of data streams, indices, and aliases that you want to search. Supports wildcards (`*`). +type DataViewsTitle = string + +// DataViewsType When set to `rollup`, identifies the rollup data views. +type DataViewsType = string + +// DataViewsTypemeta When you use rollup indices, contains the field list for the rollup data view API endpoints. +type DataViewsTypemeta struct { + // Aggs A map of rollup restrictions by aggregation type and field name. + Aggs map[string]interface{} `json:"aggs"` + + // Params Properties for retrieving rollup fields. + Params map[string]interface{} `json:"params"` +} + +// DataViewsTypemetaResponse When you use rollup indices, contains the field list for the rollup data view API endpoints. +type DataViewsTypemetaResponse struct { + // Aggs A map of rollup restrictions by aggregation type and field name. + Aggs *map[string]interface{} `json:"aggs,omitempty"` + + // Params Properties for retrieving rollup fields. + Params *map[string]interface{} `json:"params,omitempty"` +} + +// DataViewsUpdateDataViewRequestObject defines model for Data_views_update_data_view_request_object. +type DataViewsUpdateDataViewRequestObject struct { + // DataView The data view properties you want to update. Only the specified properties are updated in the data view. Unspecified fields stay as they are persisted. + DataView DataViewsUpdateDataViewRequestObjectInner `json:"data_view"` + + // RefreshFields Reloads the data view fields after the data view is updated. + RefreshFields *bool `json:"refresh_fields,omitempty"` +} + +// DataViewsUpdateDataViewRequestObjectInner The data view properties you want to update. Only the specified properties are updated in the data view. Unspecified fields stay as they are persisted. +type DataViewsUpdateDataViewRequestObjectInner struct { + // AllowNoIndex Allows the data view saved object to exist before the data is available. + AllowNoIndex *DataViewsAllownoindex `json:"allowNoIndex,omitempty"` + + // FieldFormats A map of field formats by field name. + FieldFormats *DataViewsFieldformats `json:"fieldFormats,omitempty"` + Fields *map[string]interface{} `json:"fields,omitempty"` + Name *string `json:"name,omitempty"` + RuntimeFieldMap *map[string]DataViewsRuntimefieldmap `json:"runtimeFieldMap,omitempty"` + + // SourceFilters The array of field names you want to filter out in Discover. + SourceFilters *DataViewsSourcefilters `json:"sourceFilters,omitempty"` + + // TimeFieldName The timestamp field name, which you use for time-based data views. + TimeFieldName *DataViewsTimefieldname `json:"timeFieldName,omitempty"` + + // Title Comma-separated list of data streams, indices, and aliases that you want to search. Supports wildcards (`*`). + Title *DataViewsTitle `json:"title,omitempty"` + + // Type When set to `rollup`, identifies the rollup data views. 
+ Type *DataViewsType `json:"type,omitempty"` + + // TypeMeta When you use rollup indices, contains the field list for the rollup data view API endpoints. + TypeMeta *DataViewsTypemeta `json:"typeMeta,omitempty"` +} + +// KibanaHTTPAPIsCoreStatusRedactedResponse A minimal representation of Kibana's operational status. +type KibanaHTTPAPIsCoreStatusRedactedResponse struct { + Status struct { + Overall struct { + // Level Service status levels as human and machine readable values. + Level KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevel `json:"level"` + } `json:"overall"` + } `json:"status"` +} + +// KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevel Service status levels as human and machine readable values. +type KibanaHTTPAPIsCoreStatusRedactedResponseStatusOverallLevel string + +// KibanaHTTPAPIsCoreStatusResponse Kibana's operational status as well as a detailed breakdown of plugin statuses indication of various loads (like event loop utilization and network traffic) at time of request. +type KibanaHTTPAPIsCoreStatusResponse struct { + // Metrics Metric groups collected by Kibana. + Metrics struct { + // CollectionIntervalInMillis The interval at which metrics should be collected. + CollectionIntervalInMillis float32 `json:"collection_interval_in_millis"` + + // ElasticsearchClient Current network metrics of Kibana's Elasticsearch client. + ElasticsearchClient struct { + // TotalActiveSockets Count of network sockets currently in use. + TotalActiveSockets float32 `json:"totalActiveSockets"` + + // TotalIdleSockets Count of network sockets currently idle. + TotalIdleSockets float32 `json:"totalIdleSockets"` + + // TotalQueuedRequests Count of requests not yet assigned to sockets. + TotalQueuedRequests float32 `json:"totalQueuedRequests"` + } `json:"elasticsearch_client"` + + // LastUpdated The time metrics were collected. + LastUpdated string `json:"last_updated"` + } `json:"metrics"` + + // Name Kibana instance name. + Name string `json:"name"` + Status struct { + // Core Statuses of core Kibana services. + Core struct { + Elasticsearch struct { + // Detail Human readable detail of the service status. + Detail *string `json:"detail,omitempty"` + + // DocumentationUrl A URL to further documentation regarding this service. + DocumentationUrl *string `json:"documentationUrl,omitempty"` + + // Level Service status levels as human and machine readable values. + Level KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevel `json:"level"` + + // Meta An unstructured set of extra metadata about this service. + Meta map[string]interface{} `json:"meta"` + + // Summary A human readable summary of the service status. + Summary string `json:"summary"` + } `json:"elasticsearch"` + Http *struct { + // Detail Human readable detail of the service status. + Detail *string `json:"detail,omitempty"` + + // DocumentationUrl A URL to further documentation regarding this service. + DocumentationUrl *string `json:"documentationUrl,omitempty"` + + // Level Service status levels as human and machine readable values. + Level KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevel `json:"level"` + + // Meta An unstructured set of extra metadata about this service. + Meta map[string]interface{} `json:"meta"` + + // Summary A human readable summary of the service status. + Summary string `json:"summary"` + } `json:"http,omitempty"` + SavedObjects struct { + // Detail Human readable detail of the service status. 
+ Detail *string `json:"detail,omitempty"` + + // DocumentationUrl A URL to further documentation regarding this service. + DocumentationUrl *string `json:"documentationUrl,omitempty"` + + // Level Service status levels as human and machine readable values. + Level KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevel `json:"level"` + + // Meta An unstructured set of extra metadata about this service. + Meta map[string]interface{} `json:"meta"` + + // Summary A human readable summary of the service status. + Summary string `json:"summary"` + } `json:"savedObjects"` + } `json:"core"` + Overall struct { + // Detail Human readable detail of the service status. + Detail *string `json:"detail,omitempty"` + + // DocumentationUrl A URL to further documentation regarding this service. + DocumentationUrl *string `json:"documentationUrl,omitempty"` + + // Level Service status levels as human and machine readable values. + Level KibanaHTTPAPIsCoreStatusResponseStatusOverallLevel `json:"level"` + + // Meta An unstructured set of extra metadata about this service. + Meta map[string]interface{} `json:"meta"` + + // Summary A human readable summary of the service status. + Summary string `json:"summary"` + } `json:"overall"` + + // Plugins A dynamic mapping of plugin ID to plugin status. + Plugins map[string]struct { + // Detail Human readable detail of the service status. + Detail *string `json:"detail,omitempty"` + + // DocumentationUrl A URL to further documentation regarding this service. + DocumentationUrl *string `json:"documentationUrl,omitempty"` + + // Level Service status levels as human and machine readable values. + Level KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevel `json:"level"` + + // Meta An unstructured set of extra metadata about this service. + Meta map[string]interface{} `json:"meta"` + + // Summary A human readable summary of the service status. + Summary string `json:"summary"` + } `json:"plugins"` + } `json:"status"` + + // Uuid Unique, generated Kibana instance UUID. This UUID should persist even if the Kibana process restarts. + Uuid string `json:"uuid"` + Version struct { + // BuildDate The date and time of this build. + BuildDate string `json:"build_date"` + + // BuildFlavor The build flavour determines configuration and behavior of Kibana. On premise users will almost always run the "traditional" flavour, while other flavours are reserved for Elastic-specific use cases. + BuildFlavor KibanaHTTPAPIsCoreStatusResponseVersionBuildFlavor `json:"build_flavor"` + + // BuildHash A unique hash value representing the git commit of this Kibana build. + BuildHash string `json:"build_hash"` + + // BuildNumber A monotonically increasing number, each subsequent build will have a higher number. + BuildNumber float32 `json:"build_number"` + + // BuildSnapshot Whether this build is a snapshot build. + BuildSnapshot bool `json:"build_snapshot"` + + // Number A semantic version number. + Number string `json:"number"` + } `json:"version"` +} + +// KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevel Service status levels as human and machine readable values. +type KibanaHTTPAPIsCoreStatusResponseStatusCoreElasticsearchLevel string + +// KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevel Service status levels as human and machine readable values. +type KibanaHTTPAPIsCoreStatusResponseStatusCoreHttpLevel string + +// KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevel Service status levels as human and machine readable values. 
+type KibanaHTTPAPIsCoreStatusResponseStatusCoreSavedObjectsLevel string + +// KibanaHTTPAPIsCoreStatusResponseStatusOverallLevel Service status levels as human and machine readable values. +type KibanaHTTPAPIsCoreStatusResponseStatusOverallLevel string + +// KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevel Service status levels as human and machine readable values. +type KibanaHTTPAPIsCoreStatusResponseStatusPluginsLevel string + +// KibanaHTTPAPIsCoreStatusResponseVersionBuildFlavor The build flavour determines configuration and behavior of Kibana. On premise users will almost always run the "traditional" flavour, while other flavours are reserved for Elastic-specific use cases. +type KibanaHTTPAPIsCoreStatusResponseVersionBuildFlavor string + +// MachineLearningAPIsMlSync200Response defines model for Machine_learning_APIs_mlSync200Response. +type MachineLearningAPIsMlSync200Response struct { + // DatafeedsAdded If a saved object for an anomaly detection job is missing a datafeed identifier, it is added when you run the sync machine learning saved objects API. + DatafeedsAdded *map[string]MachineLearningAPIsMlSyncResponseDatafeeds `json:"datafeedsAdded,omitempty"` + + // DatafeedsRemoved If a saved object for an anomaly detection job references a datafeed that no longer exists, it is deleted when you run the sync machine learning saved objects API. + DatafeedsRemoved *map[string]MachineLearningAPIsMlSyncResponseDatafeeds `json:"datafeedsRemoved,omitempty"` + + // SavedObjectsCreated If saved objects are missing for machine learning jobs or trained models, they are created when you run the sync machine learning saved objects API. + SavedObjectsCreated *MachineLearningAPIsMlSyncResponseSavedObjectsCreated `json:"savedObjectsCreated,omitempty"` + + // SavedObjectsDeleted If saved objects exist for machine learning jobs or trained models that no longer exist, they are deleted when you run the sync machine learning saved objects API. + SavedObjectsDeleted *MachineLearningAPIsMlSyncResponseSavedObjectsDeleted `json:"savedObjectsDeleted,omitempty"` +} + +// MachineLearningAPIsMlSync4xxResponse defines model for Machine_learning_APIs_mlSync4xxResponse. +type MachineLearningAPIsMlSync4xxResponse struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *int `json:"statusCode,omitempty"` +} + +// MachineLearningAPIsMlSyncResponseAnomalyDetectors The sync machine learning saved objects API response contains this object when there are anomaly detection jobs affected by the synchronization. There is an object for each relevant job, which contains the synchronization status. +type MachineLearningAPIsMlSyncResponseAnomalyDetectors struct { + // Success The success or failure of the synchronization. + Success *MachineLearningAPIsMlSyncResponseSuccess `json:"success,omitempty"` +} + +// MachineLearningAPIsMlSyncResponseDataFrameAnalytics The sync machine learning saved objects API response contains this object when there are data frame analytics jobs affected by the synchronization. There is an object for each relevant job, which contains the synchronization status. +type MachineLearningAPIsMlSyncResponseDataFrameAnalytics struct { + // Success The success or failure of the synchronization. + Success *MachineLearningAPIsMlSyncResponseSuccess `json:"success,omitempty"` +} + +// MachineLearningAPIsMlSyncResponseDatafeeds The sync machine learning saved objects API response contains this object when there are datafeeds affected by the synchronization. 
There is an object for each relevant datafeed, which contains the synchronization status.
+type MachineLearningAPIsMlSyncResponseDatafeeds struct {
+ // Success The success or failure of the synchronization.
+ Success *MachineLearningAPIsMlSyncResponseSuccess `json:"success,omitempty"`
+}
+
+// MachineLearningAPIsMlSyncResponseSavedObjectsCreated If saved objects are missing for machine learning jobs or trained models, they are created when you run the sync machine learning saved objects API.
+type MachineLearningAPIsMlSyncResponseSavedObjectsCreated struct {
+ // AnomalyDetector If saved objects are missing for anomaly detection jobs, they are created.
+ AnomalyDetector *map[string]MachineLearningAPIsMlSyncResponseAnomalyDetectors `json:"anomaly-detector,omitempty"`
+
+ // DataFrameAnalytics If saved objects are missing for data frame analytics jobs, they are created.
+ DataFrameAnalytics *map[string]MachineLearningAPIsMlSyncResponseDataFrameAnalytics `json:"data-frame-analytics,omitempty"`
+
+ // TrainedModel If saved objects are missing for trained models, they are created.
+ TrainedModel *map[string]MachineLearningAPIsMlSyncResponseTrainedModels `json:"trained-model,omitempty"`
+}
+
+// MachineLearningAPIsMlSyncResponseSavedObjectsDeleted If saved objects exist for machine learning jobs or trained models that no longer exist, they are deleted when you run the sync machine learning saved objects API.
+type MachineLearningAPIsMlSyncResponseSavedObjectsDeleted struct {
+ // AnomalyDetector If saved objects exist for nonexistent anomaly detection jobs, they are deleted.
+ AnomalyDetector *map[string]MachineLearningAPIsMlSyncResponseAnomalyDetectors `json:"anomaly-detector,omitempty"`
+
+ // DataFrameAnalytics If saved objects exist for nonexistent data frame analytics jobs, they are deleted.
+ DataFrameAnalytics *map[string]MachineLearningAPIsMlSyncResponseDataFrameAnalytics `json:"data-frame-analytics,omitempty"`
+
+ // TrainedModel If saved objects exist for nonexistent trained models, they are deleted.
+ TrainedModel *map[string]MachineLearningAPIsMlSyncResponseTrainedModels `json:"trained-model,omitempty"`
+}
+
+// MachineLearningAPIsMlSyncResponseSuccess The success or failure of the synchronization.
+type MachineLearningAPIsMlSyncResponseSuccess = bool
+
+// MachineLearningAPIsMlSyncResponseTrainedModels The sync machine learning saved objects API response contains this object when there are trained models affected by the synchronization. There is an object for each relevant trained model, which contains the synchronization status.
+type MachineLearningAPIsMlSyncResponseTrainedModels struct {
+ // Success The success or failure of the synchronization.
+ Success *MachineLearningAPIsMlSyncResponseSuccess `json:"success,omitempty"`
+}
+
+// ObservabilityAIAssistantAPIFunction defines model for Observability_AI_Assistant_API_Function.
+type ObservabilityAIAssistantAPIFunction struct {
+ // Description The description of the function.
+ Description *string `json:"description,omitempty"`
+
+ // Name The name of the function.
+ Name *string `json:"name,omitempty"`
+
+ // Parameters The parameters of the function.
+ Parameters *map[string]interface{} `json:"parameters,omitempty"`
+}
+
+// ObservabilityAIAssistantAPIFunctionCall Details of the function call within the message.
+type ObservabilityAIAssistantAPIFunctionCall struct {
+ // Arguments The arguments for the function call.
+ Arguments *string `json:"arguments,omitempty"` + + // Name The name of the function. + Name string `json:"name"` + + // Trigger The trigger of the function call. + Trigger ObservabilityAIAssistantAPIFunctionCallTrigger `json:"trigger"` +} + +// ObservabilityAIAssistantAPIFunctionCallTrigger The trigger of the function call. +type ObservabilityAIAssistantAPIFunctionCallTrigger string + +// ObservabilityAIAssistantAPIInstruction defines model for Observability_AI_Assistant_API_Instruction. +type ObservabilityAIAssistantAPIInstruction struct { + union json.RawMessage +} + +// ObservabilityAIAssistantAPIInstruction0 A simple instruction represented as a string. +type ObservabilityAIAssistantAPIInstruction0 = string + +// ObservabilityAIAssistantAPIInstruction1 A detailed instruction with an ID and text. +type ObservabilityAIAssistantAPIInstruction1 struct { + // Id A unique identifier for the instruction. + Id string `json:"id"` + + // Text The text of the instruction. + Text string `json:"text"` +} + +// ObservabilityAIAssistantAPIMessage defines model for Observability_AI_Assistant_API_Message. +type ObservabilityAIAssistantAPIMessage struct { + // Timestamp The timestamp when the message was created. + Timestamp string `json:"@timestamp"` + + // Message The main content of the message. + Message struct { + // Content The content of the message. + Content *string `json:"content,omitempty"` + + // Data Additional data associated with the message. + Data *string `json:"data,omitempty"` + + // Event The event related to the message. + Event *string `json:"event,omitempty"` + + // FunctionCall Details of the function call within the message. + FunctionCall *ObservabilityAIAssistantAPIFunctionCall `json:"function_call,omitempty"` + + // Name The name associated with the message. + Name *string `json:"name,omitempty"` + + // Role The role of the message sender. + Role ObservabilityAIAssistantAPIMessageRoleEnum `json:"role"` + } `json:"message"` +} + +// ObservabilityAIAssistantAPIMessageRoleEnum The role of the message sender. +type ObservabilityAIAssistantAPIMessageRoleEnum string + +// SLOs400Response defines model for SLOs_400_response. +type SLOs400Response struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode float32 `json:"statusCode"` +} + +// SLOs401Response defines model for SLOs_401_response. +type SLOs401Response struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode float32 `json:"statusCode"` +} + +// SLOs403Response defines model for SLOs_403_response. +type SLOs403Response struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode float32 `json:"statusCode"` +} + +// SLOs404Response defines model for SLOs_404_response. +type SLOs404Response struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode float32 `json:"statusCode"` +} + +// SLOs409Response defines model for SLOs_409_response. +type SLOs409Response struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode float32 `json:"statusCode"` +} + +// SLOsBudgetingMethod The budgeting method to use when computing the rollup data. +type SLOsBudgetingMethod string + +// SLOsBulkDeleteRequest The bulk delete SLO request takes a list of SLOs Definition id to delete. 
+type SLOsBulkDeleteRequest struct {
+ // List An array of SLO definition ids
+ List []string `json:"list"`
+}
+
+// SLOsBulkDeleteResponse The bulk delete SLO response returns a taskId that can be used to poll for its status
+type SLOsBulkDeleteResponse struct {
+ // TaskId The taskId of the bulk delete operation
+ TaskId *string `json:"taskId,omitempty"`
+}
+
+// SLOsBulkDeleteStatusResponse Indicates if the bulk deletion is completed, with the detailed results of the operation.
+type SLOsBulkDeleteStatusResponse struct {
+ // Error The error message if the bulk deletion operation failed
+ Error *string `json:"error,omitempty"`
+
+ // IsDone Indicates if the bulk deletion operation is completed
+ IsDone *bool `json:"isDone,omitempty"`
+
+ // Results The results of the bulk deletion operation, including the success status and any errors for each SLO
+ Results *[]struct {
+ // Error The error message if the deletion operation failed for this SLO
+ Error *string `json:"error,omitempty"`
+
+ // Id The ID of the SLO that was deleted
+ Id *string `json:"id,omitempty"`
+
+ // Success The result of the deletion operation for this SLO
+ Success *bool `json:"success,omitempty"`
+ } `json:"results,omitempty"`
+}
+
+// SLOsBulkPurgeRollupRequest The bulk purge rollup data request takes a list of SLO ids and a purge policy, then deletes the rollup data according to the purge policy. This API can be used to remove the stale data of an SLO instance that is no longer updated.
+type SLOsBulkPurgeRollupRequest struct {
+ // List An array of SLO ids
+ List []string `json:"list"`
+
+ // PurgePolicy Policy that dictates which SLI documents to purge based on age or timestamp
+ PurgePolicy SLOsBulkPurgeRollupRequest_PurgePolicy `json:"purgePolicy"`
+}
+
+// SLOsBulkPurgeRollupRequestPurgePolicy0 defines model for .
+type SLOsBulkPurgeRollupRequestPurgePolicy0 struct {
+ // Age The duration to determine which documents to purge, formatted as {duration}{unit}. This value should be greater than or equal to the time window of every SLO provided.
+ Age *string `json:"age,omitempty"`
+
+ // PurgeType Specifies whether documents will be purged based on a specific age or on a timestamp
+ PurgeType *SLOsBulkPurgeRollupRequestPurgePolicy0PurgeType `json:"purgeType,omitempty"`
+}
+
+// SLOsBulkPurgeRollupRequestPurgePolicy0PurgeType Specifies whether documents will be purged based on a specific age or on a timestamp
+type SLOsBulkPurgeRollupRequestPurgePolicy0PurgeType string
+
+// SLOsBulkPurgeRollupRequestPurgePolicy1 defines model for .
+type SLOsBulkPurgeRollupRequestPurgePolicy1 struct {
+ // PurgeType Specifies whether documents will be purged based on a specific age or on a timestamp
+ PurgeType *SLOsBulkPurgeRollupRequestPurgePolicy1PurgeType `json:"purgeType,omitempty"`
+
+ // Timestamp The timestamp to determine which documents to purge, formatted in ISO. This value should be older than the applicable time window of every SLO provided.
+ Timestamp *string `json:"timestamp,omitempty"`
+}
+
+// SLOsBulkPurgeRollupRequestPurgePolicy1PurgeType Specifies whether documents will be purged based on a specific age or on a timestamp
+type SLOsBulkPurgeRollupRequestPurgePolicy1PurgeType string
+
+// SLOsBulkPurgeRollupRequest_PurgePolicy Policy that dictates which SLI documents to purge based on age or timestamp
+type SLOsBulkPurgeRollupRequest_PurgePolicy struct {
+ union json.RawMessage
+}
+
+// SLOsBulkPurgeRollupResponse The bulk purge rollup data response returns a task id from the elasticsearch deleteByQuery response.
+type SLOsBulkPurgeRollupResponse struct {
+ // TaskId The task id of the purge operation
+ TaskId *string `json:"taskId,omitempty"`
+}
+
+// SLOsCreateSloRequest The create SLO API request body varies depending on the type of indicator, time window and budgeting method.
+type SLOsCreateSloRequest struct {
+ // BudgetingMethod The budgeting method to use when computing the rollup data.
+ BudgetingMethod SLOsBudgetingMethod `json:"budgetingMethod"`
+
+ // Description A description for the SLO.
+ Description string `json:"description"`
+
+ // GroupBy optional group by field or fields to use to generate an SLO per distinct value
+ GroupBy *SLOsGroupBy `json:"groupBy,omitempty"`
+
+ // Id An optional and unique identifier for the SLO. Must be between 8 and 36 characters
+ Id *string `json:"id,omitempty"`
+ Indicator SLOsCreateSloRequest_Indicator `json:"indicator"`
+
+ // Name A name for the SLO.
+ Name string `json:"name"`
+
+ // Objective Defines properties for the SLO objective
+ Objective SLOsObjective `json:"objective"`
+
+ // Settings Defines properties for SLO settings.
+ Settings *SLOsSettings `json:"settings,omitempty"`
+
+ // Tags List of tags
+ Tags *[]string `json:"tags,omitempty"`
+
+ // TimeWindow Defines properties for the SLO time window
+ TimeWindow SLOsTimeWindow `json:"timeWindow"`
+}
+
+// SLOsCreateSloRequest_Indicator defines model for SLOsCreateSloRequest.Indicator.
+type SLOsCreateSloRequest_Indicator struct {
+ union json.RawMessage
+}
+
+// SLOsCreateSloResponse defines model for SLOs_create_slo_response.
+type SLOsCreateSloResponse struct {
+ Id string `json:"id"`
+}
+
+// SLOsDeleteSloInstancesRequest The delete SLO instances request takes a list of SLO id and instance id pairs, then deletes the rollup and summary data. This API can be used to remove the stale data of an SLO instance that is no longer updated.
+type SLOsDeleteSloInstancesRequest struct {
+ // List An array of SLO id and instance id pairs
+ List []struct {
+ // InstanceId The SLO instance identifier
+ InstanceId string `json:"instanceId"`
+
+ // SloId The SLO unique identifier
+ SloId string `json:"sloId"`
+ } `json:"list"`
+}
+
+// SLOsErrorBudget defines model for SLOs_error_budget.
+type SLOsErrorBudget struct {
+ // Consumed The error budget consumed, as a percentage of the initial value.
+ Consumed float32 `json:"consumed"`
+
+ // Initial The initial error budget, as 1 - objective
+ Initial float32 `json:"initial"`
+
+ // IsEstimated Only for SLO defined with occurrences budgeting method and calendar aligned time window.
+ IsEstimated bool `json:"isEstimated"`
+
+ // Remaining The error budget remaining, as a percentage of the initial value.
+ Remaining float32 `json:"remaining"`
+}
+
+// SLOsFilter Defines properties for a filter
+type SLOsFilter struct {
+ // Meta Defines properties for a filter
+ Meta *SLOsFilterMeta `json:"meta,omitempty"`
+ Query *map[string]interface{} `json:"query,omitempty"`
+}
+
+// SLOsFilterMeta Defines properties for a filter
+type SLOsFilterMeta struct {
+ Alias *string `json:"alias,omitempty"`
+ ControlledBy *string `json:"controlledBy,omitempty"`
+ Disabled *bool `json:"disabled,omitempty"`
+ Field *string `json:"field,omitempty"`
+ Group *string `json:"group,omitempty"`
+ Index *string `json:"index,omitempty"`
+ IsMultiIndex *bool `json:"isMultiIndex,omitempty"`
+ Key *string `json:"key,omitempty"`
+ Negate *bool `json:"negate,omitempty"`
+ Params *map[string]interface{} `json:"params,omitempty"`
+ Type *string `json:"type,omitempty"`
+ Value *string `json:"value,omitempty"`
+}
+
+// SLOsFindSloDefinitionsResponse A paginated response of SLO definitions matching the query.
+type SLOsFindSloDefinitionsResponse struct {
+ union json.RawMessage
+}
+
+// SLOsFindSloDefinitionsResponse0 defines model for .
+type SLOsFindSloDefinitionsResponse0 struct {
+ Page *float32 `json:"page,omitempty"`
+ PerPage *float32 `json:"perPage,omitempty"`
+ Results *[]SLOsSloWithSummaryResponse `json:"results,omitempty"`
+ Total *float32 `json:"total,omitempty"`
+}
+
+// SLOsFindSloDefinitionsResponse1 defines model for .
+type SLOsFindSloDefinitionsResponse1 struct {
+ // Page for backward compatibility
+ Page *float32 `json:"page,omitempty"`
+
+ // PerPage for backward compatibility
+ PerPage *float32 `json:"perPage,omitempty"`
+ Results *[]SLOsSloWithSummaryResponse `json:"results,omitempty"`
+
+ // SearchAfter the cursor to provide to get the next paged results
+ SearchAfter *[]string `json:"searchAfter,omitempty"`
+ Size *float32 `json:"size,omitempty"`
+ Total *float32 `json:"total,omitempty"`
+}
+
+// SLOsFindSloResponse A paginated response of SLOs matching the query.
+type SLOsFindSloResponse struct {
+ Page *float32 `json:"page,omitempty"`
+ PerPage *float32 `json:"perPage,omitempty"`
+ Results *[]SLOsSloWithSummaryResponse `json:"results,omitempty"`
+ SearchAfter *string `json:"searchAfter,omitempty"`
+
+ // Size Size provided for cursor based pagination
+ Size *float32 `json:"size,omitempty"`
+ Total *float32 `json:"total,omitempty"`
+}
+
+// SLOsGroupBy optional group by field or fields to use to generate an SLO per distinct value
+type SLOsGroupBy struct {
+ union json.RawMessage
+}
+
+// SLOsGroupBy0 defines model for .
+type SLOsGroupBy0 = string
+
+// SLOsGroupBy1 defines model for .
+type SLOsGroupBy1 = []string
+
+// SLOsIndicatorPropertiesApmAvailability Defines properties for the APM availability indicator type
+type SLOsIndicatorPropertiesApmAvailability struct {
+ // Params An object containing the indicator parameters.
+ Params struct {
+ // Environment The APM service environment or "*"
+ Environment string `json:"environment"`
+
+ // Filter KQL query used for filtering the data
+ Filter *string `json:"filter,omitempty"`
+
+ // Index The index used by APM metrics
+ Index string `json:"index"`
+
+ // Service The APM service name
+ Service string `json:"service"`
+
+ // TransactionName The APM transaction name or "*"
+ TransactionName string `json:"transactionName"`
+
+ // TransactionType The APM transaction type or "*"
+ TransactionType string `json:"transactionType"`
+ } `json:"params"`
+
+ // Type The type of indicator.
+ Type string `json:"type"`
+}
+
+// SLOsIndicatorPropertiesApmLatency Defines properties for the APM latency indicator type
+type SLOsIndicatorPropertiesApmLatency struct {
+ // Params An object containing the indicator parameters.
+ Params struct {
+ // Environment The APM service environment or "*"
+ Environment string `json:"environment"`
+
+ // Filter KQL query used for filtering the data
+ Filter *string `json:"filter,omitempty"`
+
+ // Index The index used by APM metrics
+ Index string `json:"index"`
+
+ // Service The APM service name
+ Service string `json:"service"`
+
+ // Threshold The latency threshold in milliseconds
+ Threshold float32 `json:"threshold"`
+
+ // TransactionName The APM transaction name or "*"
+ TransactionName string `json:"transactionName"`
+
+ // TransactionType The APM transaction type or "*"
+ TransactionType string `json:"transactionType"`
+ } `json:"params"`
+
+ // Type The type of indicator.
+ Type string `json:"type"`
+}
+
+// SLOsIndicatorPropertiesCustomKql Defines properties for a custom query indicator type
+type SLOsIndicatorPropertiesCustomKql struct {
+ // Params An object containing the indicator parameters.
+ Params struct {
+ // DataViewId The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries.
+ DataViewId *string `json:"dataViewId,omitempty"`
+
+ // Filter Defines properties for a filter
+ Filter *SLOsKqlWithFilters `json:"filter,omitempty"`
+
+ // Good The KQL query used to define the good events.
+ Good SLOsKqlWithFiltersGood `json:"good"`
+
+ // Index The index or index pattern to use
+ Index string `json:"index"`
+
+ // TimestampField The timestamp field used in the source index.
+ TimestampField string `json:"timestampField"`
+
+ // Total The KQL query used to define all events.
+ Total SLOsKqlWithFiltersTotal `json:"total"`
+ } `json:"params"`
+
+ // Type The type of indicator.
+ Type string `json:"type"`
+}
+
+// SLOsIndicatorPropertiesCustomMetric Defines properties for a custom metric indicator type
+type SLOsIndicatorPropertiesCustomMetric struct {
+ // Params An object containing the indicator parameters.
+ Params struct {
+ // DataViewId The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries.
+ DataViewId *string `json:"dataViewId,omitempty"`
+
+ // Filter the KQL query to filter the documents with.
+ Filter *string `json:"filter,omitempty"`
+
+ // Good An object defining the "good" metrics and equation
+ Good struct {
+ // Equation The equation to calculate the "good" metric.
+ Equation string `json:"equation"`
+
+ // Metrics List of metrics with their name, aggregation type, and field.
+ Metrics []SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item `json:"metrics"`
+ } `json:"good"`
+
+ // Index The index or index pattern to use
+ Index string `json:"index"`
+
+ // TimestampField The timestamp field used in the source index.
+ TimestampField string `json:"timestampField"`
+
+ // Total An object defining the "total" metrics and equation
+ Total struct {
+ // Equation The equation to calculate the "total" metric.
+ Equation string `json:"equation"`
+
+ // Metrics List of metrics with their name, aggregation type, and field.
+ Metrics []SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item `json:"metrics"` + } `json:"total"` + } `json:"params"` + + // Type The type of indicator. + Type string `json:"type"` +} + +// SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0 defines model for . +type SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0 struct { + // Aggregation The aggregation type of the metric. + Aggregation SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0Aggregation `json:"aggregation"` + + // Field The field of the metric. + Field string `json:"field"` + + // Filter The filter to apply to the metric. + Filter *string `json:"filter,omitempty"` + + // Name The name of the metric. Only valid options are A-Z + Name string `json:"name"` +} + +// SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0Aggregation The aggregation type of the metric. +type SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0Aggregation string + +// SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1 defines model for . +type SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1 struct { + // Aggregation The aggregation type of the metric. + Aggregation SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1Aggregation `json:"aggregation"` + + // Filter The filter to apply to the metric. + Filter *string `json:"filter,omitempty"` + + // Name The name of the metric. Only valid options are A-Z + Name string `json:"name"` +} + +// SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1Aggregation The aggregation type of the metric. +type SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1Aggregation string + +// SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item defines model for SLOs_indicator_properties_custom_metric.params.good.metrics.Item. +type SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item struct { + union json.RawMessage +} + +// SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0 defines model for . +type SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0 struct { + // Aggregation The aggregation type of the metric. + Aggregation SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0Aggregation `json:"aggregation"` + + // Field The field of the metric. + Field string `json:"field"` + + // Filter The filter to apply to the metric. + Filter *string `json:"filter,omitempty"` + + // Name The name of the metric. Only valid options are A-Z + Name string `json:"name"` +} + +// SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0Aggregation The aggregation type of the metric. +type SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0Aggregation string + +// SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1 defines model for . +type SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1 struct { + // Aggregation The aggregation type of the metric. + Aggregation SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1Aggregation `json:"aggregation"` + + // Filter The filter to apply to the metric. + Filter *string `json:"filter,omitempty"` + + // Name The name of the metric. Only valid options are A-Z + Name string `json:"name"` +} + +// SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1Aggregation The aggregation type of the metric. +type SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1Aggregation string + +// SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item defines model for SLOs_indicator_properties_custom_metric.params.total.metrics.Item. 
+type SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item struct {
+ union json.RawMessage
+}
+
+// SLOsIndicatorPropertiesHistogram Defines properties for a histogram indicator type
+type SLOsIndicatorPropertiesHistogram struct {
+ // Params An object containing the indicator parameters.
+ Params struct {
+ // DataViewId The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries.
+ DataViewId *string `json:"dataViewId,omitempty"`
+
+ // Filter the KQL query to filter the documents with.
+ Filter *string `json:"filter,omitempty"`
+
+ // Good An object defining the "good" events
+ Good struct {
+ // Aggregation The type of aggregation to use.
+ Aggregation SLOsIndicatorPropertiesHistogramParamsGoodAggregation `json:"aggregation"`
+
+ // Field The field used to aggregate the good events.
+ Field string `json:"field"`
+
+ // Filter The filter for good events.
+ Filter *string `json:"filter,omitempty"`
+
+ // From The starting value of the range. Only required for "range" aggregations.
+ From *float32 `json:"from,omitempty"`
+
+ // To The ending value of the range. Only required for "range" aggregations.
+ To *float32 `json:"to,omitempty"`
+ } `json:"good"`
+
+ // Index The index or index pattern to use
+ Index string `json:"index"`
+
+ // TimestampField The timestamp field used in the source index.
+ TimestampField string `json:"timestampField"`
+
+ // Total An object defining the "total" events
+ Total struct {
+ // Aggregation The type of aggregation to use.
+ Aggregation SLOsIndicatorPropertiesHistogramParamsTotalAggregation `json:"aggregation"`
+
+ // Field The field used to aggregate the good events.
+ Field string `json:"field"`
+
+ // Filter The filter for total events.
+ Filter *string `json:"filter,omitempty"`
+
+ // From The starting value of the range. Only required for "range" aggregations.
+ From *float32 `json:"from,omitempty"`
+
+ // To The ending value of the range. Only required for "range" aggregations.
+ To *float32 `json:"to,omitempty"`
+ } `json:"total"`
+ } `json:"params"`
+
+ // Type The type of indicator.
+ Type string `json:"type"`
+}
+
+// SLOsIndicatorPropertiesHistogramParamsGoodAggregation The type of aggregation to use.
+type SLOsIndicatorPropertiesHistogramParamsGoodAggregation string
+
+// SLOsIndicatorPropertiesHistogramParamsTotalAggregation The type of aggregation to use.
+type SLOsIndicatorPropertiesHistogramParamsTotalAggregation string
+
+// SLOsIndicatorPropertiesTimesliceMetric Defines properties for a timeslice metric indicator type
+type SLOsIndicatorPropertiesTimesliceMetric struct {
+ // Params An object containing the indicator parameters.
+ Params struct {
+ // DataViewId The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries.
+ DataViewId *string `json:"dataViewId,omitempty"`
+
+ // Filter the KQL query to filter the documents with.
+ Filter *string `json:"filter,omitempty"`
+
+ // Index The index or index pattern to use
+ Index string `json:"index"`
+
+ // Metric An object defining the metrics, equation, and threshold to determine if it's a good slice or not
+ Metric struct {
+ // Comparator The comparator to use to compare the equation to the threshold.
+ Comparator SLOsIndicatorPropertiesTimesliceMetricParamsMetricComparator `json:"comparator"`
+
+ // Equation The equation to calculate the metric.
+ Equation string `json:"equation"`
+
+ // Metrics List of metrics with their name, aggregation type, and field.
+ Metrics []SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item `json:"metrics"`
+
+ // Threshold The threshold used to determine if the metric is a good slice or not.
+ Threshold float32 `json:"threshold"`
+ } `json:"metric"`
+
+ // TimestampField The timestamp field used in the source index.
+ TimestampField string `json:"timestampField"`
+ } `json:"params"`
+
+ // Type The type of indicator.
+ Type string `json:"type"`
+}
+
+// SLOsIndicatorPropertiesTimesliceMetricParamsMetricComparator The comparator to use to compare the equation to the threshold.
+type SLOsIndicatorPropertiesTimesliceMetricParamsMetricComparator string
+
+// SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item defines model for SLOsIndicatorPropertiesTimesliceMetric.Params.Metric.metrics.Item.
+type SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item struct {
+ union json.RawMessage
+}
+
+// SLOsKqlWithFilters Defines properties for a filter
+type SLOsKqlWithFilters struct {
+ union json.RawMessage
+}
+
+// SLOsKqlWithFilters0 the KQL query to filter the documents with.
+type SLOsKqlWithFilters0 = string
+
+// SLOsKqlWithFilters1 defines model for .
+type SLOsKqlWithFilters1 struct {
+ Filters *[]SLOsFilter `json:"filters,omitempty"`
+ KqlQuery *string `json:"kqlQuery,omitempty"`
+}
+
+// SLOsKqlWithFiltersGood The KQL query used to define the good events.
+type SLOsKqlWithFiltersGood struct {
+ union json.RawMessage
+}
+
+// SLOsKqlWithFiltersGood0 the KQL query to filter the documents with.
+type SLOsKqlWithFiltersGood0 = string
+
+// SLOsKqlWithFiltersGood1 defines model for .
+type SLOsKqlWithFiltersGood1 struct {
+ Filters *[]SLOsFilter `json:"filters,omitempty"`
+ KqlQuery *string `json:"kqlQuery,omitempty"`
+}
+
+// SLOsKqlWithFiltersTotal The KQL query used to define all events.
+type SLOsKqlWithFiltersTotal struct {
+ union json.RawMessage
+}
+
+// SLOsKqlWithFiltersTotal0 the KQL query to filter the documents with.
+type SLOsKqlWithFiltersTotal0 = string
+
+// SLOsKqlWithFiltersTotal1 defines model for .
+type SLOsKqlWithFiltersTotal1 struct {
+ Filters *[]SLOsFilter `json:"filters,omitempty"`
+ KqlQuery *string `json:"kqlQuery,omitempty"`
+}
+
+// SLOsObjective Defines properties for the SLO objective
+type SLOsObjective struct {
+ // Target the target objective, between 0 and 1 (exclusive)
+ Target float32 `json:"target"`
+
+ // TimesliceTarget the target objective for each slice when using a timeslices budgeting method
+ TimesliceTarget *float32 `json:"timesliceTarget,omitempty"`
+
+ // TimesliceWindow the duration of each slice when using a timeslices budgeting method, as {duration}{unit}
+ TimesliceWindow *string `json:"timesliceWindow,omitempty"`
+}
+
+// SLOsSettings Defines properties for SLO settings.
+type SLOsSettings struct {
+ // Frequency The interval between checks for changes in the source data. The minimum value is 1m and the maximum is 59m. The default value is 1 minute.
+ Frequency *string `json:"frequency,omitempty"`
+
+ // PreventInitialBackfill Start aggregating data from the time the SLO is created, instead of backfilling data from the beginning of the time window.
+ PreventInitialBackfill *bool `json:"preventInitialBackfill,omitempty"`
+
+ // SyncDelay The time delay in minutes between the current time and the latest source data time. Increasing the value will delay any alerting. The default value is 1 minute. The minimum value is 1m and the maximum is 359m. It should always be greater than the source index refresh interval.
+ SyncDelay *string `json:"syncDelay,omitempty"`
+
+ // SyncField The date field that is used to identify new documents in the source. It is strongly recommended to use a field that contains the ingest timestamp. If you use a different field, you might need to set the delay such that it accounts for data transmission delays. When unspecified, we use the indicator timestamp field.
+ SyncField *string `json:"syncField,omitempty"`
+}
+
+// SLOsSloDefinitionResponse defines model for SLOs_slo_definition_response.
+type SLOsSloDefinitionResponse struct {
+ // BudgetingMethod The budgeting method to use when computing the rollup data.
+ BudgetingMethod SLOsBudgetingMethod `json:"budgetingMethod"`
+
+ // CreatedAt The creation date
+ CreatedAt string `json:"createdAt"`
+
+ // Description The description of the SLO.
+ Description string `json:"description"`
+
+ // Enabled Indicates if the SLO is enabled
+ Enabled bool `json:"enabled"`
+
+ // GroupBy optional group by field or fields to use to generate an SLO per distinct value
+ GroupBy SLOsGroupBy `json:"groupBy"`
+
+ // Id The identifier of the SLO.
+ Id string `json:"id"`
+ Indicator SLOsSloDefinitionResponse_Indicator `json:"indicator"`
+
+ // Name The name of the SLO.
+ Name string `json:"name"`
+
+ // Objective Defines properties for the SLO objective
+ Objective SLOsObjective `json:"objective"`
+
+ // Revision The SLO revision
+ Revision float32 `json:"revision"`
+
+ // Settings Defines properties for SLO settings.
+ Settings SLOsSettings `json:"settings"`
+
+ // Tags List of tags
+ Tags []string `json:"tags"`
+
+ // TimeWindow Defines properties for the SLO time window
+ TimeWindow SLOsTimeWindow `json:"timeWindow"`
+
+ // UpdatedAt The last update date
+ UpdatedAt string `json:"updatedAt"`
+
+ // Version The internal SLO version
+ Version float32 `json:"version"`
+}
+
+// SLOsSloDefinitionResponse_Indicator defines model for SLOsSloDefinitionResponse.Indicator.
+type SLOsSloDefinitionResponse_Indicator struct {
+ union json.RawMessage
+}
+
+// SLOsSloWithSummaryResponse defines model for SLOs_slo_with_summary_response.
+type SLOsSloWithSummaryResponse struct {
+ // BudgetingMethod The budgeting method to use when computing the rollup data.
+ BudgetingMethod SLOsBudgetingMethod `json:"budgetingMethod"`
+
+ // CreatedAt The creation date
+ CreatedAt string `json:"createdAt"`
+
+ // Description The description of the SLO.
+ Description string `json:"description"`
+
+ // Enabled Indicates if the SLO is enabled
+ Enabled bool `json:"enabled"`
+
+ // GroupBy optional group by field or fields to use to generate an SLO per distinct value
+ GroupBy SLOsGroupBy `json:"groupBy"`
+
+ // Id The identifier of the SLO.
+ Id string `json:"id"`
+ Indicator SLOsSloWithSummaryResponse_Indicator `json:"indicator"`
+
+ // InstanceId the value derived from the groupBy field, if present, otherwise '*'
+ InstanceId string `json:"instanceId"`
+
+ // Name The name of the SLO.
+ Name string `json:"name"`
+
+ // Objective Defines properties for the SLO objective
+ Objective SLOsObjective `json:"objective"`
+
+ // Revision The SLO revision
+ Revision float32 `json:"revision"`
+
+ // Settings Defines properties for SLO settings.
+ Settings SLOsSettings `json:"settings"`
+
+ // Summary The SLO computed data
+ Summary SLOsSummary `json:"summary"`
+
+ // Tags List of tags
+ Tags []string `json:"tags"`
+
+ // TimeWindow Defines properties for the SLO time window
+ TimeWindow SLOsTimeWindow `json:"timeWindow"`
+
+ // UpdatedAt The last update date
+ UpdatedAt string `json:"updatedAt"`
+
+ // Version The internal SLO version
+ Version float32 `json:"version"`
+}
+
+// SLOsSloWithSummaryResponse_Indicator defines model for SLOsSloWithSummaryResponse.Indicator.
+type SLOsSloWithSummaryResponse_Indicator struct {
+ union json.RawMessage
+}
+
+// SLOsSummary The SLO computed data
+type SLOsSummary struct {
+ ErrorBudget SLOsErrorBudget `json:"errorBudget"`
+ SliValue float32 `json:"sliValue"`
+ Status SLOsSummaryStatus `json:"status"`
+}
+
+// SLOsSummaryStatus defines model for SLOs_summary_status.
+type SLOsSummaryStatus string
+
+// SLOsTimeWindow Defines properties for the SLO time window
+type SLOsTimeWindow struct {
+ // Duration the duration formatted as {duration}{unit}. Accepted values for rolling: 7d, 30d, 90d. Accepted values for calendar aligned: 1w (weekly) or 1M (monthly)
+ Duration string `json:"duration"`
+
+ // Type Indicates whether the time window is a rolling or a calendar aligned time window.
+ Type SLOsTimeWindowType `json:"type"`
+}
+
+// SLOsTimeWindowType Indicates whether the time window is a rolling or a calendar aligned time window.
+type SLOsTimeWindowType string
+
+// SLOsTimesliceMetricBasicMetricWithField defines model for SLOs_timeslice_metric_basic_metric_with_field.
+type SLOsTimesliceMetricBasicMetricWithField struct {
+ // Aggregation The aggregation type of the metric.
+ Aggregation SLOsTimesliceMetricBasicMetricWithFieldAggregation `json:"aggregation"`
+
+ // Field The field of the metric.
+ Field string `json:"field"`
+
+ // Filter The filter to apply to the metric.
+ Filter *string `json:"filter,omitempty"`
+
+ // Name The name of the metric. Only valid options are A-Z
+ Name string `json:"name"`
+}
+
+// SLOsTimesliceMetricBasicMetricWithFieldAggregation The aggregation type of the metric.
+type SLOsTimesliceMetricBasicMetricWithFieldAggregation string
+
+// SLOsTimesliceMetricDocCountMetric defines model for SLOs_timeslice_metric_doc_count_metric.
+type SLOsTimesliceMetricDocCountMetric struct {
+ // Aggregation The aggregation type of the metric. Only valid option is "doc_count"
+ Aggregation SLOsTimesliceMetricDocCountMetricAggregation `json:"aggregation"`
+
+ // Filter The filter to apply to the metric.
+ Filter *string `json:"filter,omitempty"`
+
+ // Name The name of the metric. Only valid options are A-Z
+ Name string `json:"name"`
+}
+
+// SLOsTimesliceMetricDocCountMetricAggregation The aggregation type of the metric. Only valid option is "doc_count"
+type SLOsTimesliceMetricDocCountMetricAggregation string
+
+// SLOsTimesliceMetricPercentileMetric defines model for SLOs_timeslice_metric_percentile_metric.
+type SLOsTimesliceMetricPercentileMetric struct {
+ // Aggregation The aggregation type of the metric. Only valid option is "percentile"
+ Aggregation SLOsTimesliceMetricPercentileMetricAggregation `json:"aggregation"`
+
+ // Field The field of the metric.
+ Field string `json:"field"` + + // Filter The filter to apply to the metric. + Filter *string `json:"filter,omitempty"` + + // Name The name of the metric. Only valid options are A-Z + Name string `json:"name"` + + // Percentile The percentile value. + Percentile float32 `json:"percentile"` +} + +// SLOsTimesliceMetricPercentileMetricAggregation The aggregation type of the metric. Only valid option is "percentile" +type SLOsTimesliceMetricPercentileMetricAggregation string + +// SLOsUpdateSloRequest The update SLO API request body varies depending on the type of indicator, time window and budgeting method. Partial update is handled. +type SLOsUpdateSloRequest struct { + // BudgetingMethod The budgeting method to use when computing the rollup data. + BudgetingMethod *SLOsBudgetingMethod `json:"budgetingMethod,omitempty"` + + // Description A description for the SLO. + Description *string `json:"description,omitempty"` + + // GroupBy optional group by field or fields to use to generate an SLO per distinct value + GroupBy *SLOsGroupBy `json:"groupBy,omitempty"` + Indicator *SLOsUpdateSloRequest_Indicator `json:"indicator,omitempty"` + + // Name A name for the SLO. + Name *string `json:"name,omitempty"` + + // Objective Defines properties for the SLO objective + Objective *SLOsObjective `json:"objective,omitempty"` + + // Settings Defines properties for SLO settings. + Settings *SLOsSettings `json:"settings,omitempty"` + + // Tags List of tags + Tags *[]string `json:"tags,omitempty"` + + // TimeWindow Defines properties for the SLO time window + TimeWindow *SLOsTimeWindow `json:"timeWindow,omitempty"` +} + +// SLOsUpdateSloRequest_Indicator defines model for SLOsUpdateSloRequest.Indicator. +type SLOsUpdateSloRequest_Indicator struct { + union json.RawMessage +} + +// SavedObjects400Response defines model for Saved_objects_400_response. +type SavedObjects400Response struct { + Error SavedObjects400ResponseError `json:"error"` + Message string `json:"message"` + StatusCode SavedObjects400ResponseStatusCode `json:"statusCode"` +} + +// SavedObjects400ResponseError defines model for SavedObjects400Response.Error. +type SavedObjects400ResponseError string + +// SavedObjects400ResponseStatusCode defines model for SavedObjects400Response.StatusCode. +type SavedObjects400ResponseStatusCode int + +// SavedObjectsAttributes The data that you want to create. WARNING: When you create saved objects, attributes are not validated, which allows you to pass arbitrary and ill-formed data into the API that can break Kibana. Make sure any data that you send to the API is properly formed. +type SavedObjectsAttributes = map[string]interface{} + +// SavedObjectsInitialNamespaces Identifiers for the spaces in which this object is created. If this is provided, the object is created only in the explicitly defined spaces. If this is not provided, the object is created in the current space (default behavior). For shareable object types (registered with `namespaceType: 'multiple'`), this option can be used to specify one or more spaces, including the "All spaces" identifier ('*'). For isolated object types (registered with `namespaceType: 'single'` or `namespaceType: 'multiple-isolated'`), this option can only be used to specify a single space, and the "All spaces" identifier ('*') is not allowed. For global object types (`registered with `namespaceType: agnostic`), this option cannot be used. 
+type SavedObjectsInitialNamespaces = []interface{} + +// SavedObjectsReferences Objects with `name`, `id`, and `type` properties that describe the other saved objects that this object references. Use `name` in attributes to refer to the other saved object, but never the `id`, which can update automatically during migrations or import and export. +type SavedObjectsReferences = []interface{} + +// SecurityAIAssistantAPIAnonymizationFieldCreateProps defines model for Security_AI_Assistant_API_AnonymizationFieldCreateProps. +type SecurityAIAssistantAPIAnonymizationFieldCreateProps struct { + // Allowed Whether this field is allowed to be sent to the model. + Allowed *bool `json:"allowed,omitempty"` + + // Anonymized Whether this field should be anonymized. + Anonymized *bool `json:"anonymized,omitempty"` + + // Field Name of the anonymization field to create. + Field string `json:"field"` +} + +// SecurityAIAssistantAPIAnonymizationFieldDetailsInError defines model for Security_AI_Assistant_API_AnonymizationFieldDetailsInError. +type SecurityAIAssistantAPIAnonymizationFieldDetailsInError struct { + // Id The ID of the anonymization field. + Id string `json:"id"` + + // Name Name of the anonymization field. + Name *string `json:"name,omitempty"` +} + +// SecurityAIAssistantAPIAnonymizationFieldResponse defines model for Security_AI_Assistant_API_AnonymizationFieldResponse. +type SecurityAIAssistantAPIAnonymizationFieldResponse struct { + // Allowed Whether this field is allowed to be sent to the model. + Allowed *bool `json:"allowed,omitempty"` + + // Anonymized Whether this field should be anonymized. + Anonymized *bool `json:"anonymized,omitempty"` + + // CreatedAt Timestamp of when the anonymization field was created. + CreatedAt *string `json:"createdAt,omitempty"` + + // CreatedBy Username of the person who created the anonymization field. + CreatedBy *string `json:"createdBy,omitempty"` + + // Field Name of the anonymization field. + Field string `json:"field"` + + // Id A string that does not contain only whitespace characters. + Id SecurityAIAssistantAPINonEmptyString `json:"id"` + + // Namespace Kibana space in which this anonymization field exists. + Namespace *string `json:"namespace,omitempty"` + + // Timestamp A string that represents a timestamp in ISO 8601 format and does not contain only whitespace characters. + Timestamp *SecurityAIAssistantAPINonEmptyTimestamp `json:"timestamp,omitempty"` + + // UpdatedAt Timestamp of the last update. + UpdatedAt *string `json:"updatedAt,omitempty"` + + // UpdatedBy Username of the person who last updated the field. + UpdatedBy *string `json:"updatedBy,omitempty"` +} + +// SecurityAIAssistantAPIAnonymizationFieldUpdateProps defines model for Security_AI_Assistant_API_AnonymizationFieldUpdateProps. +type SecurityAIAssistantAPIAnonymizationFieldUpdateProps struct { + // Allowed Whether this field is allowed to be sent to the model. + Allowed *bool `json:"allowed,omitempty"` + + // Anonymized Whether this field should be anonymized. + Anonymized *bool `json:"anonymized,omitempty"` + + // Id The ID of the anonymization field to update. + Id string `json:"id"` +} + +// SecurityAIAssistantAPIAnonymizationFieldsBulkActionSkipReason Reason why the anonymization field was not modified. +type SecurityAIAssistantAPIAnonymizationFieldsBulkActionSkipReason string + +// SecurityAIAssistantAPIAnonymizationFieldsBulkActionSkipResult defines model for Security_AI_Assistant_API_AnonymizationFieldsBulkActionSkipResult. 
+type SecurityAIAssistantAPIAnonymizationFieldsBulkActionSkipResult struct { + // Id The ID of the anonymization field that was not modified. + Id string `json:"id"` + + // Name Name of the anonymization field that was not modified. + Name *string `json:"name,omitempty"` + + // SkipReason Reason why the anonymization field was not modified. + SkipReason SecurityAIAssistantAPIAnonymizationFieldsBulkActionSkipReason `json:"skip_reason"` +} + +// SecurityAIAssistantAPIAnonymizationFieldsBulkCrudActionResponse defines model for Security_AI_Assistant_API_AnonymizationFieldsBulkCrudActionResponse. +type SecurityAIAssistantAPIAnonymizationFieldsBulkCrudActionResponse struct { + // AnonymizationFieldsCount Total number of anonymization fields processed. + AnonymizationFieldsCount *int `json:"anonymization_fields_count,omitempty"` + Attributes struct { + // Errors List of errors that occurred during the bulk operation. + Errors *[]SecurityAIAssistantAPINormalizedAnonymizationFieldError `json:"errors,omitempty"` + Results SecurityAIAssistantAPIAnonymizationFieldsBulkCrudActionResults `json:"results"` + Summary SecurityAIAssistantAPIBulkCrudActionSummary `json:"summary"` + } `json:"attributes"` + + // Message Message providing information about the bulk action result. + Message *string `json:"message,omitempty"` + + // StatusCode HTTP status code returned. + StatusCode *int `json:"status_code,omitempty"` + + // Success Indicates if the bulk action was successful. + Success *bool `json:"success,omitempty"` +} + +// SecurityAIAssistantAPIAnonymizationFieldsBulkCrudActionResults defines model for Security_AI_Assistant_API_AnonymizationFieldsBulkCrudActionResults. +type SecurityAIAssistantAPIAnonymizationFieldsBulkCrudActionResults struct { + // Created List of anonymization fields successfully created. + Created []SecurityAIAssistantAPIAnonymizationFieldResponse `json:"created"` + Deleted []string `json:"deleted"` + + // Skipped List of anonymization fields that were skipped during the operation. + Skipped []SecurityAIAssistantAPIAnonymizationFieldsBulkActionSkipResult `json:"skipped"` + + // Updated List of anonymization fields successfully updated. + Updated []SecurityAIAssistantAPIAnonymizationFieldResponse `json:"updated"` +} + +// SecurityAIAssistantAPIApiConfig defines model for Security_AI_Assistant_API_ApiConfig. +type SecurityAIAssistantAPIApiConfig struct { + // ActionTypeId Action type ID + ActionTypeId string `json:"actionTypeId"` + + // ConnectorId Connector ID + ConnectorId string `json:"connectorId"` + + // DefaultSystemPromptId Default system prompt ID + DefaultSystemPromptId *string `json:"defaultSystemPromptId,omitempty"` + + // Model Model + Model *string `json:"model,omitempty"` + + // Provider Provider + Provider *SecurityAIAssistantAPIProvider `json:"provider,omitempty"` +} + +// SecurityAIAssistantAPIBaseContentReference The basis of a content reference +type SecurityAIAssistantAPIBaseContentReference struct { + // Id Id of the content reference + Id string `json:"id"` + + // Type Type of the content reference + Type string `json:"type"` +} + +// SecurityAIAssistantAPIBulkCrudActionSummary defines model for Security_AI_Assistant_API_BulkCrudActionSummary. +type SecurityAIAssistantAPIBulkCrudActionSummary struct { + // Failed The number of failed actions. + Failed int `json:"failed"` + + // Skipped The number of skipped actions. + Skipped int `json:"skipped"` + + // Succeeded The number of successfully performed actions. 
+ Succeeded int `json:"succeeded"` + + // Total The total number of actions attempted. + Total int `json:"total"` +} + +// SecurityAIAssistantAPIChatCompleteProps The request payload for creating a chat completion. +type SecurityAIAssistantAPIChatCompleteProps struct { + // ConnectorId Required connector identifier to route the request. + ConnectorId string `json:"connectorId"` + + // ConversationId A string that does not contain only whitespace characters. + ConversationId *SecurityAIAssistantAPINonEmptyString `json:"conversationId,omitempty"` + + // IsStream If true, the response will be streamed in chunks. + IsStream *bool `json:"isStream,omitempty"` + + // LangSmithApiKey API key for LangSmith integration. + LangSmithApiKey *string `json:"langSmithApiKey,omitempty"` + + // LangSmithProject LangSmith project name for tracing. + LangSmithProject *string `json:"langSmithProject,omitempty"` + + // Messages List of chat messages exchanged so far. + Messages []SecurityAIAssistantAPIChatMessage `json:"messages"` + + // Model Model ID or name to use for the response. + Model *string `json:"model,omitempty"` + + // Persist Whether to persist the chat and response to storage. + Persist bool `json:"persist"` + + // PromptId Prompt template identifier. + PromptId *string `json:"promptId,omitempty"` + + // ResponseLanguage ISO language code for the assistant's response. + ResponseLanguage *string `json:"responseLanguage,omitempty"` +} + +// SecurityAIAssistantAPIChatMessage A message exchanged within the AI chat conversation. +type SecurityAIAssistantAPIChatMessage struct { + // Content The textual content of the message. + Content *string `json:"content,omitempty"` + + // Data ECS-style metadata attached to the message. + Data *SecurityAIAssistantAPIMessageData `json:"data,omitempty"` + + // FieldsToAnonymize List of field names within the data object that should be anonymized. + FieldsToAnonymize *[]string `json:"fields_to_anonymize,omitempty"` + + // Role The role associated with the message in the chat. + Role SecurityAIAssistantAPIChatMessageRole `json:"role"` +} + +// SecurityAIAssistantAPIChatMessageRole The role associated with the message in the chat. +type SecurityAIAssistantAPIChatMessageRole string + +// SecurityAIAssistantAPIContentReferences A union of all content reference types +type SecurityAIAssistantAPIContentReferences map[string]SecurityAIAssistantAPIContentReferences_AdditionalProperties + +// SecurityAIAssistantAPIContentReferences_AdditionalProperties defines model for Security_AI_Assistant_API_ContentReferences.AdditionalProperties. +type SecurityAIAssistantAPIContentReferences_AdditionalProperties struct { + union json.RawMessage +} + +// SecurityAIAssistantAPIConversationCategory The conversation category. +type SecurityAIAssistantAPIConversationCategory string + +// SecurityAIAssistantAPIConversationCreateProps defines model for Security_AI_Assistant_API_ConversationCreateProps. +type SecurityAIAssistantAPIConversationCreateProps struct { + ApiConfig *SecurityAIAssistantAPIApiConfig `json:"apiConfig,omitempty"` + + // Category The conversation category. + Category *SecurityAIAssistantAPIConversationCategory `json:"category,omitempty"` + + // ExcludeFromLastConversationStorage Exclude from last conversation storage. + ExcludeFromLastConversationStorage *bool `json:"excludeFromLastConversationStorage,omitempty"` + + // Id The conversation id. + Id *string `json:"id,omitempty"` + + // Messages The conversation messages. 
+ Messages *[]SecurityAIAssistantAPIMessage `json:"messages,omitempty"` + + // Replacements Replacements object used to anonymize/deanonymize messages + Replacements *SecurityAIAssistantAPIReplacements `json:"replacements,omitempty"` + + // Title The conversation title. + Title string `json:"title"` +} + +// SecurityAIAssistantAPIConversationResponse defines model for Security_AI_Assistant_API_ConversationResponse. +type SecurityAIAssistantAPIConversationResponse struct { + ApiConfig *SecurityAIAssistantAPIApiConfig `json:"apiConfig,omitempty"` + + // Category The conversation category. + Category SecurityAIAssistantAPIConversationCategory `json:"category"` + + // CreatedAt The time conversation was created. + CreatedAt string `json:"createdAt"` + + // CreatedBy Could be any string, not necessarily a UUID. + CreatedBy SecurityAIAssistantAPIUser `json:"createdBy"` + + // ExcludeFromLastConversationStorage Exclude from last conversation storage. + ExcludeFromLastConversationStorage *bool `json:"excludeFromLastConversationStorage,omitempty"` + + // Id A string that does not contain only whitespace characters. + Id SecurityAIAssistantAPINonEmptyString `json:"id"` + + // Messages The conversation messages. + Messages *[]SecurityAIAssistantAPIMessage `json:"messages,omitempty"` + + // Namespace Kibana space + Namespace string `json:"namespace"` + + // Replacements Replacements object used to anonymize/deanonymize messages + Replacements *SecurityAIAssistantAPIReplacements `json:"replacements,omitempty"` + + // Timestamp A string that represents a timestamp in ISO 8601 format and does not contain only whitespace characters. + Timestamp *SecurityAIAssistantAPINonEmptyTimestamp `json:"timestamp,omitempty"` + + // Title The conversation title. + Title string `json:"title"` + + // UpdatedAt The last time conversation was updated. + UpdatedAt *string `json:"updatedAt,omitempty"` + Users []SecurityAIAssistantAPIUser `json:"users"` +} + +// SecurityAIAssistantAPIConversationUpdateProps defines model for Security_AI_Assistant_API_ConversationUpdateProps. +type SecurityAIAssistantAPIConversationUpdateProps struct { + ApiConfig *SecurityAIAssistantAPIApiConfig `json:"apiConfig,omitempty"` + + // Category The conversation category. + Category *SecurityAIAssistantAPIConversationCategory `json:"category,omitempty"` + + // ExcludeFromLastConversationStorage Exclude from last conversation storage. + ExcludeFromLastConversationStorage *bool `json:"excludeFromLastConversationStorage,omitempty"` + + // Id A string that does not contain only whitespace characters. + Id SecurityAIAssistantAPINonEmptyString `json:"id"` + + // Messages The conversation messages. + Messages *[]SecurityAIAssistantAPIMessage `json:"messages,omitempty"` + + // Replacements Replacements object used to anonymize/deanonymize messages + Replacements *SecurityAIAssistantAPIReplacements `json:"replacements,omitempty"` + + // Title The conversation title. + Title *string `json:"title,omitempty"` + Users *[]SecurityAIAssistantAPIUser `json:"users,omitempty"` +} + +// SecurityAIAssistantAPIDeleteResponseFields defines model for Security_AI_Assistant_API_DeleteResponseFields. +type SecurityAIAssistantAPIDeleteResponseFields struct { + // Id A string that does not contain only whitespace characters. + Id SecurityAIAssistantAPINonEmptyString `json:"id"` +} + +// SecurityAIAssistantAPIDocumentEntry defines model for Security_AI_Assistant_API_DocumentEntry. 
+type SecurityAIAssistantAPIDocumentEntry struct { + // CreatedAt Time the Knowledge Base Entry was created. + CreatedAt string `json:"createdAt"` + + // CreatedBy User who created the Knowledge Base Entry. + CreatedBy string `json:"createdBy"` + + // Global Whether this Knowledge Base Entry is global, defaults to false. + Global bool `json:"global"` + + // Id A string that does not contain only whitespace characters. + Id SecurityAIAssistantAPINonEmptyString `json:"id"` + + // KbResource Knowledge Base resource name for grouping entries, e.g. 'security_labs', 'user', etc. + KbResource SecurityAIAssistantAPIKnowledgeBaseResource `json:"kbResource"` + + // Name Name of the Knowledge Base Entry. + Name string `json:"name"` + + // Namespace Kibana Space, defaults to 'default' space. + Namespace string `json:"namespace"` + + // Required Whether this resource should always be included, defaults to false. + Required *bool `json:"required,omitempty"` + + // Source Source document name or filepath. + Source string `json:"source"` + + // Text Knowledge Base Entry content. + Text string `json:"text"` + + // Type Entry type. + Type SecurityAIAssistantAPIDocumentEntryType `json:"type"` + + // UpdatedAt Time the Knowledge Base Entry was last updated. + UpdatedAt string `json:"updatedAt"` + + // UpdatedBy User who last updated the Knowledge Base Entry. + UpdatedBy string `json:"updatedBy"` + + // Users Users who have access to the Knowledge Base Entry, defaults to current user. Empty array provides access to all users. + Users []SecurityAIAssistantAPIUser `json:"users"` + + // Vector Object containing Knowledge Base Entry text embeddings and modelId used to create the embeddings. + Vector *SecurityAIAssistantAPIVector `json:"vector,omitempty"` +} + +// SecurityAIAssistantAPIDocumentEntryType Entry type. +type SecurityAIAssistantAPIDocumentEntryType string + +// SecurityAIAssistantAPIDocumentEntryCreateFields defines model for Security_AI_Assistant_API_DocumentEntryCreateFields. +type SecurityAIAssistantAPIDocumentEntryCreateFields struct { + // Global Whether this Knowledge Base Entry is global, defaults to false. + Global *bool `json:"global,omitempty"` + + // KbResource Knowledge Base resource name for grouping entries, e.g. 'security_labs', 'user', etc. + KbResource SecurityAIAssistantAPIKnowledgeBaseResource `json:"kbResource"` + + // Name Name of the Knowledge Base Entry. + Name string `json:"name"` + + // Namespace Kibana Space, defaults to 'default' space. + Namespace *string `json:"namespace,omitempty"` + + // Required Whether this resource should always be included, defaults to false. + Required *bool `json:"required,omitempty"` + + // Source Source document name or filepath. + Source string `json:"source"` + + // Text Knowledge Base Entry content. + Text string `json:"text"` + + // Type Entry type. + Type SecurityAIAssistantAPIDocumentEntryCreateFieldsType `json:"type"` + + // Users Users who have access to the Knowledge Base Entry, defaults to current user. Empty array provides access to all users. + Users *[]SecurityAIAssistantAPIUser `json:"users,omitempty"` + + // Vector Object containing Knowledge Base Entry text embeddings and modelId used to create the embeddings. + Vector *SecurityAIAssistantAPIVector `json:"vector,omitempty"` +} + +// SecurityAIAssistantAPIDocumentEntryCreateFieldsType Entry type. 
+type SecurityAIAssistantAPIDocumentEntryCreateFieldsType string + +// SecurityAIAssistantAPIDocumentEntryOptionalFields defines model for Security_AI_Assistant_API_DocumentEntryOptionalFields. +type SecurityAIAssistantAPIDocumentEntryOptionalFields struct { + // Required Whether this resource should always be included, defaults to false. + Required *bool `json:"required,omitempty"` + + // Vector Object containing Knowledge Base Entry text embeddings and modelId used to create the embeddings. + Vector *SecurityAIAssistantAPIVector `json:"vector,omitempty"` +} + +// SecurityAIAssistantAPIDocumentEntryRequiredFields defines model for Security_AI_Assistant_API_DocumentEntryRequiredFields. +type SecurityAIAssistantAPIDocumentEntryRequiredFields struct { + // KbResource Knowledge Base resource name for grouping entries, e.g. 'security_labs', 'user', etc. + KbResource SecurityAIAssistantAPIKnowledgeBaseResource `json:"kbResource"` + + // Source Source document name or filepath. + Source string `json:"source"` + + // Text Knowledge Base Entry content. + Text string `json:"text"` + + // Type Entry type. + Type SecurityAIAssistantAPIDocumentEntryRequiredFieldsType `json:"type"` +} + +// SecurityAIAssistantAPIDocumentEntryRequiredFieldsType Entry type. +type SecurityAIAssistantAPIDocumentEntryRequiredFieldsType string + +// SecurityAIAssistantAPIDocumentEntryResponseFields defines model for Security_AI_Assistant_API_DocumentEntryResponseFields. +type SecurityAIAssistantAPIDocumentEntryResponseFields struct { + // KbResource Knowledge Base resource name for grouping entries, e.g. 'security_labs', 'user', etc. + KbResource SecurityAIAssistantAPIKnowledgeBaseResource `json:"kbResource"` + + // Required Whether this resource should always be included, defaults to false. + Required *bool `json:"required,omitempty"` + + // Source Source document name or filepath. + Source string `json:"source"` + + // Text Knowledge Base Entry content. + Text string `json:"text"` + + // Type Entry type. + Type SecurityAIAssistantAPIDocumentEntryResponseFieldsType `json:"type"` + + // Vector Object containing Knowledge Base Entry text embeddings and modelId used to create the embeddings. + Vector *SecurityAIAssistantAPIVector `json:"vector,omitempty"` +} + +// SecurityAIAssistantAPIDocumentEntryResponseFieldsType Entry type. +type SecurityAIAssistantAPIDocumentEntryResponseFieldsType string + +// SecurityAIAssistantAPIDocumentEntryUpdateFields defines model for Security_AI_Assistant_API_DocumentEntryUpdateFields. +type SecurityAIAssistantAPIDocumentEntryUpdateFields struct { + // Global Whether this Knowledge Base Entry is global, defaults to false. + Global *bool `json:"global,omitempty"` + + // Id A string that does not contain only whitespace characters. + Id SecurityAIAssistantAPINonEmptyString `json:"id"` + + // KbResource Knowledge Base resource name for grouping entries, e.g. 'security_labs', 'user', etc. + KbResource SecurityAIAssistantAPIKnowledgeBaseResource `json:"kbResource"` + + // Name Name of the Knowledge Base Entry. + Name string `json:"name"` + + // Namespace Kibana Space, defaults to 'default' space. + Namespace *string `json:"namespace,omitempty"` + + // Required Whether this resource should always be included, defaults to false. + Required *bool `json:"required,omitempty"` + + // Source Source document name or filepath. + Source string `json:"source"` + + // Text Knowledge Base Entry content. + Text string `json:"text"` + + // Type Entry type. 
+ Type SecurityAIAssistantAPIDocumentEntryUpdateFieldsType `json:"type"` + + // Users Users who have access to the Knowledge Base Entry, defaults to current user. Empty array provides access to all users. + Users *[]SecurityAIAssistantAPIUser `json:"users,omitempty"` + + // Vector Object containing Knowledge Base Entry text embeddings and modelId used to create the embeddings. + Vector *SecurityAIAssistantAPIVector `json:"vector,omitempty"` +} + +// SecurityAIAssistantAPIDocumentEntryUpdateFieldsType Entry type. +type SecurityAIAssistantAPIDocumentEntryUpdateFieldsType string + +// SecurityAIAssistantAPIEsqlContentReference defines model for Security_AI_Assistant_API_EsqlContentReference. +type SecurityAIAssistantAPIEsqlContentReference struct { + // Id Id of the content reference + Id string `json:"id"` + + // Label Label of the query + Label string `json:"label"` + + // Query An ESQL query + Query string `json:"query"` + + // Timerange Time range to select in the time picker. + Timerange *struct { + From string `json:"from"` + To string `json:"to"` + } `json:"timerange,omitempty"` + Type SecurityAIAssistantAPIEsqlContentReferenceType `json:"type"` +} + +// SecurityAIAssistantAPIEsqlContentReferenceType defines model for SecurityAIAssistantAPIEsqlContentReference.Type. +type SecurityAIAssistantAPIEsqlContentReferenceType string + +// SecurityAIAssistantAPIFindAnonymizationFieldsSortField defines model for Security_AI_Assistant_API_FindAnonymizationFieldsSortField. +type SecurityAIAssistantAPIFindAnonymizationFieldsSortField string + +// SecurityAIAssistantAPIFindConversationsSortField The field by which to sort the conversations. Possible values are `created_at`, `title`, and `updated_at`. +type SecurityAIAssistantAPIFindConversationsSortField string + +// SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortField Fields available for sorting Knowledge Base Entries. +type SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortField string + +// SecurityAIAssistantAPIFindPromptsSortField Field by which to sort the prompts. +type SecurityAIAssistantAPIFindPromptsSortField string + +// SecurityAIAssistantAPIHrefContentReference defines model for Security_AI_Assistant_API_HrefContentReference. +type SecurityAIAssistantAPIHrefContentReference struct { + // Href URL to the external resource + Href string `json:"href"` + + // Id Id of the content reference + Id string `json:"id"` + + // Label Label of the query + Label *string `json:"label,omitempty"` + Type SecurityAIAssistantAPIHrefContentReferenceType `json:"type"` +} + +// SecurityAIAssistantAPIHrefContentReferenceType defines model for SecurityAIAssistantAPIHrefContentReference.Type. +type SecurityAIAssistantAPIHrefContentReferenceType string + +// SecurityAIAssistantAPIIndexEntry defines model for Security_AI_Assistant_API_IndexEntry. +type SecurityAIAssistantAPIIndexEntry struct { + // CreatedAt Time the Knowledge Base Entry was created. + CreatedAt string `json:"createdAt"` + + // CreatedBy User who created the Knowledge Base Entry. + CreatedBy string `json:"createdBy"` + + // Description Description for when this index or data stream should be queried for Knowledge Base content. Passed to the LLM as a tool description. + Description string `json:"description"` + + // Field Field to query for Knowledge Base content. + Field string `json:"field"` + + // Global Whether this Knowledge Base Entry is global, defaults to false. + Global bool `json:"global"` + + // Id A string that does not contain only whitespace characters. 
+ Id SecurityAIAssistantAPINonEmptyString `json:"id"` + + // Index Index or Data Stream to query for Knowledge Base content. + Index string `json:"index"` + + // InputSchema Array of objects defining the input schema, allowing the LLM to extract structured data to be used in retrieval. + InputSchema *SecurityAIAssistantAPIInputSchema `json:"inputSchema,omitempty"` + + // Name Name of the Knowledge Base Entry. + Name string `json:"name"` + + // Namespace Kibana Space, defaults to 'default' space. + Namespace string `json:"namespace"` + + // OutputFields Fields to extract from the query result, defaults to all fields if not provided or empty. + OutputFields *[]string `json:"outputFields,omitempty"` + + // QueryDescription Description of query field used to fetch Knowledge Base content. Passed to the LLM as part of the tool input schema. + QueryDescription string `json:"queryDescription"` + + // Type Entry type. + Type SecurityAIAssistantAPIIndexEntryType `json:"type"` + + // UpdatedAt Time the Knowledge Base Entry was last updated. + UpdatedAt string `json:"updatedAt"` + + // UpdatedBy User who last updated the Knowledge Base Entry. + UpdatedBy string `json:"updatedBy"` + + // Users Users who have access to the Knowledge Base Entry, defaults to current user. Empty array provides access to all users. + Users []SecurityAIAssistantAPIUser `json:"users"` +} + +// SecurityAIAssistantAPIIndexEntryType Entry type. +type SecurityAIAssistantAPIIndexEntryType string + +// SecurityAIAssistantAPIIndexEntryCreateFields defines model for Security_AI_Assistant_API_IndexEntryCreateFields. +type SecurityAIAssistantAPIIndexEntryCreateFields struct { + // Description Description for when this index or data stream should be queried for Knowledge Base content. Passed to the LLM as a tool description. + Description string `json:"description"` + + // Field Field to query for Knowledge Base content. + Field string `json:"field"` + + // Global Whether this Knowledge Base Entry is global, defaults to false. + Global *bool `json:"global,omitempty"` + + // Index Index or Data Stream to query for Knowledge Base content. + Index string `json:"index"` + + // InputSchema Array of objects defining the input schema, allowing the LLM to extract structured data to be used in retrieval. + InputSchema *SecurityAIAssistantAPIInputSchema `json:"inputSchema,omitempty"` + + // Name Name of the Knowledge Base Entry. + Name string `json:"name"` + + // Namespace Kibana Space, defaults to 'default' space. + Namespace *string `json:"namespace,omitempty"` + + // OutputFields Fields to extract from the query result, defaults to all fields if not provided or empty. + OutputFields *[]string `json:"outputFields,omitempty"` + + // QueryDescription Description of query field used to fetch Knowledge Base content. Passed to the LLM as part of the tool input schema. + QueryDescription string `json:"queryDescription"` + + // Type Entry type. + Type SecurityAIAssistantAPIIndexEntryCreateFieldsType `json:"type"` + + // Users Users who have access to the Knowledge Base Entry, defaults to current user. Empty array provides access to all users. + Users *[]SecurityAIAssistantAPIUser `json:"users,omitempty"` +} + +// SecurityAIAssistantAPIIndexEntryCreateFieldsType Entry type. +type SecurityAIAssistantAPIIndexEntryCreateFieldsType string + +// SecurityAIAssistantAPIIndexEntryOptionalFields defines model for Security_AI_Assistant_API_IndexEntryOptionalFields. 
+type SecurityAIAssistantAPIIndexEntryOptionalFields struct { + // InputSchema Array of objects defining the input schema, allowing the LLM to extract structured data to be used in retrieval. + InputSchema *SecurityAIAssistantAPIInputSchema `json:"inputSchema,omitempty"` + + // OutputFields Fields to extract from the query result, defaults to all fields if not provided or empty. + OutputFields *[]string `json:"outputFields,omitempty"` +} + +// SecurityAIAssistantAPIIndexEntryRequiredFields defines model for Security_AI_Assistant_API_IndexEntryRequiredFields. +type SecurityAIAssistantAPIIndexEntryRequiredFields struct { + // Description Description for when this index or data stream should be queried for Knowledge Base content. Passed to the LLM as a tool description. + Description string `json:"description"` + + // Field Field to query for Knowledge Base content. + Field string `json:"field"` + + // Index Index or Data Stream to query for Knowledge Base content. + Index string `json:"index"` + + // QueryDescription Description of query field used to fetch Knowledge Base content. Passed to the LLM as part of the tool input schema. + QueryDescription string `json:"queryDescription"` + + // Type Entry type. + Type SecurityAIAssistantAPIIndexEntryRequiredFieldsType `json:"type"` +} + +// SecurityAIAssistantAPIIndexEntryRequiredFieldsType Entry type. +type SecurityAIAssistantAPIIndexEntryRequiredFieldsType string + +// SecurityAIAssistantAPIIndexEntryResponseFields defines model for Security_AI_Assistant_API_IndexEntryResponseFields. +type SecurityAIAssistantAPIIndexEntryResponseFields struct { + // Description Description for when this index or data stream should be queried for Knowledge Base content. Passed to the LLM as a tool description. + Description string `json:"description"` + + // Field Field to query for Knowledge Base content. + Field string `json:"field"` + + // Index Index or Data Stream to query for Knowledge Base content. + Index string `json:"index"` + + // InputSchema Array of objects defining the input schema, allowing the LLM to extract structured data to be used in retrieval. + InputSchema *SecurityAIAssistantAPIInputSchema `json:"inputSchema,omitempty"` + + // OutputFields Fields to extract from the query result, defaults to all fields if not provided or empty. + OutputFields *[]string `json:"outputFields,omitempty"` + + // QueryDescription Description of query field used to fetch Knowledge Base content. Passed to the LLM as part of the tool input schema. + QueryDescription string `json:"queryDescription"` + + // Type Entry type. + Type SecurityAIAssistantAPIIndexEntryResponseFieldsType `json:"type"` +} + +// SecurityAIAssistantAPIIndexEntryResponseFieldsType Entry type. +type SecurityAIAssistantAPIIndexEntryResponseFieldsType string + +// SecurityAIAssistantAPIIndexEntryUpdateFields defines model for Security_AI_Assistant_API_IndexEntryUpdateFields. +type SecurityAIAssistantAPIIndexEntryUpdateFields struct { + // Description Description for when this index or data stream should be queried for Knowledge Base content. Passed to the LLM as a tool description. + Description string `json:"description"` + + // Field Field to query for Knowledge Base content. + Field string `json:"field"` + + // Global Whether this Knowledge Base Entry is global, defaults to false. + Global *bool `json:"global,omitempty"` + + // Id A string that does not contain only whitespace characters. 
+ Id SecurityAIAssistantAPINonEmptyString `json:"id"` + + // Index Index or Data Stream to query for Knowledge Base content. + Index string `json:"index"` + + // InputSchema Array of objects defining the input schema, allowing the LLM to extract structured data to be used in retrieval. + InputSchema *SecurityAIAssistantAPIInputSchema `json:"inputSchema,omitempty"` + + // Name Name of the Knowledge Base Entry. + Name string `json:"name"` + + // Namespace Kibana Space, defaults to 'default' space. + Namespace *string `json:"namespace,omitempty"` + + // OutputFields Fields to extract from the query result, defaults to all fields if not provided or empty. + OutputFields *[]string `json:"outputFields,omitempty"` + + // QueryDescription Description of query field used to fetch Knowledge Base content. Passed to the LLM as part of the tool input schema. + QueryDescription string `json:"queryDescription"` + + // Type Entry type. + Type SecurityAIAssistantAPIIndexEntryUpdateFieldsType `json:"type"` + + // Users Users who have access to the Knowledge Base Entry, defaults to current user. Empty array provides access to all users. + Users *[]SecurityAIAssistantAPIUser `json:"users,omitempty"` +} + +// SecurityAIAssistantAPIIndexEntryUpdateFieldsType Entry type. +type SecurityAIAssistantAPIIndexEntryUpdateFieldsType string + +// SecurityAIAssistantAPIInputSchema Array of objects defining the input schema, allowing the LLM to extract structured data to be used in retrieval. +type SecurityAIAssistantAPIInputSchema = []struct { + // Description Description of the field. + Description string `json:"description"` + + // FieldName Name of the field. + FieldName string `json:"fieldName"` + + // FieldType Type of the field. + FieldType string `json:"fieldType"` +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryBulkActionSkipReason Reason why a Knowledge Base Entry was skipped during the bulk action. +type SecurityAIAssistantAPIKnowledgeBaseEntryBulkActionSkipReason string + +// SecurityAIAssistantAPIKnowledgeBaseEntryBulkActionSkipResult defines model for Security_AI_Assistant_API_KnowledgeBaseEntryBulkActionSkipResult. +type SecurityAIAssistantAPIKnowledgeBaseEntryBulkActionSkipResult struct { + // Id ID of the skipped Knowledge Base Entry. + Id string `json:"id"` + + // Name Name of the skipped Knowledge Base Entry. + Name *string `json:"name,omitempty"` + + // SkipReason Reason why a Knowledge Base Entry was skipped during the bulk action. + SkipReason SecurityAIAssistantAPIKnowledgeBaseEntryBulkActionSkipReason `json:"skip_reason"` +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryBulkCrudActionResponse defines model for Security_AI_Assistant_API_KnowledgeBaseEntryBulkCrudActionResponse. +type SecurityAIAssistantAPIKnowledgeBaseEntryBulkCrudActionResponse struct { + Attributes struct { + // Errors List of errors encountered during the bulk action. + Errors *[]SecurityAIAssistantAPINormalizedKnowledgeBaseEntryError `json:"errors,omitempty"` + Results SecurityAIAssistantAPIKnowledgeBaseEntryBulkCrudActionResults `json:"results"` + Summary SecurityAIAssistantAPIKnowledgeBaseEntryBulkCrudActionSummary `json:"summary"` + } `json:"attributes"` + + // KnowledgeBaseEntriesCount Total number of Knowledge Base Entries processed. + KnowledgeBaseEntriesCount *int `json:"knowledgeBaseEntriesCount,omitempty"` + + // Message Message describing the result of the bulk action. + Message *string `json:"message,omitempty"` + + // StatusCode HTTP status code of the response. 
+ StatusCode *int `json:"statusCode,omitempty"` + + // Success Indicates whether the bulk action was successful. + Success *bool `json:"success,omitempty"` +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryBulkCrudActionResults defines model for Security_AI_Assistant_API_KnowledgeBaseEntryBulkCrudActionResults. +type SecurityAIAssistantAPIKnowledgeBaseEntryBulkCrudActionResults struct { + // Created List of Knowledge Base Entries that were successfully created. + Created []SecurityAIAssistantAPIKnowledgeBaseEntryResponse `json:"created"` + + // Deleted List of IDs of Knowledge Base Entries that were successfully deleted. + Deleted []string `json:"deleted"` + + // Skipped List of Knowledge Base Entries that were skipped during the bulk action. + Skipped []SecurityAIAssistantAPIKnowledgeBaseEntryBulkActionSkipResult `json:"skipped"` + + // Updated List of Knowledge Base Entries that were successfully updated. + Updated []SecurityAIAssistantAPIKnowledgeBaseEntryResponse `json:"updated"` +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryBulkCrudActionSummary defines model for Security_AI_Assistant_API_KnowledgeBaseEntryBulkCrudActionSummary. +type SecurityAIAssistantAPIKnowledgeBaseEntryBulkCrudActionSummary struct { + // Failed Number of Knowledge Base Entries that failed during the bulk action. + Failed int `json:"failed"` + + // Skipped Number of Knowledge Base Entries that were skipped during the bulk action. + Skipped int `json:"skipped"` + + // Succeeded Number of Knowledge Base Entries that were successfully processed during the bulk action. + Succeeded int `json:"succeeded"` + + // Total Total number of Knowledge Base Entries involved in the bulk action. + Total int `json:"total"` +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryContentReference defines model for Security_AI_Assistant_API_KnowledgeBaseEntryContentReference. +type SecurityAIAssistantAPIKnowledgeBaseEntryContentReference struct { + // Id Id of the content reference + Id string `json:"id"` + + // KnowledgeBaseEntryId Id of the Knowledge Base Entry + KnowledgeBaseEntryId string `json:"knowledgeBaseEntryId"` + + // KnowledgeBaseEntryName Name of the knowledge base entry + KnowledgeBaseEntryName string `json:"knowledgeBaseEntryName"` + Type SecurityAIAssistantAPIKnowledgeBaseEntryContentReferenceType `json:"type"` +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryContentReferenceType defines model for SecurityAIAssistantAPIKnowledgeBaseEntryContentReference.Type. +type SecurityAIAssistantAPIKnowledgeBaseEntryContentReferenceType string + +// SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps defines model for Security_AI_Assistant_API_KnowledgeBaseEntryCreateProps. +type SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps struct { + union json.RawMessage +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryDetailsInError defines model for Security_AI_Assistant_API_KnowledgeBaseEntryDetailsInError. +type SecurityAIAssistantAPIKnowledgeBaseEntryDetailsInError struct { + // Id ID of the Knowledge Base Entry that encountered an error. + Id string `json:"id"` + + // Name Name of the Knowledge Base Entry that encountered an error. + Name *string `json:"name,omitempty"` +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema defines model for Security_AI_Assistant_API_KnowledgeBaseEntryErrorSchema. +type SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema struct { + // Error Error type or category. + Error string `json:"error"` + + // Message Detailed error message. 
+ Message string `json:"message"` + + // StatusCode HTTP status code of the error. + StatusCode float32 `json:"statusCode"` +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryResponse defines model for Security_AI_Assistant_API_KnowledgeBaseEntryResponse. +type SecurityAIAssistantAPIKnowledgeBaseEntryResponse struct { + union json.RawMessage +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps defines model for Security_AI_Assistant_API_KnowledgeBaseEntryUpdateProps. +type SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps struct { + union json.RawMessage +} + +// SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps defines model for Security_AI_Assistant_API_KnowledgeBaseEntryUpdateRouteProps. +type SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps struct { + union json.RawMessage +} + +// SecurityAIAssistantAPIKnowledgeBaseResource Knowledge Base resource name for grouping entries, e.g. 'security_labs', 'user', etc. +type SecurityAIAssistantAPIKnowledgeBaseResource string + +// SecurityAIAssistantAPIKnowledgeBaseResponse AI assistant KnowledgeBase. +type SecurityAIAssistantAPIKnowledgeBaseResponse struct { + // Success Identify the success of the method execution. + Success *bool `json:"success,omitempty"` +} + +// SecurityAIAssistantAPIMessage AI assistant conversation message. +type SecurityAIAssistantAPIMessage struct { + // Content Message content. + Content string `json:"content"` + + // Id A string that does not contain only whitespace characters. + Id *SecurityAIAssistantAPINonEmptyString `json:"id,omitempty"` + + // IsError Is error message. + IsError *bool `json:"isError,omitempty"` + + // Metadata Message metadata + Metadata *SecurityAIAssistantAPIMessageMetadata `json:"metadata,omitempty"` + Reader *SecurityAIAssistantAPIReader `json:"reader,omitempty"` + + // Role Message role. + Role SecurityAIAssistantAPIMessageRole `json:"role"` + + // Timestamp A string that represents a timestamp in ISO 8601 format and does not contain only whitespace characters. + Timestamp SecurityAIAssistantAPINonEmptyTimestamp `json:"timestamp"` + + // TraceData Trace Data + TraceData *SecurityAIAssistantAPITraceData `json:"traceData,omitempty"` + + // User Could be any string, not necessarily a UUID. + User *SecurityAIAssistantAPIUser `json:"user,omitempty"` +} + +// SecurityAIAssistantAPIMessageData ECS-style metadata attached to the message. +type SecurityAIAssistantAPIMessageData map[string]interface{} + +// SecurityAIAssistantAPIMessageMetadata Message metadata +type SecurityAIAssistantAPIMessageMetadata struct { + // ContentReferences A union of all content reference types + ContentReferences *SecurityAIAssistantAPIContentReferences `json:"contentReferences,omitempty"` +} + +// SecurityAIAssistantAPIMessageRole Message role. +type SecurityAIAssistantAPIMessageRole string + +// SecurityAIAssistantAPINonEmptyString A string that does not contain only whitespace characters. +type SecurityAIAssistantAPINonEmptyString = string + +// SecurityAIAssistantAPINonEmptyTimestamp A string that represents a timestamp in ISO 8601 format and does not contain only whitespace characters. +type SecurityAIAssistantAPINonEmptyTimestamp = string + +// SecurityAIAssistantAPINormalizedAnonymizationFieldError defines model for Security_AI_Assistant_API_NormalizedAnonymizationFieldError. +type SecurityAIAssistantAPINormalizedAnonymizationFieldError struct { + // AnonymizationFields Array of anonymization fields that caused the error. 
+ AnonymizationFields []SecurityAIAssistantAPIAnonymizationFieldDetailsInError `json:"anonymization_fields"` + + // ErrCode Error code indicating the type of failure. + ErrCode *string `json:"err_code,omitempty"` + + // Message Error message. + Message string `json:"message"` + + // StatusCode Status code of the response. + StatusCode int `json:"status_code"` +} + +// SecurityAIAssistantAPINormalizedKnowledgeBaseEntryError defines model for Security_AI_Assistant_API_NormalizedKnowledgeBaseEntryError. +type SecurityAIAssistantAPINormalizedKnowledgeBaseEntryError struct { + // ErrCode Specific error code for the issue. + ErrCode *string `json:"err_code,omitempty"` + + // KnowledgeBaseEntries List of Knowledge Base Entries that encountered the error. + KnowledgeBaseEntries []SecurityAIAssistantAPIKnowledgeBaseEntryDetailsInError `json:"knowledgeBaseEntries"` + + // Message Error message describing the issue. + Message string `json:"message"` + + // StatusCode HTTP status code associated with the error. + StatusCode int `json:"statusCode"` +} + +// SecurityAIAssistantAPINormalizedPromptError defines model for Security_AI_Assistant_API_NormalizedPromptError. +type SecurityAIAssistantAPINormalizedPromptError struct { + // ErrCode A code representing the error type. + ErrCode *string `json:"err_code,omitempty"` + + // Message A message describing the error encountered. + Message string `json:"message"` + + // Prompts List of prompts that encountered errors. + Prompts []SecurityAIAssistantAPIPromptDetailsInError `json:"prompts"` + + // StatusCode The HTTP status code associated with the error. + StatusCode int `json:"status_code"` +} + +// SecurityAIAssistantAPIProductDocumentationContentReference defines model for Security_AI_Assistant_API_ProductDocumentationContentReference. +type SecurityAIAssistantAPIProductDocumentationContentReference struct { + // Id Id of the content reference + Id string `json:"id"` + + // Title Title of the documentation + Title string `json:"title"` + Type SecurityAIAssistantAPIProductDocumentationContentReferenceType `json:"type"` + + // Url URL to the documentation + Url string `json:"url"` +} + +// SecurityAIAssistantAPIProductDocumentationContentReferenceType defines model for SecurityAIAssistantAPIProductDocumentationContentReference.Type. +type SecurityAIAssistantAPIProductDocumentationContentReferenceType string + +// SecurityAIAssistantAPIPromptCreateProps defines model for Security_AI_Assistant_API_PromptCreateProps. +type SecurityAIAssistantAPIPromptCreateProps struct { + // Categories List of categories for the prompt. + Categories *[]string `json:"categories,omitempty"` + + // Color The color associated with the prompt. + Color *string `json:"color,omitempty"` + + // Consumer The consumer associated with the prompt. + Consumer *string `json:"consumer,omitempty"` + + // Content The content of the prompt. + Content string `json:"content"` + + // IsDefault Whether this prompt should be the default. + IsDefault *bool `json:"isDefault,omitempty"` + + // IsNewConversationDefault Whether this prompt should be the default for new conversations. + IsNewConversationDefault *bool `json:"isNewConversationDefault,omitempty"` + + // Name The name of the prompt. + Name string `json:"name"` + + // PromptType Type of the prompt (either system or quick). + PromptType SecurityAIAssistantAPIPromptType `json:"promptType"` +} + +// SecurityAIAssistantAPIPromptDetailsInError defines model for Security_AI_Assistant_API_PromptDetailsInError. 
+type SecurityAIAssistantAPIPromptDetailsInError struct { + // Id The ID of the prompt that encountered an error. + Id string `json:"id"` + + // Name The name of the prompt that encountered an error. + Name *string `json:"name,omitempty"` +} + +// SecurityAIAssistantAPIPromptResponse defines model for Security_AI_Assistant_API_PromptResponse. +type SecurityAIAssistantAPIPromptResponse struct { + // Categories Categories associated with the prompt. + Categories *[]string `json:"categories,omitempty"` + + // Color The color associated with the prompt. + Color *string `json:"color,omitempty"` + + // Consumer The consumer that the prompt is associated with. + Consumer *string `json:"consumer,omitempty"` + + // Content The content of the prompt. + Content string `json:"content"` + + // CreatedAt The timestamp of when the prompt was created. + CreatedAt *string `json:"createdAt,omitempty"` + + // CreatedBy The user who created the prompt. + CreatedBy *string `json:"createdBy,omitempty"` + + // Id A string that does not contain only whitespace characters. + Id SecurityAIAssistantAPINonEmptyString `json:"id"` + + // IsDefault Whether this prompt is the default. + IsDefault *bool `json:"isDefault,omitempty"` + + // IsNewConversationDefault Whether this prompt is the default for new conversations. + IsNewConversationDefault *bool `json:"isNewConversationDefault,omitempty"` + + // Name The name of the prompt. + Name string `json:"name"` + + // Namespace Kibana space where the prompt is located. + Namespace *string `json:"namespace,omitempty"` + + // PromptType Type of the prompt (either system or quick). + PromptType SecurityAIAssistantAPIPromptType `json:"promptType"` + + // Timestamp A string that represents a timestamp in ISO 8601 format and does not contain only whitespace characters. + Timestamp *SecurityAIAssistantAPINonEmptyTimestamp `json:"timestamp,omitempty"` + + // UpdatedAt The timestamp of when the prompt was last updated. + UpdatedAt *string `json:"updatedAt,omitempty"` + + // UpdatedBy The user who last updated the prompt. + UpdatedBy *string `json:"updatedBy,omitempty"` + + // Users List of users associated with the prompt. + Users *[]SecurityAIAssistantAPIUser `json:"users,omitempty"` +} + +// SecurityAIAssistantAPIPromptType Type of the prompt (either system or quick). +type SecurityAIAssistantAPIPromptType string + +// SecurityAIAssistantAPIPromptUpdateProps defines model for Security_AI_Assistant_API_PromptUpdateProps. +type SecurityAIAssistantAPIPromptUpdateProps struct { + // Categories The updated categories for the prompt. + Categories *[]string `json:"categories,omitempty"` + + // Color The updated color associated with the prompt. + Color *string `json:"color,omitempty"` + + // Consumer The updated consumer for the prompt. + Consumer *string `json:"consumer,omitempty"` + + // Content The updated content for the prompt. + Content *string `json:"content,omitempty"` + + // Id The ID of the prompt to update. + Id string `json:"id"` + + // IsDefault Whether this prompt should be the default. + IsDefault *bool `json:"isDefault,omitempty"` + + // IsNewConversationDefault Whether the prompt should be the default for new conversations. + IsNewConversationDefault *bool `json:"isNewConversationDefault,omitempty"` +} + +// SecurityAIAssistantAPIPromptsBulkActionSkipReason Reason why a prompt was skipped during the bulk action. 
+type SecurityAIAssistantAPIPromptsBulkActionSkipReason string + +// SecurityAIAssistantAPIPromptsBulkActionSkipResult defines model for Security_AI_Assistant_API_PromptsBulkActionSkipResult. +type SecurityAIAssistantAPIPromptsBulkActionSkipResult struct { + // Id The ID of the prompt that was skipped. + Id string `json:"id"` + + // Name The name of the prompt that was skipped. + Name *string `json:"name,omitempty"` + + // SkipReason Reason why a prompt was skipped during the bulk action. + SkipReason SecurityAIAssistantAPIPromptsBulkActionSkipReason `json:"skip_reason"` +} + +// SecurityAIAssistantAPIPromptsBulkCrudActionResponse defines model for Security_AI_Assistant_API_PromptsBulkCrudActionResponse. +type SecurityAIAssistantAPIPromptsBulkCrudActionResponse struct { + Attributes struct { + Errors *[]SecurityAIAssistantAPINormalizedPromptError `json:"errors,omitempty"` + Results SecurityAIAssistantAPIPromptsBulkCrudActionResults `json:"results"` + Summary SecurityAIAssistantAPIBulkCrudActionSummary `json:"summary"` + } `json:"attributes"` + + // Message A message describing the result of the bulk action. + Message *string `json:"message,omitempty"` + + // PromptsCount The number of prompts processed in the bulk action. + PromptsCount *int `json:"prompts_count,omitempty"` + + // StatusCode The HTTP status code of the response. + StatusCode *int `json:"status_code,omitempty"` + + // Success Indicates if the bulk action was successful. + Success *bool `json:"success,omitempty"` +} + +// SecurityAIAssistantAPIPromptsBulkCrudActionResults defines model for Security_AI_Assistant_API_PromptsBulkCrudActionResults. +type SecurityAIAssistantAPIPromptsBulkCrudActionResults struct { + // Created List of prompts that were created. + Created []SecurityAIAssistantAPIPromptResponse `json:"created"` + + // Deleted List of IDs of prompts that were deleted. + Deleted []string `json:"deleted"` + + // Skipped List of prompts that were skipped. + Skipped []SecurityAIAssistantAPIPromptsBulkActionSkipResult `json:"skipped"` + + // Updated List of prompts that were updated. + Updated []SecurityAIAssistantAPIPromptResponse `json:"updated"` +} + +// SecurityAIAssistantAPIProvider Provider +type SecurityAIAssistantAPIProvider string + +// SecurityAIAssistantAPIReader defines model for Security_AI_Assistant_API_Reader. +type SecurityAIAssistantAPIReader map[string]interface{} + +// SecurityAIAssistantAPIReplacements Replacements object used to anonymize/deanonymize messages +type SecurityAIAssistantAPIReplacements map[string]string + +// SecurityAIAssistantAPIResponseFields defines model for Security_AI_Assistant_API_ResponseFields. +type SecurityAIAssistantAPIResponseFields struct { + // CreatedAt Time the Knowledge Base Entry was created. + CreatedAt string `json:"createdAt"` + + // CreatedBy User who created the Knowledge Base Entry. + CreatedBy string `json:"createdBy"` + + // Id A string that does not contain only whitespace characters. + Id SecurityAIAssistantAPINonEmptyString `json:"id"` + + // UpdatedAt Time the Knowledge Base Entry was last updated. + UpdatedAt string `json:"updatedAt"` + + // UpdatedBy User who last updated the Knowledge Base Entry. + UpdatedBy string `json:"updatedBy"` +} + +// SecurityAIAssistantAPISecurityAlertContentReference defines model for Security_AI_Assistant_API_SecurityAlertContentReference. 
+type SecurityAIAssistantAPISecurityAlertContentReference struct { + // AlertId ID of the Alert + AlertId string `json:"alertId"` + + // Id Id of the content reference + Id string `json:"id"` + Type SecurityAIAssistantAPISecurityAlertContentReferenceType `json:"type"` +} + +// SecurityAIAssistantAPISecurityAlertContentReferenceType defines model for SecurityAIAssistantAPISecurityAlertContentReference.Type. +type SecurityAIAssistantAPISecurityAlertContentReferenceType string + +// SecurityAIAssistantAPISecurityAlertsPageContentReference defines model for Security_AI_Assistant_API_SecurityAlertsPageContentReference. +type SecurityAIAssistantAPISecurityAlertsPageContentReference struct { + // Id Id of the content reference + Id string `json:"id"` + Type SecurityAIAssistantAPISecurityAlertsPageContentReferenceType `json:"type"` +} + +// SecurityAIAssistantAPISecurityAlertsPageContentReferenceType defines model for SecurityAIAssistantAPISecurityAlertsPageContentReference.Type. +type SecurityAIAssistantAPISecurityAlertsPageContentReferenceType string + +// SecurityAIAssistantAPISortOrder The order in which results are sorted. +type SecurityAIAssistantAPISortOrder string + +// SecurityAIAssistantAPITraceData Trace Data +type SecurityAIAssistantAPITraceData struct { + // TraceId Could be any string, not necessarily a UUID + TraceId *string `json:"traceId,omitempty"` + + // TransactionId Could be any string, not necessarily a UUID + TransactionId *string `json:"transactionId,omitempty"` +} + +// SecurityAIAssistantAPIUser Could be any string, not necessarily a UUID. +type SecurityAIAssistantAPIUser struct { + // Id User id. + Id *string `json:"id,omitempty"` + + // Name User name. + Name *string `json:"name,omitempty"` +} + +// SecurityAIAssistantAPIVector Object containing Knowledge Base Entry text embeddings and modelId used to create the embeddings. +type SecurityAIAssistantAPIVector struct { + // ModelId ID of the model used to create the embeddings. + ModelId string `json:"modelId"` + + // Tokens Tokens with their corresponding values. + Tokens map[string]float32 `json:"tokens"` +} + +// SecurityDetectionsAPIAlertAssignees defines model for Security_Detections_API_AlertAssignees. +type SecurityDetectionsAPIAlertAssignees struct { + Add []string `json:"add"` + Remove []string `json:"remove"` +} + +// SecurityDetectionsAPIAlertIds A list of alerts `id`s. +type SecurityDetectionsAPIAlertIds = []string + +// SecurityDetectionsAPIAlertStatus The status of an alert, which can be `open`, `acknowledged`, `in-progress`, or `closed`. +type SecurityDetectionsAPIAlertStatus string + +// SecurityDetectionsAPIAlertSuppression Defines alert suppression configuration. +type SecurityDetectionsAPIAlertSuppression struct { + Duration *SecurityDetectionsAPIAlertSuppressionDuration `json:"duration,omitempty"` + GroupBy SecurityDetectionsAPIAlertSuppressionGroupBy `json:"group_by"` + + // MissingFieldsStrategy Describes how alerts will be generated for documents with missing suppress by fields: + // doNotSuppress - per each document a separate alert will be created + // suppress - only alert will be created per suppress by bucket + MissingFieldsStrategy *SecurityDetectionsAPIAlertSuppressionMissingFieldsStrategy `json:"missing_fields_strategy,omitempty"` +} + +// SecurityDetectionsAPIAlertSuppressionDuration defines model for Security_Detections_API_AlertSuppressionDuration. 
+type SecurityDetectionsAPIAlertSuppressionDuration struct { + // Unit Time unit + Unit SecurityDetectionsAPIAlertSuppressionDurationUnit `json:"unit"` + Value int `json:"value"` +} + +// SecurityDetectionsAPIAlertSuppressionDurationUnit Time unit +type SecurityDetectionsAPIAlertSuppressionDurationUnit string + +// SecurityDetectionsAPIAlertSuppressionGroupBy defines model for Security_Detections_API_AlertSuppressionGroupBy. +type SecurityDetectionsAPIAlertSuppressionGroupBy = []string + +// SecurityDetectionsAPIAlertSuppressionMissingFieldsStrategy Describes how alerts will be generated for documents with missing suppress by fields: +// doNotSuppress - per each document a separate alert will be created +// suppress - only alert will be created per suppress by bucket +type SecurityDetectionsAPIAlertSuppressionMissingFieldsStrategy string + +// SecurityDetectionsAPIAlertTag Use alert tags to organize related alerts into categories that you can filter and group. +type SecurityDetectionsAPIAlertTag = string + +// SecurityDetectionsAPIAlertTags List of keywords to organize related alerts into categories that you can filter and group. +type SecurityDetectionsAPIAlertTags = []SecurityDetectionsAPIAlertTag + +// SecurityDetectionsAPIAlertVersion defines model for Security_Detections_API_AlertVersion. +type SecurityDetectionsAPIAlertVersion struct { + Count int `json:"count"` + Version int `json:"version"` +} + +// SecurityDetectionsAPIAlertsIndex (deprecated) Has no effect. +type SecurityDetectionsAPIAlertsIndex = string + +// SecurityDetectionsAPIAlertsIndexMigrationError defines model for Security_Detections_API_AlertsIndexMigrationError. +type SecurityDetectionsAPIAlertsIndexMigrationError struct { + Error struct { + Message string `json:"message"` + StatusCode string `json:"status_code"` + } `json:"error"` + Index string `json:"index"` +} + +// SecurityDetectionsAPIAlertsIndexMigrationSuccess defines model for Security_Detections_API_AlertsIndexMigrationSuccess. +type SecurityDetectionsAPIAlertsIndexMigrationSuccess struct { + Index string `json:"index"` + MigrationId string `json:"migration_id"` + MigrationIndex string `json:"migration_index"` +} + +// SecurityDetectionsAPIAlertsIndexNamespace Has no effect. +type SecurityDetectionsAPIAlertsIndexNamespace = string + +// SecurityDetectionsAPIAlertsReindexOptions defines model for Security_Detections_API_AlertsReindexOptions. +type SecurityDetectionsAPIAlertsReindexOptions struct { + // RequestsPerSecond The throttle for the migration task in sub-requests per second. Corresponds to requests_per_second on the Reindex API. + RequestsPerSecond *int `json:"requests_per_second,omitempty"` + + // Size Number of alerts to migrate per batch. Corresponds to the source.size option on the Reindex API. + Size *int `json:"size,omitempty"` + + // Slices The number of subtasks for the migration task. Corresponds to slices on the Reindex API. + Slices *int `json:"slices,omitempty"` +} + +// SecurityDetectionsAPIAlertsSort defines model for Security_Detections_API_AlertsSort. +type SecurityDetectionsAPIAlertsSort struct { + union json.RawMessage +} + +// SecurityDetectionsAPIAlertsSort1 defines model for . +type SecurityDetectionsAPIAlertsSort1 = []SecurityDetectionsAPIAlertsSortCombinations + +// SecurityDetectionsAPIAlertsSortCombinations defines model for Security_Detections_API_AlertsSortCombinations. +type SecurityDetectionsAPIAlertsSortCombinations struct { + union json.RawMessage +} + +// SecurityDetectionsAPIAlertsSortCombinations0 defines model for . 
+type SecurityDetectionsAPIAlertsSortCombinations0 = string
+
+// SecurityDetectionsAPIAlertsSortCombinations1 defines model for .
+type SecurityDetectionsAPIAlertsSortCombinations1 map[string]interface{}
+
+// SecurityDetectionsAPIAnomalyThreshold Anomaly score threshold above which the rule creates an alert. Valid values are from 0 to 100.
+type SecurityDetectionsAPIAnomalyThreshold = int
+
+// SecurityDetectionsAPIBuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`.
+// By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts.
+// For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html).
+type SecurityDetectionsAPIBuildingBlockType = string
+
+// SecurityDetectionsAPIBulkActionEditPayload defines model for Security_Detections_API_BulkActionEditPayload.
+type SecurityDetectionsAPIBulkActionEditPayload struct {
+	union json.RawMessage
+}
+
+// SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression defines model for Security_Detections_API_BulkActionEditPayloadAlertSuppression.
+type SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression struct {
+	union json.RawMessage
+}
+
+// SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression defines model for Security_Detections_API_BulkActionEditPayloadDeleteAlertSuppression.
+type SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression struct {
+	Type SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppressionType `json:"type"`
+}
+
+// SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppressionType defines model for SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression.Type.
+type SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppressionType string
+
+// SecurityDetectionsAPIBulkActionEditPayloadIndexPatterns Edits index patterns of rules.
+//
+// - `add_index_patterns` adds index patterns to rules. If an index pattern already exists for a rule, no changes are made.
+// - `delete_index_patterns` removes index patterns from rules. If an index pattern does not exist for a rule, no changes are made.
+// - `set_index_patterns` sets index patterns for rules, overwriting any existing index patterns. If the set of index patterns is the same as the existing index patterns, no changes are made.
+type SecurityDetectionsAPIBulkActionEditPayloadIndexPatterns struct {
+	// OverwriteDataViews Resets the data view for the rule.
+	OverwriteDataViews *bool `json:"overwrite_data_views,omitempty"`
+	Type SecurityDetectionsAPIBulkActionEditPayloadIndexPatternsType `json:"type"`
+
+	// Value Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`).
+	// > info
+	// > This field is not supported for ES|QL rules.
+	Value SecurityDetectionsAPIIndexPatternArray `json:"value"`
+}
+
+// SecurityDetectionsAPIBulkActionEditPayloadIndexPatternsType defines model for SecurityDetectionsAPIBulkActionEditPayloadIndexPatterns.Type.
+type SecurityDetectionsAPIBulkActionEditPayloadIndexPatternsType string
+
+// SecurityDetectionsAPIBulkActionEditPayloadInvestigationFields Edits investigation fields of rules.
+//
+// - `add_investigation_fields` adds investigation fields to rules. If an investigation field already exists for a rule, no changes are made.
+// - `delete_investigation_fields` removes investigation fields from rules. If an investigation field does not exist for a rule, no changes are made. +// - `set_investigation_fields` sets investigation fields for rules. If the set of investigation fields is the same as the existing investigation fields, no changes are made. +type SecurityDetectionsAPIBulkActionEditPayloadInvestigationFields struct { + Type SecurityDetectionsAPIBulkActionEditPayloadInvestigationFieldsType `json:"type"` + + // Value Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + Value SecurityDetectionsAPIInvestigationFields `json:"value"` +} + +// SecurityDetectionsAPIBulkActionEditPayloadInvestigationFieldsType defines model for SecurityDetectionsAPIBulkActionEditPayloadInvestigationFields.Type. +type SecurityDetectionsAPIBulkActionEditPayloadInvestigationFieldsType string + +// SecurityDetectionsAPIBulkActionEditPayloadRuleActions Edits rule actions of rules. +// +// - `add_rule_actions` adds rule actions to rules. This action is non-idempotent, meaning that even if the same rule action already exists for a rule, it will be added again with a new unique ID. +// - `set_rule_actions` sets rule actions for rules. This action is non-idempotent, meaning that even if the same set of rule actions already exists for a rule, it will be set again and the actions will receive new unique IDs. +type SecurityDetectionsAPIBulkActionEditPayloadRuleActions struct { + Type SecurityDetectionsAPIBulkActionEditPayloadRuleActionsType `json:"type"` + Value struct { + Actions []SecurityDetectionsAPINormalizedRuleAction `json:"actions"` + + // Throttle Defines the maximum interval in which a rule’s actions are executed. + // > info + // > The rule level `throttle` field is deprecated in Elastic Security 8.8 and will remain active for at least the next 12 months. + // > In Elastic Security 8.8 and later, you can use the `frequency` field to define frequencies for individual actions. Actions without frequencies will acquire a converted version of the rule’s `throttle` field. In the response, the converted `throttle` setting appears in the individual actions' `frequency` field. + Throttle *SecurityDetectionsAPIThrottleForBulkActions `json:"throttle,omitempty"` + } `json:"value"` +} + +// SecurityDetectionsAPIBulkActionEditPayloadRuleActionsType defines model for SecurityDetectionsAPIBulkActionEditPayloadRuleActions.Type. +type SecurityDetectionsAPIBulkActionEditPayloadRuleActionsType string + +// SecurityDetectionsAPIBulkActionEditPayloadSchedule Overwrites schedule of rules. +// +// - `set_schedule` sets a schedule for rules. If the same schedule already exists for a rule, no changes are made. +// +// Both `interval` and `lookback` have a format of "{integer}{time_unit}", where accepted time units are `s` for seconds, `m` for minutes, and `h` for hours. The integer must be positive and larger than 0. Examples: "45s", "30m", "6h" +type SecurityDetectionsAPIBulkActionEditPayloadSchedule struct { + Type SecurityDetectionsAPIBulkActionEditPayloadScheduleType `json:"type"` + Value struct { + // Interval Interval in which the rule runs. For example, `"1h"` means the rule runs every hour. + Interval string `json:"interval"` + + // Lookback Lookback time for the rules. + // + // Additional look-back time that the rule analyzes. 
For example, "10m" means the rule analyzes the last 10 minutes of data in addition to the frequency interval. + Lookback string `json:"lookback"` + } `json:"value"` +} + +// SecurityDetectionsAPIBulkActionEditPayloadScheduleType defines model for SecurityDetectionsAPIBulkActionEditPayloadSchedule.Type. +type SecurityDetectionsAPIBulkActionEditPayloadScheduleType string + +// SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression defines model for Security_Detections_API_BulkActionEditPayloadSetAlertSuppression. +type SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression struct { + Type SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionType `json:"type"` + + // Value Defines alert suppression configuration. + Value SecurityDetectionsAPIAlertSuppression `json:"value"` +} + +// SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionType defines model for SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression.Type. +type SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionType string + +// SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold defines model for Security_Detections_API_BulkActionEditPayloadSetAlertSuppressionForThreshold. +type SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold struct { + Type SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThresholdType `json:"type"` + + // Value Defines alert suppression configuration. + Value SecurityDetectionsAPIThresholdAlertSuppression `json:"value"` +} + +// SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThresholdType defines model for SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold.Type. +type SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThresholdType string + +// SecurityDetectionsAPIBulkActionEditPayloadTags Edits tags of rules. +// +// - `add_tags` adds tags to rules. If a tag already exists for a rule, no changes are made. +// - `delete_tags` removes tags from rules. If a tag does not exist for a rule, no changes are made. +// - `set_tags` sets tags for rules, overwriting any existing tags. If the set of tags is the same as the existing tags, no changes are made. +type SecurityDetectionsAPIBulkActionEditPayloadTags struct { + Type SecurityDetectionsAPIBulkActionEditPayloadTagsType `json:"type"` + + // Value String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Value SecurityDetectionsAPIRuleTagArray `json:"value"` +} + +// SecurityDetectionsAPIBulkActionEditPayloadTagsType defines model for SecurityDetectionsAPIBulkActionEditPayloadTags.Type. +type SecurityDetectionsAPIBulkActionEditPayloadTagsType string + +// SecurityDetectionsAPIBulkActionEditPayloadTimeline Edits timeline of rules. +// +// - `set_timeline` sets a timeline for rules. If the same timeline already exists for a rule, no changes are made. +type SecurityDetectionsAPIBulkActionEditPayloadTimeline struct { + Type SecurityDetectionsAPIBulkActionEditPayloadTimelineType `json:"type"` + Value struct { + // TimelineId Timeline template ID + TimelineId SecurityDetectionsAPITimelineTemplateId `json:"timeline_id"` + + // TimelineTitle Timeline template title + TimelineTitle SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title"` + } `json:"value"` +} + +// SecurityDetectionsAPIBulkActionEditPayloadTimelineType defines model for SecurityDetectionsAPIBulkActionEditPayloadTimeline.Type. 
+type SecurityDetectionsAPIBulkActionEditPayloadTimelineType string + +// SecurityDetectionsAPIBulkActionSkipResult defines model for Security_Detections_API_BulkActionSkipResult. +type SecurityDetectionsAPIBulkActionSkipResult struct { + Id string `json:"id"` + Name *string `json:"name,omitempty"` + SkipReason SecurityDetectionsAPIBulkActionSkipResult_SkipReason `json:"skip_reason"` +} + +// SecurityDetectionsAPIBulkActionSkipResult_SkipReason defines model for SecurityDetectionsAPIBulkActionSkipResult.SkipReason. +type SecurityDetectionsAPIBulkActionSkipResult_SkipReason struct { + union json.RawMessage +} + +// SecurityDetectionsAPIBulkActionsDryRunErrCode defines model for Security_Detections_API_BulkActionsDryRunErrCode. +type SecurityDetectionsAPIBulkActionsDryRunErrCode string + +// SecurityDetectionsAPIBulkDeleteRules defines model for Security_Detections_API_BulkDeleteRules. +type SecurityDetectionsAPIBulkDeleteRules struct { + Action SecurityDetectionsAPIBulkDeleteRulesAction `json:"action"` + + // GapsRangeEnd Gaps range end, valid only when query is provided + GapsRangeEnd *string `json:"gaps_range_end,omitempty"` + + // GapsRangeStart Gaps range start, valid only when query is provided + GapsRangeStart *string `json:"gaps_range_start,omitempty"` + + // Ids Array of rule `id`s to which a bulk action will be applied. Do not use rule's `rule_id` here. + // Only valid when query property is undefined. + Ids *[]string `json:"ids,omitempty"` + + // Query Query to filter rules. + Query *string `json:"query,omitempty"` +} + +// SecurityDetectionsAPIBulkDeleteRulesAction defines model for SecurityDetectionsAPIBulkDeleteRules.Action. +type SecurityDetectionsAPIBulkDeleteRulesAction string + +// SecurityDetectionsAPIBulkDisableRules defines model for Security_Detections_API_BulkDisableRules. +type SecurityDetectionsAPIBulkDisableRules struct { + Action SecurityDetectionsAPIBulkDisableRulesAction `json:"action"` + + // GapsRangeEnd Gaps range end, valid only when query is provided + GapsRangeEnd *string `json:"gaps_range_end,omitempty"` + + // GapsRangeStart Gaps range start, valid only when query is provided + GapsRangeStart *string `json:"gaps_range_start,omitempty"` + + // Ids Array of rule `id`s to which a bulk action will be applied. Do not use rule's `rule_id` here. + // Only valid when query property is undefined. + Ids *[]string `json:"ids,omitempty"` + + // Query Query to filter rules. + Query *string `json:"query,omitempty"` +} + +// SecurityDetectionsAPIBulkDisableRulesAction defines model for SecurityDetectionsAPIBulkDisableRules.Action. +type SecurityDetectionsAPIBulkDisableRulesAction string + +// SecurityDetectionsAPIBulkDuplicateRules defines model for Security_Detections_API_BulkDuplicateRules. +type SecurityDetectionsAPIBulkDuplicateRules struct { + Action SecurityDetectionsAPIBulkDuplicateRulesAction `json:"action"` + + // Duplicate Duplicate object that describes applying an update action. 
+ Duplicate *struct { + // IncludeExceptions Whether to copy exceptions from the original rule + IncludeExceptions bool `json:"include_exceptions"` + + // IncludeExpiredExceptions Whether to copy expired exceptions from the original rule + IncludeExpiredExceptions bool `json:"include_expired_exceptions"` + } `json:"duplicate,omitempty"` + + // GapsRangeEnd Gaps range end, valid only when query is provided + GapsRangeEnd *string `json:"gaps_range_end,omitempty"` + + // GapsRangeStart Gaps range start, valid only when query is provided + GapsRangeStart *string `json:"gaps_range_start,omitempty"` + + // Ids Array of rule `id`s to which a bulk action will be applied. Do not use rule's `rule_id` here. + // Only valid when query property is undefined. + Ids *[]string `json:"ids,omitempty"` + + // Query Query to filter rules. + Query *string `json:"query,omitempty"` +} + +// SecurityDetectionsAPIBulkDuplicateRulesAction defines model for SecurityDetectionsAPIBulkDuplicateRules.Action. +type SecurityDetectionsAPIBulkDuplicateRulesAction string + +// SecurityDetectionsAPIBulkEditActionResponse defines model for Security_Detections_API_BulkEditActionResponse. +type SecurityDetectionsAPIBulkEditActionResponse struct { + Attributes struct { + Errors *[]SecurityDetectionsAPINormalizedRuleError `json:"errors,omitempty"` + Results SecurityDetectionsAPIBulkEditActionResults `json:"results"` + + // Summary A rule can only be skipped when the bulk action to be performed on it results in nothing being done. For example, if the `edit` action is used to add a tag to a rule that already has that tag, or to delete an index pattern that is not specified in a rule. Objects returned in `attributes.results.skipped` will only include rules' `id`, `name`, and `skip_reason`. + Summary SecurityDetectionsAPIBulkEditActionSummary `json:"summary"` + } `json:"attributes"` + Message *string `json:"message,omitempty"` + RulesCount *int `json:"rules_count,omitempty"` + StatusCode *int `json:"status_code,omitempty"` + Success *bool `json:"success,omitempty"` +} + +// SecurityDetectionsAPIBulkEditActionResults defines model for Security_Detections_API_BulkEditActionResults. +type SecurityDetectionsAPIBulkEditActionResults struct { + Created []SecurityDetectionsAPIRuleResponse `json:"created"` + Deleted []SecurityDetectionsAPIRuleResponse `json:"deleted"` + Skipped []SecurityDetectionsAPIBulkActionSkipResult `json:"skipped"` + Updated []SecurityDetectionsAPIRuleResponse `json:"updated"` +} + +// SecurityDetectionsAPIBulkEditActionSummary A rule can only be skipped when the bulk action to be performed on it results in nothing being done. For example, if the `edit` action is used to add a tag to a rule that already has that tag, or to delete an index pattern that is not specified in a rule. Objects returned in `attributes.results.skipped` will only include rules' `id`, `name`, and `skip_reason`. +type SecurityDetectionsAPIBulkEditActionSummary struct { + Failed int `json:"failed"` + Skipped int `json:"skipped"` + Succeeded int `json:"succeeded"` + Total int `json:"total"` +} + +// SecurityDetectionsAPIBulkEditRules defines model for Security_Detections_API_BulkEditRules. 
+type SecurityDetectionsAPIBulkEditRules struct { + Action SecurityDetectionsAPIBulkEditRulesAction `json:"action"` + + // Edit Array of objects containing the edit operations + Edit []SecurityDetectionsAPIBulkActionEditPayload `json:"edit"` + + // GapsRangeEnd Gaps range end, valid only when query is provided + GapsRangeEnd *string `json:"gaps_range_end,omitempty"` + + // GapsRangeStart Gaps range start, valid only when query is provided + GapsRangeStart *string `json:"gaps_range_start,omitempty"` + + // Ids Array of rule `id`s to which a bulk action will be applied. Do not use rule's `rule_id` here. + // Only valid when query property is undefined. + Ids *[]string `json:"ids,omitempty"` + + // Query Query to filter rules. + Query *string `json:"query,omitempty"` +} + +// SecurityDetectionsAPIBulkEditRulesAction defines model for SecurityDetectionsAPIBulkEditRules.Action. +type SecurityDetectionsAPIBulkEditRulesAction string + +// SecurityDetectionsAPIBulkEditSkipReason defines model for Security_Detections_API_BulkEditSkipReason. +type SecurityDetectionsAPIBulkEditSkipReason string + +// SecurityDetectionsAPIBulkEnableRules defines model for Security_Detections_API_BulkEnableRules. +type SecurityDetectionsAPIBulkEnableRules struct { + Action SecurityDetectionsAPIBulkEnableRulesAction `json:"action"` + + // GapsRangeEnd Gaps range end, valid only when query is provided + GapsRangeEnd *string `json:"gaps_range_end,omitempty"` + + // GapsRangeStart Gaps range start, valid only when query is provided + GapsRangeStart *string `json:"gaps_range_start,omitempty"` + + // Ids Array of rule `id`s to which a bulk action will be applied. Do not use rule's `rule_id` here. + // Only valid when query property is undefined. + Ids *[]string `json:"ids,omitempty"` + + // Query Query to filter rules. + Query *string `json:"query,omitempty"` +} + +// SecurityDetectionsAPIBulkEnableRulesAction defines model for SecurityDetectionsAPIBulkEnableRules.Action. +type SecurityDetectionsAPIBulkEnableRulesAction string + +// SecurityDetectionsAPIBulkExportActionResponse defines model for Security_Detections_API_BulkExportActionResponse. +type SecurityDetectionsAPIBulkExportActionResponse = string + +// SecurityDetectionsAPIBulkExportRules defines model for Security_Detections_API_BulkExportRules. +type SecurityDetectionsAPIBulkExportRules struct { + Action SecurityDetectionsAPIBulkExportRulesAction `json:"action"` + + // GapsRangeEnd Gaps range end, valid only when query is provided + GapsRangeEnd *string `json:"gaps_range_end,omitempty"` + + // GapsRangeStart Gaps range start, valid only when query is provided + GapsRangeStart *string `json:"gaps_range_start,omitempty"` + + // Ids Array of rule `id`s to which a bulk action will be applied. Do not use rule's `rule_id` here. + // Only valid when query property is undefined. + Ids *[]string `json:"ids,omitempty"` + + // Query Query to filter rules. + Query *string `json:"query,omitempty"` +} + +// SecurityDetectionsAPIBulkExportRulesAction defines model for SecurityDetectionsAPIBulkExportRules.Action. +type SecurityDetectionsAPIBulkExportRulesAction string + +// SecurityDetectionsAPIBulkGapsFillingSkipReason defines model for Security_Detections_API_BulkGapsFillingSkipReason. +type SecurityDetectionsAPIBulkGapsFillingSkipReason string + +// SecurityDetectionsAPIBulkManualRuleFillGaps defines model for Security_Detections_API_BulkManualRuleFillGaps. 
+type SecurityDetectionsAPIBulkManualRuleFillGaps struct { + Action SecurityDetectionsAPIBulkManualRuleFillGapsAction `json:"action"` + + // FillGaps Object that describes applying a manual gap fill action for the specified time range. + FillGaps struct { + // EndDate End date of the manual gap fill + EndDate string `json:"end_date"` + + // StartDate Start date of the manual gap fill + StartDate string `json:"start_date"` + } `json:"fill_gaps"` + + // GapsRangeEnd Gaps range end, valid only when query is provided + GapsRangeEnd *string `json:"gaps_range_end,omitempty"` + + // GapsRangeStart Gaps range start, valid only when query is provided + GapsRangeStart *string `json:"gaps_range_start,omitempty"` + + // Ids Array of rule `id`s to which a bulk action will be applied. Do not use rule's `rule_id` here. + // Only valid when query property is undefined. + Ids *[]string `json:"ids,omitempty"` + + // Query Query to filter rules. + Query *string `json:"query,omitempty"` +} + +// SecurityDetectionsAPIBulkManualRuleFillGapsAction defines model for SecurityDetectionsAPIBulkManualRuleFillGaps.Action. +type SecurityDetectionsAPIBulkManualRuleFillGapsAction string + +// SecurityDetectionsAPIBulkManualRuleRun defines model for Security_Detections_API_BulkManualRuleRun. +type SecurityDetectionsAPIBulkManualRuleRun struct { + Action SecurityDetectionsAPIBulkManualRuleRunAction `json:"action"` + + // GapsRangeEnd Gaps range end, valid only when query is provided + GapsRangeEnd *string `json:"gaps_range_end,omitempty"` + + // GapsRangeStart Gaps range start, valid only when query is provided + GapsRangeStart *string `json:"gaps_range_start,omitempty"` + + // Ids Array of rule `id`s to which a bulk action will be applied. Do not use rule's `rule_id` here. + // Only valid when query property is undefined. + Ids *[]string `json:"ids,omitempty"` + + // Query Query to filter rules. + Query *string `json:"query,omitempty"` + + // Run Object that describes applying a manual rule run action. + Run struct { + // EndDate End date of the manual rule run + EndDate string `json:"end_date"` + + // StartDate Start date of the manual rule run + StartDate string `json:"start_date"` + } `json:"run"` +} + +// SecurityDetectionsAPIBulkManualRuleRunAction defines model for SecurityDetectionsAPIBulkManualRuleRun.Action. +type SecurityDetectionsAPIBulkManualRuleRunAction string + +// SecurityDetectionsAPIConcurrentSearches defines model for Security_Detections_API_ConcurrentSearches. +type SecurityDetectionsAPIConcurrentSearches = int + +// SecurityDetectionsAPIDataViewId defines model for Security_Detections_API_DataViewId. +type SecurityDetectionsAPIDataViewId = string + +// SecurityDetectionsAPIDefaultParams defines model for Security_Detections_API_DefaultParams. +type SecurityDetectionsAPIDefaultParams struct { + Command SecurityDetectionsAPIDefaultParamsCommand `json:"command"` + Comment *string `json:"comment,omitempty"` +} + +// SecurityDetectionsAPIDefaultParamsCommand defines model for SecurityDetectionsAPIDefaultParams.Command. +type SecurityDetectionsAPIDefaultParamsCommand string + +// SecurityDetectionsAPIEcsMapping Map Osquery results columns or static values to Elastic Common Schema (ECS) fields. Example: "ecs_mapping": {"process.pid": {"field": "pid"}} +type SecurityDetectionsAPIEcsMapping map[string]struct { + Field *string `json:"field,omitempty"` + Value *SecurityDetectionsAPIEcsMapping_Value `json:"value,omitempty"` +} + +// SecurityDetectionsAPIEcsMappingValue0 defines model for . 
+type SecurityDetectionsAPIEcsMappingValue0 = string + +// SecurityDetectionsAPIEcsMappingValue1 defines model for . +type SecurityDetectionsAPIEcsMappingValue1 = []string + +// SecurityDetectionsAPIEcsMapping_Value defines model for SecurityDetectionsAPIEcsMapping.Value. +type SecurityDetectionsAPIEcsMapping_Value struct { + union json.RawMessage +} + +// SecurityDetectionsAPIEndpointResponseAction defines model for Security_Detections_API_EndpointResponseAction. +type SecurityDetectionsAPIEndpointResponseAction struct { + ActionTypeId SecurityDetectionsAPIEndpointResponseActionActionTypeId `json:"action_type_id"` + Params SecurityDetectionsAPIEndpointResponseAction_Params `json:"params"` +} + +// SecurityDetectionsAPIEndpointResponseActionActionTypeId defines model for SecurityDetectionsAPIEndpointResponseAction.ActionTypeId. +type SecurityDetectionsAPIEndpointResponseActionActionTypeId string + +// SecurityDetectionsAPIEndpointResponseAction_Params defines model for SecurityDetectionsAPIEndpointResponseAction.Params. +type SecurityDetectionsAPIEndpointResponseAction_Params struct { + union json.RawMessage +} + +// SecurityDetectionsAPIEqlOptionalFields defines model for Security_Detections_API_EqlOptionalFields. +type SecurityDetectionsAPIEqlOptionalFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + EventCategoryOverride *SecurityDetectionsAPIEventCategoryOverride `json:"event_category_override,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // TiebreakerField Sets a secondary field for sorting events + TiebreakerField *SecurityDetectionsAPITiebreakerField `json:"tiebreaker_field,omitempty"` + + // TimestampField Specifies the name of the event timestamp field used for sorting a sequence of events. Not to be confused with `timestamp_override`, which specifies the more general field used for querying events within a range. Defaults to the @timestamp ECS field. + TimestampField *SecurityDetectionsAPITimestampField `json:"timestamp_field,omitempty"` +} + +// SecurityDetectionsAPIEqlQueryLanguage defines model for Security_Detections_API_EqlQueryLanguage. +type SecurityDetectionsAPIEqlQueryLanguage string + +// SecurityDetectionsAPIEqlRequiredFields defines model for Security_Detections_API_EqlRequiredFields. +type SecurityDetectionsAPIEqlRequiredFields struct { + Language SecurityDetectionsAPIEqlQueryLanguage `json:"language"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. 
Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // Type Rule type + Type SecurityDetectionsAPIEqlRequiredFieldsType `json:"type"` +} + +// SecurityDetectionsAPIEqlRequiredFieldsType Rule type +type SecurityDetectionsAPIEqlRequiredFieldsType string + +// SecurityDetectionsAPIEqlRule defines model for Security_Detections_API_EqlRule. +type SecurityDetectionsAPIEqlRule struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions []SecurityDetectionsAPIRuleAction `json:"actions"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author SecurityDetectionsAPIRuleAuthorArray `json:"author"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + CreatedAt time.Time `json:"created_at"` + CreatedBy string `json:"created_by"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled SecurityDetectionsAPIIsRuleEnabled `json:"enabled"` + EventCategoryOverride *SecurityDetectionsAPIEventCategoryOverride `json:"event_category_override,omitempty"` + ExceptionsList []SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list"` + + // ExecutionSummary Summary of the last execution of a rule. + // > info + // > This field is under development and its usage or schema may change + ExecutionSummary *SecurityDetectionsAPIRuleExecutionSummary `json:"execution_summary,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From SecurityDetectionsAPIRuleIntervalFrom `json:"from"` + + // Id A universally unique identifier + Id SecurityDetectionsAPIRuleObjectId `json:"id"` + + // Immutable This field determines whether the rule is a prebuilt Elastic rule. 
It will be replaced with the `rule_source` field. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Immutable SecurityDetectionsAPIIsRuleImmutable `json:"immutable"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval SecurityDetectionsAPIRuleInterval `json:"interval"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language SecurityDetectionsAPIEqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals SecurityDetectionsAPIMaxSignals `json:"max_signals"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. 
Defaults to an empty array. + References SecurityDetectionsAPIRuleReferenceArray `json:"references"` + RelatedIntegrations SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations"` + RequiredFields SecurityDetectionsAPIRequiredFieldArray `json:"required_fields"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // Revision The rule's revision number. + // + // It represents the version of rule's object in Kibana. It is set to `0` when the rule is installed or created and then gets incremented on each update. + // > info + // > Not all updates to any rule fields will increment the revision. Only those fields that are considered static `rule parameters` can trigger revision increments. For example, an update to a rule's query or index fields will increment the rule's revision by `1`. However, changes to dynamic or technical fields like enabled or execution_summary will not cause revision increments. + Revision SecurityDetectionsAPIRuleRevision `json:"revision"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId SecurityDetectionsAPIRuleSignatureId `json:"rule_id"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // RuleSource Discriminated union that determines whether the rule is internally sourced (created within the Kibana app) or has an external source, such as the Elastic Prebuilt rules repo. + RuleSource SecurityDetectionsAPIRuleSource `json:"rule_source"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. 
+ Setup SecurityDetectionsAPISetupGuide `json:"setup"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping SecurityDetectionsAPISeverityMapping `json:"severity_mapping"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags SecurityDetectionsAPIRuleTagArray `json:"tags"` + Threat SecurityDetectionsAPIThreatArray `json:"threat"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TiebreakerField Sets a secondary field for sorting events + TiebreakerField *SecurityDetectionsAPITiebreakerField `json:"tiebreaker_field,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampField Specifies the name of the event timestamp field used for sorting a sequence of events. Not to be confused with `timestamp_override`, which specifies the more general field used for querying events within a range. Defaults to the @timestamp ECS field. + TimestampField *SecurityDetectionsAPITimestampField `json:"timestamp_field,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To SecurityDetectionsAPIRuleIntervalTo `json:"to"` + + // Type Rule type + Type SecurityDetectionsAPIEqlRuleType `json:"type"` + UpdatedAt time.Time `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version SecurityDetectionsAPIRuleVersion `json:"version"` +} + +// SecurityDetectionsAPIEqlRuleType Rule type +type SecurityDetectionsAPIEqlRuleType string + +// SecurityDetectionsAPIEqlRuleCreateFields defines model for Security_Detections_API_EqlRuleCreateFields. +type SecurityDetectionsAPIEqlRuleCreateFields struct { + // AlertSuppression Defines alert suppression configuration. 
+ AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + EventCategoryOverride *SecurityDetectionsAPIEventCategoryOverride `json:"event_category_override,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language SecurityDetectionsAPIEqlQueryLanguage `json:"language"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // TiebreakerField Sets a secondary field for sorting events + TiebreakerField *SecurityDetectionsAPITiebreakerField `json:"tiebreaker_field,omitempty"` + + // TimestampField Specifies the name of the event timestamp field used for sorting a sequence of events. Not to be confused with `timestamp_override`, which specifies the more general field used for querying events within a range. Defaults to the @timestamp ECS field. + TimestampField *SecurityDetectionsAPITimestampField `json:"timestamp_field,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIEqlRuleCreateFieldsType `json:"type"` +} + +// SecurityDetectionsAPIEqlRuleCreateFieldsType Rule type +type SecurityDetectionsAPIEqlRuleCreateFieldsType string + +// SecurityDetectionsAPIEqlRuleCreateProps defines model for Security_Detections_API_EqlRuleCreateProps. +type SecurityDetectionsAPIEqlRuleCreateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). 
+ BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + EventCategoryOverride *SecurityDetectionsAPIEventCategoryOverride `json:"event_category_override,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language SecurityDetectionsAPIEqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. 
+ Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. 
+ RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TiebreakerField Sets a secondary field for sorting events + TiebreakerField *SecurityDetectionsAPITiebreakerField `json:"tiebreaker_field,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampField Specifies the name of the event timestamp field used for sorting a sequence of events. Not to be confused with `timestamp_override`, which specifies the more general field used for querying events within a range. Defaults to the @timestamp ECS field. + TimestampField *SecurityDetectionsAPITimestampField `json:"timestamp_field,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIEqlRuleCreatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. 
+ Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIEqlRuleCreatePropsType Rule type +type SecurityDetectionsAPIEqlRuleCreatePropsType string + +// SecurityDetectionsAPIEqlRulePatchFields defines model for Security_Detections_API_EqlRulePatchFields. +type SecurityDetectionsAPIEqlRulePatchFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + EventCategoryOverride *SecurityDetectionsAPIEventCategoryOverride `json:"event_category_override,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language *SecurityDetectionsAPIEqlQueryLanguage `json:"language,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // TiebreakerField Sets a secondary field for sorting events + TiebreakerField *SecurityDetectionsAPITiebreakerField `json:"tiebreaker_field,omitempty"` + + // TimestampField Specifies the name of the event timestamp field used for sorting a sequence of events. Not to be confused with `timestamp_override`, which specifies the more general field used for querying events within a range. Defaults to the @timestamp ECS field. + TimestampField *SecurityDetectionsAPITimestampField `json:"timestamp_field,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIEqlRulePatchFieldsType `json:"type,omitempty"` +} + +// SecurityDetectionsAPIEqlRulePatchFieldsType Rule type +type SecurityDetectionsAPIEqlRulePatchFieldsType string + +// SecurityDetectionsAPIEqlRulePatchProps defines model for Security_Detections_API_EqlRulePatchProps. +type SecurityDetectionsAPIEqlRulePatchProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. 
If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description *SecurityDetectionsAPIRuleDescription `json:"description,omitempty"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + EventCategoryOverride *SecurityDetectionsAPIEventCategoryOverride `json:"event_category_override,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIEqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). 
+ // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name *SecurityDetectionsAPIRuleName `json:"name,omitempty"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore *SecurityDetectionsAPIRiskScore `json:"risk_score,omitempty"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. 
It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity *SecurityDetectionsAPISeverity `json:"severity,omitempty"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TiebreakerField Sets a secondary field for sorting events + TiebreakerField *SecurityDetectionsAPITiebreakerField `json:"tiebreaker_field,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampField Specifies the name of the event timestamp field used for sorting a sequence of events. Not to be confused with `timestamp_override`, which specifies the more general field used for querying events within a range. Defaults to the @timestamp ECS field. + TimestampField *SecurityDetectionsAPITimestampField `json:"timestamp_field,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIEqlRulePatchPropsType `json:"type,omitempty"` + + // Version The rule's version number. 
+ // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIEqlRulePatchPropsType Rule type +type SecurityDetectionsAPIEqlRulePatchPropsType string + +// SecurityDetectionsAPIEqlRuleResponseFields defines model for Security_Detections_API_EqlRuleResponseFields. +type SecurityDetectionsAPIEqlRuleResponseFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + EventCategoryOverride *SecurityDetectionsAPIEventCategoryOverride `json:"event_category_override,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language SecurityDetectionsAPIEqlQueryLanguage `json:"language"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // TiebreakerField Sets a secondary field for sorting events + TiebreakerField *SecurityDetectionsAPITiebreakerField `json:"tiebreaker_field,omitempty"` + + // TimestampField Specifies the name of the event timestamp field used for sorting a sequence of events. Not to be confused with `timestamp_override`, which specifies the more general field used for querying events within a range. Defaults to the @timestamp ECS field. + TimestampField *SecurityDetectionsAPITimestampField `json:"timestamp_field,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIEqlRuleResponseFieldsType `json:"type"` +} + +// SecurityDetectionsAPIEqlRuleResponseFieldsType Rule type +type SecurityDetectionsAPIEqlRuleResponseFieldsType string + +// SecurityDetectionsAPIEqlRuleUpdateProps defines model for Security_Detections_API_EqlRuleUpdateProps. +type SecurityDetectionsAPIEqlRuleUpdateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. 
+ AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + EventCategoryOverride *SecurityDetectionsAPIEventCategoryOverride `json:"event_category_override,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language SecurityDetectionsAPIEqlQueryLanguage `json:"language"` + + // License The rule's license. 
+ License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TiebreakerField Sets a secondary field for sorting events + TiebreakerField *SecurityDetectionsAPITiebreakerField `json:"tiebreaker_field,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampField Specifies the name of the event timestamp field used for sorting a sequence of events. Not to be confused with `timestamp_override`, which specifies the more general field used for querying events within a range. 
Defaults to the @timestamp ECS field. + TimestampField *SecurityDetectionsAPITimestampField `json:"timestamp_field,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIEqlRuleUpdatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIEqlRuleUpdatePropsType Rule type +type SecurityDetectionsAPIEqlRuleUpdatePropsType string + +// SecurityDetectionsAPIErrorSchema defines model for Security_Detections_API_ErrorSchema. +type SecurityDetectionsAPIErrorSchema struct { + Error struct { + Message string `json:"message"` + StatusCode int `json:"status_code"` + } `json:"error"` + Id *string `json:"id,omitempty"` + ItemId *string `json:"item_id,omitempty"` + ListId *string `json:"list_id,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` +} + +// SecurityDetectionsAPIEsqlQueryLanguage defines model for Security_Detections_API_EsqlQueryLanguage. +type SecurityDetectionsAPIEsqlQueryLanguage string + +// SecurityDetectionsAPIEsqlRule defines model for Security_Detections_API_EsqlRule. +type SecurityDetectionsAPIEsqlRule struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions []SecurityDetectionsAPIRuleAction `json:"actions"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author SecurityDetectionsAPIRuleAuthorArray `json:"author"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. 
+ // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + CreatedAt time.Time `json:"created_at"` + CreatedBy string `json:"created_by"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled SecurityDetectionsAPIIsRuleEnabled `json:"enabled"` + ExceptionsList []SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list"` + + // ExecutionSummary Summary of the last execution of a rule. + // > info + // > This field is under development and its usage or schema may change + ExecutionSummary *SecurityDetectionsAPIRuleExecutionSummary `json:"execution_summary,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From SecurityDetectionsAPIRuleIntervalFrom `json:"from"` + + // Id A universally unique identifier + Id SecurityDetectionsAPIRuleObjectId `json:"id"` + + // Immutable This field determines whether the rule is a prebuilt Elastic rule. It will be replaced with the `rule_source` field. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Immutable SecurityDetectionsAPIIsRuleImmutable `json:"immutable"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval SecurityDetectionsAPIRuleInterval `json:"interval"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language SecurityDetectionsAPIEsqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals SecurityDetectionsAPIMaxSignals `json:"max_signals"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. 
+ Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References SecurityDetectionsAPIRuleReferenceArray `json:"references"` + RelatedIntegrations SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations"` + RequiredFields SecurityDetectionsAPIRequiredFieldArray `json:"required_fields"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // Revision The rule's revision number. + // + // It represents the version of rule's object in Kibana. It is set to `0` when the rule is installed or created and then gets incremented on each update. + // > info + // > Not all updates to any rule fields will increment the revision. Only those fields that are considered static `rule parameters` can trigger revision increments. For example, an update to a rule's query or index fields will increment the rule's revision by `1`. However, changes to dynamic or technical fields like enabled or execution_summary will not cause revision increments. + Revision SecurityDetectionsAPIRuleRevision `json:"revision"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId SecurityDetectionsAPIRuleSignatureId `json:"rule_id"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). 
When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // RuleSource Discriminated union that determines whether the rule is internally sourced (created within the Kibana app) or has an external source, such as the Elastic Prebuilt rules repo. + RuleSource SecurityDetectionsAPIRuleSource `json:"rule_source"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup SecurityDetectionsAPISetupGuide `json:"setup"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping SecurityDetectionsAPISeverityMapping `json:"severity_mapping"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags SecurityDetectionsAPIRuleTagArray `json:"tags"` + Threat SecurityDetectionsAPIThreatArray `json:"threat"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To SecurityDetectionsAPIRuleIntervalTo `json:"to"` + + // Type Rule type + Type SecurityDetectionsAPIEsqlRuleType `json:"type"` + UpdatedAt time.Time `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version SecurityDetectionsAPIRuleVersion `json:"version"` +} + +// SecurityDetectionsAPIEsqlRuleType Rule type +type SecurityDetectionsAPIEsqlRuleType string + +// SecurityDetectionsAPIEsqlRuleCreateFields defines model for Security_Detections_API_EsqlRuleCreateFields. 
+type SecurityDetectionsAPIEsqlRuleCreateFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + Language SecurityDetectionsAPIEsqlQueryLanguage `json:"language"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // Type Rule type + Type SecurityDetectionsAPIEsqlRuleCreateFieldsType `json:"type"` +} + +// SecurityDetectionsAPIEsqlRuleCreateFieldsType Rule type +type SecurityDetectionsAPIEsqlRuleCreateFieldsType string + +// SecurityDetectionsAPIEsqlRuleCreateProps defines model for Security_Detections_API_EsqlRuleCreateProps. +type SecurityDetectionsAPIEsqlRuleCreateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. 
These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language SecurityDetectionsAPIEsqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. 
+ TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIEsqlRuleCreatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIEsqlRuleCreatePropsType Rule type +type SecurityDetectionsAPIEsqlRuleCreatePropsType string + +// SecurityDetectionsAPIEsqlRuleOptionalFields defines model for Security_Detections_API_EsqlRuleOptionalFields. +type SecurityDetectionsAPIEsqlRuleOptionalFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` +} + +// SecurityDetectionsAPIEsqlRulePatchProps defines model for Security_Detections_API_EsqlRulePatchProps. +type SecurityDetectionsAPIEsqlRulePatchProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + + // Description The rule’s description. + Description *SecurityDetectionsAPIRuleDescription `json:"description,omitempty"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. 
For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIEsqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name *SecurityDetectionsAPIRuleName `json:"name,omitempty"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. 
+ References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore *SecurityDetectionsAPIRiskScore `json:"risk_score,omitempty"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity *SecurityDetectionsAPISeverity `json:"severity,omitempty"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. 
+ Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIEsqlRulePatchPropsType `json:"type,omitempty"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIEsqlRulePatchPropsType Rule type +type SecurityDetectionsAPIEsqlRulePatchPropsType string + +// SecurityDetectionsAPIEsqlRuleRequiredFields defines model for Security_Detections_API_EsqlRuleRequiredFields. +type SecurityDetectionsAPIEsqlRuleRequiredFields struct { + Language SecurityDetectionsAPIEsqlQueryLanguage `json:"language"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // Type Rule type + Type SecurityDetectionsAPIEsqlRuleRequiredFieldsType `json:"type"` +} + +// SecurityDetectionsAPIEsqlRuleRequiredFieldsType Rule type +type SecurityDetectionsAPIEsqlRuleRequiredFieldsType string + +// SecurityDetectionsAPIEsqlRuleResponseFields defines model for Security_Detections_API_EsqlRuleResponseFields. +type SecurityDetectionsAPIEsqlRuleResponseFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + Language SecurityDetectionsAPIEsqlQueryLanguage `json:"language"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. 
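+	// NOTE (illustrative, not part of the generated model): the *PatchProps
+	// types above use pointer fields with `omitempty`, so a patch request only
+	// serializes the fields that are explicitly set. A minimal sketch, assuming
+	// the referenced scalar types alias string as their JSON usage suggests:
+	//
+	//	ruleID := SecurityDetectionsAPIRuleSignatureId("my-esql-rule")   // example value
+	//	name := SecurityDetectionsAPIRuleName("Renamed ES|QL rule")      // example value
+	//	interval := SecurityDetectionsAPIRuleInterval("10m")
+	//	patch := SecurityDetectionsAPIEsqlRulePatchProps{
+	//		RuleId:   &ruleID,
+	//		Name:     &name,
+	//		Interval: &interval,
+	//	}
+	//	body, _ := json.Marshal(patch) // only rule_id, name and interval are emitted
+	//
+	// Any endpoint or field behaviour beyond what the doc comments state is an assumption.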
+ Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // Type Rule type + Type SecurityDetectionsAPIEsqlRuleResponseFieldsType `json:"type"` +} + +// SecurityDetectionsAPIEsqlRuleResponseFieldsType Rule type +type SecurityDetectionsAPIEsqlRuleResponseFieldsType string + +// SecurityDetectionsAPIEsqlRuleUpdateProps defines model for Security_Detections_API_EsqlRuleUpdateProps. +type SecurityDetectionsAPIEsqlRuleUpdateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language SecurityDetectionsAPIEsqlQueryLanguage `json:"language"` + + // License The rule's license. 
+ License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. 
+ TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIEsqlRuleUpdatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIEsqlRuleUpdatePropsType Rule type +type SecurityDetectionsAPIEsqlRuleUpdatePropsType string + +// SecurityDetectionsAPIEventCategoryOverride defines model for Security_Detections_API_EventCategoryOverride. +type SecurityDetectionsAPIEventCategoryOverride = string + +// SecurityDetectionsAPIExceptionListType The exception type +type SecurityDetectionsAPIExceptionListType string + +// SecurityDetectionsAPIExternalRuleSource Type of rule source for externally sourced rules, i.e. rules that have an external source, such as the Elastic Prebuilt rules repo. +type SecurityDetectionsAPIExternalRuleSource struct { + // IsCustomized Determines whether an external/prebuilt rule has been customized by the user (i.e. any of its fields have been modified and diverged from the base value). + IsCustomized SecurityDetectionsAPIIsExternalRuleCustomized `json:"is_customized"` + Type SecurityDetectionsAPIExternalRuleSourceType `json:"type"` +} + +// SecurityDetectionsAPIExternalRuleSourceType defines model for SecurityDetectionsAPIExternalRuleSource.Type. +type SecurityDetectionsAPIExternalRuleSourceType string + +// SecurityDetectionsAPIFindRulesSortField defines model for Security_Detections_API_FindRulesSortField. +type SecurityDetectionsAPIFindRulesSortField string + +// SecurityDetectionsAPIHistoryWindowStart Start date to use when checking if a term has been seen before. Supports relative dates – for example, now-30d will search the last 30 days of data when checking if a term is new. We do not recommend using absolute dates, which can cause issues with rule performance due to querying increasing amounts of data over time. +type SecurityDetectionsAPIHistoryWindowStart = string + +// SecurityDetectionsAPIIndexMigrationStatus defines model for Security_Detections_API_IndexMigrationStatus. +type SecurityDetectionsAPIIndexMigrationStatus struct { + // Index A string that does not contain only whitespace characters + Index SecurityDetectionsAPINonEmptyString `json:"index"` + IsOutdated bool `json:"is_outdated"` + Migrations []SecurityDetectionsAPIMigrationStatus `json:"migrations"` + SignalVersions []SecurityDetectionsAPIAlertVersion `json:"signal_versions"` + Version int `json:"version"` +} + +// SecurityDetectionsAPIIndexPatternArray Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). 
+// > info +// > This field is not supported for ES|QL rules. +type SecurityDetectionsAPIIndexPatternArray = []string + +// SecurityDetectionsAPIInternalRuleSource Type of rule source for internally sourced rules, i.e. created within the Kibana apps. +type SecurityDetectionsAPIInternalRuleSource struct { + Type SecurityDetectionsAPIInternalRuleSourceType `json:"type"` +} + +// SecurityDetectionsAPIInternalRuleSourceType defines model for SecurityDetectionsAPIInternalRuleSource.Type. +type SecurityDetectionsAPIInternalRuleSourceType string + +// SecurityDetectionsAPIInvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight +// in various features in the UI such as alert details flyout and exceptions auto-population from alert. +type SecurityDetectionsAPIInvestigationFields struct { + FieldNames []SecurityDetectionsAPINonEmptyString `json:"field_names"` +} + +// SecurityDetectionsAPIInvestigationGuide Notes to help investigate alerts produced by the rule. +type SecurityDetectionsAPIInvestigationGuide = string + +// SecurityDetectionsAPIIsExternalRuleCustomized Determines whether an external/prebuilt rule has been customized by the user (i.e. any of its fields have been modified and diverged from the base value). +type SecurityDetectionsAPIIsExternalRuleCustomized = bool + +// SecurityDetectionsAPIIsRuleEnabled Determines whether the rule is enabled. Defaults to true. +type SecurityDetectionsAPIIsRuleEnabled = bool + +// SecurityDetectionsAPIIsRuleImmutable This field determines whether the rule is a prebuilt Elastic rule. It will be replaced with the `rule_source` field. +type SecurityDetectionsAPIIsRuleImmutable = bool + +// SecurityDetectionsAPIItemsPerSearch defines model for Security_Detections_API_ItemsPerSearch. +type SecurityDetectionsAPIItemsPerSearch = int + +// SecurityDetectionsAPIKqlQueryLanguage defines model for Security_Detections_API_KqlQueryLanguage. +type SecurityDetectionsAPIKqlQueryLanguage string + +// SecurityDetectionsAPIMachineLearningJobId Machine learning job ID(s) the rule monitors for anomaly scores. +type SecurityDetectionsAPIMachineLearningJobId struct { + union json.RawMessage +} + +// SecurityDetectionsAPIMachineLearningJobId0 defines model for . +type SecurityDetectionsAPIMachineLearningJobId0 = string + +// SecurityDetectionsAPIMachineLearningJobId1 defines model for . +type SecurityDetectionsAPIMachineLearningJobId1 = []string + +// SecurityDetectionsAPIMachineLearningRule defines model for Security_Detections_API_MachineLearningRule. +type SecurityDetectionsAPIMachineLearningRule struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions []SecurityDetectionsAPIRuleAction `json:"actions"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // AnomalyThreshold Anomaly score threshold above which the rule creates an alert. Valid values are from 0 to 100. + AnomalyThreshold SecurityDetectionsAPIAnomalyThreshold `json:"anomaly_threshold"` + + // Author The rule’s author. + Author SecurityDetectionsAPIRuleAuthorArray `json:"author"` + + // BuildingBlockType Determines if the rule acts as a building block. 
If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + CreatedAt time.Time `json:"created_at"` + CreatedBy string `json:"created_by"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled SecurityDetectionsAPIIsRuleEnabled `json:"enabled"` + ExceptionsList []SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list"` + + // ExecutionSummary Summary of the last execution of a rule. + // > info + // > This field is under development and its usage or schema may change + ExecutionSummary *SecurityDetectionsAPIRuleExecutionSummary `json:"execution_summary,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From SecurityDetectionsAPIRuleIntervalFrom `json:"from"` + + // Id A universally unique identifier + Id SecurityDetectionsAPIRuleObjectId `json:"id"` + + // Immutable This field determines whether the rule is a prebuilt Elastic rule. It will be replaced with the `rule_source` field. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Immutable SecurityDetectionsAPIIsRuleImmutable `json:"immutable"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval SecurityDetectionsAPIRuleInterval `json:"interval"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MachineLearningJobId Machine learning job ID(s) the rule monitors for anomaly scores. + MachineLearningJobId SecurityDetectionsAPIMachineLearningJobId `json:"machine_learning_job_id"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. 
For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals SecurityDetectionsAPIMaxSignals `json:"max_signals"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References SecurityDetectionsAPIRuleReferenceArray `json:"references"` + RelatedIntegrations SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations"` + RequiredFields SecurityDetectionsAPIRequiredFieldArray `json:"required_fields"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // Revision The rule's revision number. + // + // It represents the version of rule's object in Kibana. It is set to `0` when the rule is installed or created and then gets incremented on each update. + // > info + // > Not all updates to any rule fields will increment the revision. Only those fields that are considered static `rule parameters` can trigger revision increments. For example, an update to a rule's query or index fields will increment the rule's revision by `1`. However, changes to dynamic or technical fields like enabled or execution_summary will not cause revision increments. + Revision SecurityDetectionsAPIRuleRevision `json:"revision"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId SecurityDetectionsAPIRuleSignatureId `json:"rule_id"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. 
+ RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // RuleSource Discriminated union that determines whether the rule is internally sourced (created within the Kibana app) or has an external source, such as the Elastic Prebuilt rules repo. + RuleSource SecurityDetectionsAPIRuleSource `json:"rule_source"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup SecurityDetectionsAPISetupGuide `json:"setup"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping SecurityDetectionsAPISeverityMapping `json:"severity_mapping"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags SecurityDetectionsAPIRuleTagArray `json:"tags"` + Threat SecurityDetectionsAPIThreatArray `json:"threat"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To SecurityDetectionsAPIRuleIntervalTo `json:"to"` + + // Type Rule type + Type SecurityDetectionsAPIMachineLearningRuleType `json:"type"` + UpdatedAt time.Time `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version SecurityDetectionsAPIRuleVersion `json:"version"` +} + +// SecurityDetectionsAPIMachineLearningRuleType Rule type +type SecurityDetectionsAPIMachineLearningRuleType string + +// SecurityDetectionsAPIMachineLearningRuleCreateFields defines model for Security_Detections_API_MachineLearningRuleCreateFields. +type SecurityDetectionsAPIMachineLearningRuleCreateFields struct { + // AlertSuppression Defines alert suppression configuration. 
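+	// NOTE (illustrative, not generated): SecurityDetectionsAPIMachineLearningJobId is a
+	// union over a single job ID or a list of job IDs, carried as raw JSON. Both of the
+	// following request fragments are therefore valid for machine learning rules
+	// (the job IDs are made-up examples):
+	//
+	//	"machine_learning_job_id": "auth_rare_user"
+	//	"machine_learning_job_id": ["auth_rare_user", "auth_rare_hour_for_a_user"]
+	//
+	// How the raw message is decoded into one of the two variants depends on the
+	// generated helper methods, which are not shown in this hunk.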
+ AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + + // AnomalyThreshold Anomaly score threshold above which the rule creates an alert. Valid values are from 0 to 100. + AnomalyThreshold SecurityDetectionsAPIAnomalyThreshold `json:"anomaly_threshold"` + + // MachineLearningJobId Machine learning job ID(s) the rule monitors for anomaly scores. + MachineLearningJobId SecurityDetectionsAPIMachineLearningJobId `json:"machine_learning_job_id"` + + // Type Rule type + Type SecurityDetectionsAPIMachineLearningRuleCreateFieldsType `json:"type"` +} + +// SecurityDetectionsAPIMachineLearningRuleCreateFieldsType Rule type +type SecurityDetectionsAPIMachineLearningRuleCreateFieldsType string + +// SecurityDetectionsAPIMachineLearningRuleCreateProps defines model for Security_Detections_API_MachineLearningRuleCreateProps. +type SecurityDetectionsAPIMachineLearningRuleCreateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // AnomalyThreshold Anomaly score threshold above which the rule creates an alert. Valid values are from 0 to 100. + AnomalyThreshold SecurityDetectionsAPIAnomalyThreshold `json:"anomaly_threshold"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. 
These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MachineLearningJobId Machine learning job ID(s) the rule monitors for anomaly scores. + MachineLearningJobId SecurityDetectionsAPIMachineLearningJobId `json:"machine_learning_job_id"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. 
+ TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIMachineLearningRuleCreatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIMachineLearningRuleCreatePropsType Rule type +type SecurityDetectionsAPIMachineLearningRuleCreatePropsType string + +// SecurityDetectionsAPIMachineLearningRuleOptionalFields defines model for Security_Detections_API_MachineLearningRuleOptionalFields. +type SecurityDetectionsAPIMachineLearningRuleOptionalFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` +} + +// SecurityDetectionsAPIMachineLearningRulePatchFields defines model for Security_Detections_API_MachineLearningRulePatchFields. +type SecurityDetectionsAPIMachineLearningRulePatchFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + + // AnomalyThreshold Anomaly score threshold above which the rule creates an alert. Valid values are from 0 to 100. + AnomalyThreshold *SecurityDetectionsAPIAnomalyThreshold `json:"anomaly_threshold,omitempty"` + + // MachineLearningJobId Machine learning job ID(s) the rule monitors for anomaly scores. + MachineLearningJobId *SecurityDetectionsAPIMachineLearningJobId `json:"machine_learning_job_id,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIMachineLearningRulePatchFieldsType `json:"type,omitempty"` +} + +// SecurityDetectionsAPIMachineLearningRulePatchFieldsType Rule type +type SecurityDetectionsAPIMachineLearningRulePatchFieldsType string + +// SecurityDetectionsAPIMachineLearningRulePatchProps defines model for Security_Detections_API_MachineLearningRulePatchProps. +type SecurityDetectionsAPIMachineLearningRulePatchProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // AnomalyThreshold Anomaly score threshold above which the rule creates an alert. Valid values are from 0 to 100. 
+ AnomalyThreshold *SecurityDetectionsAPIAnomalyThreshold `json:"anomaly_threshold,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + + // Description The rule’s description. + Description *SecurityDetectionsAPIRuleDescription `json:"description,omitempty"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MachineLearningJobId Machine learning job ID(s) the rule monitors for anomaly scores. + MachineLearningJobId *SecurityDetectionsAPIMachineLearningJobId `json:"machine_learning_job_id,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. 
+ Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name *SecurityDetectionsAPIRuleName `json:"name,omitempty"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore *SecurityDetectionsAPIRiskScore `json:"risk_score,omitempty"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. 
+ Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity *SecurityDetectionsAPISeverity `json:"severity,omitempty"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIMachineLearningRulePatchPropsType `json:"type,omitempty"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIMachineLearningRulePatchPropsType Rule type +type SecurityDetectionsAPIMachineLearningRulePatchPropsType string + +// SecurityDetectionsAPIMachineLearningRuleRequiredFields defines model for Security_Detections_API_MachineLearningRuleRequiredFields. +type SecurityDetectionsAPIMachineLearningRuleRequiredFields struct { + // AnomalyThreshold Anomaly score threshold above which the rule creates an alert. Valid values are from 0 to 100. + AnomalyThreshold SecurityDetectionsAPIAnomalyThreshold `json:"anomaly_threshold"` + + // MachineLearningJobId Machine learning job ID(s) the rule monitors for anomaly scores. 
+ MachineLearningJobId SecurityDetectionsAPIMachineLearningJobId `json:"machine_learning_job_id"` + + // Type Rule type + Type SecurityDetectionsAPIMachineLearningRuleRequiredFieldsType `json:"type"` +} + +// SecurityDetectionsAPIMachineLearningRuleRequiredFieldsType Rule type +type SecurityDetectionsAPIMachineLearningRuleRequiredFieldsType string + +// SecurityDetectionsAPIMachineLearningRuleResponseFields defines model for Security_Detections_API_MachineLearningRuleResponseFields. +type SecurityDetectionsAPIMachineLearningRuleResponseFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + + // AnomalyThreshold Anomaly score threshold above which the rule creates an alert. Valid values are from 0 to 100. + AnomalyThreshold SecurityDetectionsAPIAnomalyThreshold `json:"anomaly_threshold"` + + // MachineLearningJobId Machine learning job ID(s) the rule monitors for anomaly scores. + MachineLearningJobId SecurityDetectionsAPIMachineLearningJobId `json:"machine_learning_job_id"` + + // Type Rule type + Type SecurityDetectionsAPIMachineLearningRuleResponseFieldsType `json:"type"` +} + +// SecurityDetectionsAPIMachineLearningRuleResponseFieldsType Rule type +type SecurityDetectionsAPIMachineLearningRuleResponseFieldsType string + +// SecurityDetectionsAPIMachineLearningRuleUpdateProps defines model for Security_Detections_API_MachineLearningRuleUpdateProps. +type SecurityDetectionsAPIMachineLearningRuleUpdateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // AnomalyThreshold Anomaly score threshold above which the rule creates an alert. Valid values are from 0 to 100. + AnomalyThreshold SecurityDetectionsAPIAnomalyThreshold `json:"anomaly_threshold"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. 
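+	// NOTE (illustrative, not generated): unlike the *PatchProps variants, the
+	// *UpdateProps types keep rule-defining fields such as Name, Description,
+	// RiskScore, Severity, AnomalyThreshold and MachineLearningJobId as
+	// non-pointer (required) values, so an update replaces the rule definition
+	// rather than changing individual fields. A hedged sketch, assuming the
+	// scalar types alias string/int as their JSON shapes suggest:
+	//
+	//	update := SecurityDetectionsAPIMachineLearningRuleUpdateProps{
+	//		Name:                 "Anomalous auth activity",           // example values
+	//		Description:          "Alerts on anomalous authentication",
+	//		RiskScore:            73,
+	//		Severity:             "high",
+	//		AnomalyThreshold:     75,
+	//		MachineLearningJobId: jobID, // union value built elsewhere
+	//		// type discriminator and any other required fields elided
+	//	}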
+ FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MachineLearningJobId Machine learning job ID(s) the rule monitors for anomaly scores. + MachineLearningJobId SecurityDetectionsAPIMachineLearningJobId `json:"machine_learning_job_id"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. 
+ // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. 
The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIMachineLearningRuleUpdatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIMachineLearningRuleUpdatePropsType Rule type +type SecurityDetectionsAPIMachineLearningRuleUpdatePropsType string + +// SecurityDetectionsAPIMaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). +// > info +// > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. +type SecurityDetectionsAPIMaxSignals = int + +// SecurityDetectionsAPIMigrationCleanupResult defines model for Security_Detections_API_MigrationCleanupResult. +type SecurityDetectionsAPIMigrationCleanupResult struct { + DestinationIndex string `json:"destinationIndex"` + Error *struct { + Message string `json:"message"` + StatusCode int `json:"status_code"` + } `json:"error,omitempty"` + Id string `json:"id"` + SourceIndex string `json:"sourceIndex"` + Status SecurityDetectionsAPIMigrationCleanupResultStatus `json:"status"` + Updated time.Time `json:"updated"` + Version string `json:"version"` +} + +// SecurityDetectionsAPIMigrationCleanupResultStatus defines model for SecurityDetectionsAPIMigrationCleanupResult.Status. +type SecurityDetectionsAPIMigrationCleanupResultStatus string + +// SecurityDetectionsAPIMigrationFinalizationResult defines model for Security_Detections_API_MigrationFinalizationResult. +type SecurityDetectionsAPIMigrationFinalizationResult struct { + Completed bool `json:"completed"` + DestinationIndex string `json:"destinationIndex"` + Error *struct { + Message string `json:"message"` + StatusCode int `json:"status_code"` + } `json:"error,omitempty"` + Id string `json:"id"` + SourceIndex string `json:"sourceIndex"` + Status SecurityDetectionsAPIMigrationFinalizationResultStatus `json:"status"` + Updated time.Time `json:"updated"` + Version string `json:"version"` +} + +// SecurityDetectionsAPIMigrationFinalizationResultStatus defines model for SecurityDetectionsAPIMigrationFinalizationResult.Status. 
+type SecurityDetectionsAPIMigrationFinalizationResultStatus string + +// SecurityDetectionsAPIMigrationStatus defines model for Security_Detections_API_MigrationStatus. +type SecurityDetectionsAPIMigrationStatus struct { + // Id A string that does not contain only whitespace characters + Id SecurityDetectionsAPINonEmptyString `json:"id"` + Status SecurityDetectionsAPIMigrationStatusStatus `json:"status"` + Updated time.Time `json:"updated"` + Version int `json:"version"` +} + +// SecurityDetectionsAPIMigrationStatusStatus defines model for SecurityDetectionsAPIMigrationStatus.Status. +type SecurityDetectionsAPIMigrationStatusStatus string + +// SecurityDetectionsAPINewTermsFields Fields to monitor for new values. +type SecurityDetectionsAPINewTermsFields = []string + +// SecurityDetectionsAPINewTermsRule defines model for Security_Detections_API_NewTermsRule. +type SecurityDetectionsAPINewTermsRule struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions []SecurityDetectionsAPIRuleAction `json:"actions"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author SecurityDetectionsAPIRuleAuthorArray `json:"author"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + CreatedAt time.Time `json:"created_at"` + CreatedBy string `json:"created_by"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled SecurityDetectionsAPIIsRuleEnabled `json:"enabled"` + ExceptionsList []SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list"` + + // ExecutionSummary Summary of the last execution of a rule. + // > info + // > This field is under development and its usage or schema may change + ExecutionSummary *SecurityDetectionsAPIRuleExecutionSummary `json:"execution_summary,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. 
Defaults to now-6m (analyzes data from 6 minutes before the start time). + From SecurityDetectionsAPIRuleIntervalFrom `json:"from"` + + // HistoryWindowStart Start date to use when checking if a term has been seen before. Supports relative dates – for example, now-30d will search the last 30 days of data when checking if a term is new. We do not recommend using absolute dates, which can cause issues with rule performance due to querying increasing amounts of data over time. + HistoryWindowStart SecurityDetectionsAPIHistoryWindowStart `json:"history_window_start"` + + // Id A universally unique identifier + Id SecurityDetectionsAPIRuleObjectId `json:"id"` + + // Immutable This field determines whether the rule is a prebuilt Elastic rule. It will be replaced with the `rule_source` field. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Immutable SecurityDetectionsAPIIsRuleImmutable `json:"immutable"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval SecurityDetectionsAPIRuleInterval `json:"interval"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language SecurityDetectionsAPIKqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals SecurityDetectionsAPIMaxSignals `json:"max_signals"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // NewTermsFields Fields to monitor for new values. + NewTermsFields SecurityDetectionsAPINewTermsFields `json:"new_terms_fields"` + + // Note Notes to help investigate alerts produced by the rule. 
+ Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References SecurityDetectionsAPIRuleReferenceArray `json:"references"` + RelatedIntegrations SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations"` + RequiredFields SecurityDetectionsAPIRequiredFieldArray `json:"required_fields"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // Revision The rule's revision number. + // + // It represents the version of rule's object in Kibana. It is set to `0` when the rule is installed or created and then gets incremented on each update. + // > info + // > Not all updates to any rule fields will increment the revision. Only those fields that are considered static `rule parameters` can trigger revision increments. For example, an update to a rule's query or index fields will increment the rule's revision by `1`. However, changes to dynamic or technical fields like enabled or execution_summary will not cause revision increments. + Revision SecurityDetectionsAPIRuleRevision `json:"revision"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId SecurityDetectionsAPIRuleSignatureId `json:"rule_id"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. 
+ RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // RuleSource Discriminated union that determines whether the rule is internally sourced (created within the Kibana app) or has an external source, such as the Elastic Prebuilt rules repo. + RuleSource SecurityDetectionsAPIRuleSource `json:"rule_source"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup SecurityDetectionsAPISetupGuide `json:"setup"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping SecurityDetectionsAPISeverityMapping `json:"severity_mapping"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags SecurityDetectionsAPIRuleTagArray `json:"tags"` + Threat SecurityDetectionsAPIThreatArray `json:"threat"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To SecurityDetectionsAPIRuleIntervalTo `json:"to"` + + // Type Rule type + Type SecurityDetectionsAPINewTermsRuleType `json:"type"` + UpdatedAt time.Time `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version SecurityDetectionsAPIRuleVersion `json:"version"` +} + +// SecurityDetectionsAPINewTermsRuleType Rule type +type SecurityDetectionsAPINewTermsRuleType string + +// SecurityDetectionsAPINewTermsRuleCreateFields defines model for Security_Detections_API_NewTermsRuleCreateFields. +type SecurityDetectionsAPINewTermsRuleCreateFields struct { + // AlertSuppression Defines alert suppression configuration. 
+ AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // HistoryWindowStart Start date to use when checking if a term has been seen before. Supports relative dates – for example, now-30d will search the last 30 days of data when checking if a term is new. We do not recommend using absolute dates, which can cause issues with rule performance due to querying increasing amounts of data over time. + HistoryWindowStart SecurityDetectionsAPIHistoryWindowStart `json:"history_window_start"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // NewTermsFields Fields to monitor for new values. + NewTermsFields SecurityDetectionsAPINewTermsFields `json:"new_terms_fields"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // Type Rule type + Type SecurityDetectionsAPINewTermsRuleCreateFieldsType `json:"type"` +} + +// SecurityDetectionsAPINewTermsRuleCreateFieldsType Rule type +type SecurityDetectionsAPINewTermsRuleCreateFieldsType string + +// SecurityDetectionsAPINewTermsRuleCreateProps defines model for Security_Detections_API_NewTermsRuleCreateProps. +type SecurityDetectionsAPINewTermsRuleCreateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). 
+ BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // HistoryWindowStart Start date to use when checking if a term has been seen before. Supports relative dates – for example, now-30d will search the last 30 days of data when checking if a term is new. We do not recommend using absolute dates, which can cause issues with rule performance due to querying increasing amounts of data over time. + HistoryWindowStart SecurityDetectionsAPIHistoryWindowStart `json:"history_window_start"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. 
For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // NewTermsFields Fields to monitor for new values. + NewTermsFields SecurityDetectionsAPINewTermsFields `json:"new_terms_fields"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. 
+ RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPINewTermsRuleCreatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPINewTermsRuleCreatePropsType Rule type +type SecurityDetectionsAPINewTermsRuleCreatePropsType string + +// SecurityDetectionsAPINewTermsRuleDefaultableFields defines model for Security_Detections_API_NewTermsRuleDefaultableFields. 
+type SecurityDetectionsAPINewTermsRuleDefaultableFields struct { + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` +} + +// SecurityDetectionsAPINewTermsRuleOptionalFields defines model for Security_Detections_API_NewTermsRuleOptionalFields. +type SecurityDetectionsAPINewTermsRuleOptionalFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` +} + +// SecurityDetectionsAPINewTermsRulePatchFields defines model for Security_Detections_API_NewTermsRulePatchFields. +type SecurityDetectionsAPINewTermsRulePatchFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // HistoryWindowStart Start date to use when checking if a term has been seen before. Supports relative dates – for example, now-30d will search the last 30 days of data when checking if a term is new. We do not recommend using absolute dates, which can cause issues with rule performance due to querying increasing amounts of data over time. + HistoryWindowStart *SecurityDetectionsAPIHistoryWindowStart `json:"history_window_start,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // NewTermsFields Fields to monitor for new values. + NewTermsFields *SecurityDetectionsAPINewTermsFields `json:"new_terms_fields,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. 
+ Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPINewTermsRulePatchFieldsType `json:"type,omitempty"` +} + +// SecurityDetectionsAPINewTermsRulePatchFieldsType Rule type +type SecurityDetectionsAPINewTermsRulePatchFieldsType string + +// SecurityDetectionsAPINewTermsRulePatchProps defines model for Security_Detections_API_NewTermsRulePatchProps. +type SecurityDetectionsAPINewTermsRulePatchProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description *SecurityDetectionsAPIRuleDescription `json:"description,omitempty"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // HistoryWindowStart Start date to use when checking if a term has been seen before. Supports relative dates – for example, now-30d will search the last 30 days of data when checking if a term is new. We do not recommend using absolute dates, which can cause issues with rule performance due to querying increasing amounts of data over time. + HistoryWindowStart *SecurityDetectionsAPIHistoryWindowStart `json:"history_window_start,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. 
Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name *SecurityDetectionsAPIRuleName `json:"name,omitempty"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // NewTermsFields Fields to monitor for new values. + NewTermsFields *SecurityDetectionsAPINewTermsFields `json:"new_terms_fields,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. 
+ References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore *SecurityDetectionsAPIRiskScore `json:"risk_score,omitempty"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity *SecurityDetectionsAPISeverity `json:"severity,omitempty"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. 
+ Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPINewTermsRulePatchPropsType `json:"type,omitempty"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPINewTermsRulePatchPropsType Rule type +type SecurityDetectionsAPINewTermsRulePatchPropsType string + +// SecurityDetectionsAPINewTermsRuleRequiredFields defines model for Security_Detections_API_NewTermsRuleRequiredFields. +type SecurityDetectionsAPINewTermsRuleRequiredFields struct { + // HistoryWindowStart Start date to use when checking if a term has been seen before. Supports relative dates – for example, now-30d will search the last 30 days of data when checking if a term is new. We do not recommend using absolute dates, which can cause issues with rule performance due to querying increasing amounts of data over time. + HistoryWindowStart SecurityDetectionsAPIHistoryWindowStart `json:"history_window_start"` + + // NewTermsFields Fields to monitor for new values. + NewTermsFields SecurityDetectionsAPINewTermsFields `json:"new_terms_fields"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // Type Rule type + Type SecurityDetectionsAPINewTermsRuleRequiredFieldsType `json:"type"` +} + +// SecurityDetectionsAPINewTermsRuleRequiredFieldsType Rule type +type SecurityDetectionsAPINewTermsRuleRequiredFieldsType string + +// SecurityDetectionsAPINewTermsRuleResponseFields defines model for Security_Detections_API_NewTermsRuleResponseFields. +type SecurityDetectionsAPINewTermsRuleResponseFields struct { + // AlertSuppression Defines alert suppression configuration. 
+ AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // HistoryWindowStart Start date to use when checking if a term has been seen before. Supports relative dates – for example, now-30d will search the last 30 days of data when checking if a term is new. We do not recommend using absolute dates, which can cause issues with rule performance due to querying increasing amounts of data over time. + HistoryWindowStart SecurityDetectionsAPIHistoryWindowStart `json:"history_window_start"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language SecurityDetectionsAPIKqlQueryLanguage `json:"language"` + + // NewTermsFields Fields to monitor for new values. + NewTermsFields SecurityDetectionsAPINewTermsFields `json:"new_terms_fields"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // Type Rule type + Type SecurityDetectionsAPINewTermsRuleResponseFieldsType `json:"type"` +} + +// SecurityDetectionsAPINewTermsRuleResponseFieldsType Rule type +type SecurityDetectionsAPINewTermsRuleResponseFieldsType string + +// SecurityDetectionsAPINewTermsRuleUpdateProps defines model for Security_Detections_API_NewTermsRuleUpdateProps. +type SecurityDetectionsAPINewTermsRuleUpdateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). 
+ BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // HistoryWindowStart Start date to use when checking if a term has been seen before. Supports relative dates – for example, now-30d will search the last 30 days of data when checking if a term is new. We do not recommend using absolute dates, which can cause issues with rule performance due to querying increasing amounts of data over time. + HistoryWindowStart SecurityDetectionsAPIHistoryWindowStart `json:"history_window_start"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). 
+ // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // NewTermsFields Fields to monitor for new values. + NewTermsFields SecurityDetectionsAPINewTermsFields `json:"new_terms_fields"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. 
It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPINewTermsRuleUpdatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. 
+ Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPINewTermsRuleUpdatePropsType Rule type +type SecurityDetectionsAPINewTermsRuleUpdatePropsType string + +// SecurityDetectionsAPINonEmptyString A string that does not contain only whitespace characters +type SecurityDetectionsAPINonEmptyString = string + +// SecurityDetectionsAPINormalizedRuleAction defines model for Security_Detections_API_NormalizedRuleAction. +type SecurityDetectionsAPINormalizedRuleAction struct { + // AlertsFilter Object containing an action’s conditional filters. + // + // - `timeframe` (object, optional): Object containing the time frame for when this action can be run. + // - `days` (array of integers, required): List of days of the week on which this action will be run. Days of the week are expressed as numbers between `1-7`, where `1` is Monday and `7` is Sunday. To select all days of the week, enter an empty array. + // - `hours` (object, required): The hours of the day during which this action will run. Hours of the day are expressed as two strings in the format `hh:mm` in `24` hour time. A start of `00:00` and an end of `24:00` means the action can run all day. + // - start (string, required): Start time in `hh:mm` format. + // - end (string, required): End time in `hh:mm` format. + // - `timezone` (string, required): An ISO timezone name, such as `Europe/Madrid` or `America/New_York`. Specific offsets such as `UTC` or `UTC+1` will also work, but lack built-in DST. + // - `query` (object, optional): Object containing a query filter which gets applied to an action and determines whether the action should run. + // - `kql` (string, required): A KQL string. + // - `filters` (array of objects, required): Array of filter objects, as defined in the `kbn-es-query` package. + AlertsFilter *SecurityDetectionsAPIRuleActionAlertsFilter `json:"alerts_filter,omitempty"` + + // Frequency The action frequency defines when the action runs (for example, only on rule execution or at specific time intervals). + Frequency *SecurityDetectionsAPIRuleActionFrequency `json:"frequency,omitempty"` + + // Group Optionally groups actions by use cases. Use `default` for alert notifications. + Group *SecurityDetectionsAPIRuleActionGroup `json:"group,omitempty"` + + // Id The connector ID. + Id SecurityDetectionsAPIRuleActionId `json:"id"` + + // Params Object containing the allowed connector fields, which varies according to the connector type. + // + // For Slack: + // + // - `message` (string, required): The notification message. + // + // For email: + // + // - `to`, `cc`, `bcc` (string): Email addresses to which the notifications are sent. At least one field must have a value. + // - `subject` (string, optional): Email subject line. + // - `message` (string, required): Email body text. + // + // For Webhook: + // + // - `body` (string, required): JSON payload. + // + // For PagerDuty: + // + // - `severity` (string, required): Severity of on the alert notification, can be: `Critical`, `Error`, `Warning` or `Info`. + // - `eventAction` (string, required): Event [action type](https://v2.developer.pagerduty.com/docs/events-api-v2#event-action), which can be `trigger`, `resolve`, or `acknowledge`. + // - `dedupKey` (string, optional): Groups alert notifications with the same PagerDuty alert. + // - `timestamp` (DateTime, optional): ISO-8601 format [timestamp](https://v2.developer.pagerduty.com/docs/types#datetime). 
+ // - `component` (string, optional): Source machine component responsible for the event, for example `security-solution`. + // - `group` (string, optional): Enables logical grouping of service components. + // - `source` (string, optional): The affected system. Defaults to the Kibana saved object ID of the action. + // - `summary` (string, options): Summary of the event. Defaults to `No summary provided`. Maximum length is 1024 characters. + // - `class` (string, optional): Value indicating the class/type of the event. + Params SecurityDetectionsAPIRuleActionParams `json:"params"` +} + +// SecurityDetectionsAPINormalizedRuleError defines model for Security_Detections_API_NormalizedRuleError. +type SecurityDetectionsAPINormalizedRuleError struct { + ErrCode *SecurityDetectionsAPIBulkActionsDryRunErrCode `json:"err_code,omitempty"` + Message string `json:"message"` + Rules []SecurityDetectionsAPIRuleDetailsInError `json:"rules"` + StatusCode int `json:"status_code"` +} + +// SecurityDetectionsAPIOsqueryParams defines model for Security_Detections_API_OsqueryParams. +type SecurityDetectionsAPIOsqueryParams struct { + // EcsMapping Map Osquery results columns or static values to Elastic Common Schema (ECS) fields. Example: "ecs_mapping": {"process.pid": {"field": "pid"}} + EcsMapping *SecurityDetectionsAPIEcsMapping `json:"ecs_mapping,omitempty"` + + // PackId To specify a query pack, use the packId field. Example: "packId": "processes_elastic" + PackId *string `json:"pack_id,omitempty"` + Queries *[]SecurityDetectionsAPIOsqueryQuery `json:"queries,omitempty"` + + // Query To run a single query, use the query field and enter a SQL query. Example: "query": "SELECT * FROM processes;" + Query *string `json:"query,omitempty"` + + // SavedQueryId To run a saved query, use the saved_query_id field and specify the saved query ID. Example: "saved_query_id": "processes_elastic" + SavedQueryId *string `json:"saved_query_id,omitempty"` + + // Timeout A timeout period, in seconds, after which the query will stop running. Overwriting the default timeout allows you to support queries that require more time to complete. The default and minimum supported value is 60. The maximum supported value is 900. Example: "timeout": 120. + Timeout *float32 `json:"timeout,omitempty"` +} + +// SecurityDetectionsAPIOsqueryQuery defines model for Security_Detections_API_OsqueryQuery. +type SecurityDetectionsAPIOsqueryQuery struct { + // EcsMapping Map Osquery results columns or static values to Elastic Common Schema (ECS) fields. Example: "ecs_mapping": {"process.pid": {"field": "pid"}} + EcsMapping *SecurityDetectionsAPIEcsMapping `json:"ecs_mapping,omitempty"` + + // Id Query ID + Id string `json:"id"` + Platform *string `json:"platform,omitempty"` + + // Query Query to run + Query string `json:"query"` + Removed *bool `json:"removed,omitempty"` + Snapshot *bool `json:"snapshot,omitempty"` + + // Version Query version + Version *string `json:"version,omitempty"` +} + +// SecurityDetectionsAPIOsqueryResponseAction defines model for Security_Detections_API_OsqueryResponseAction. +type SecurityDetectionsAPIOsqueryResponseAction struct { + ActionTypeId SecurityDetectionsAPIOsqueryResponseActionActionTypeId `json:"action_type_id"` + Params SecurityDetectionsAPIOsqueryParams `json:"params"` +} + +// SecurityDetectionsAPIOsqueryResponseActionActionTypeId defines model for SecurityDetectionsAPIOsqueryResponseAction.ActionTypeId. 
+type SecurityDetectionsAPIOsqueryResponseActionActionTypeId string + +// SecurityDetectionsAPIPlatformErrorResponse defines model for Security_Detections_API_PlatformErrorResponse. +type SecurityDetectionsAPIPlatformErrorResponse struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode int `json:"statusCode"` +} + +// SecurityDetectionsAPIProcessesParams defines model for Security_Detections_API_ProcessesParams. +type SecurityDetectionsAPIProcessesParams struct { + // Command To run an endpoint response action, specify a value for the command field. Example: "command": "isolate" + Command SecurityDetectionsAPIProcessesParamsCommand `json:"command"` + + // Comment Add a note that explains or describes the action. You can find your comment in the response actions history log. Example: "comment": "Check processes" + Comment *string `json:"comment,omitempty"` + Config struct { + // Field Field to use instead of process.pid + Field string `json:"field"` + + // Overwrite Whether to overwrite field with process.pid + Overwrite *bool `json:"overwrite,omitempty"` + } `json:"config"` +} + +// SecurityDetectionsAPIProcessesParamsCommand To run an endpoint response action, specify a value for the command field. Example: "command": "isolate" +type SecurityDetectionsAPIProcessesParamsCommand string + +// SecurityDetectionsAPIQueryRule defines model for Security_Detections_API_QueryRule. +type SecurityDetectionsAPIQueryRule struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions []SecurityDetectionsAPIRuleAction `json:"actions"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author SecurityDetectionsAPIRuleAuthorArray `json:"author"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + CreatedAt time.Time `json:"created_at"` + CreatedBy string `json:"created_by"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled SecurityDetectionsAPIIsRuleEnabled `json:"enabled"` + ExceptionsList []SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list"` + + // ExecutionSummary Summary of the last execution of a rule. + // > info + // > This field is under development and its usage or schema may change + ExecutionSummary *SecurityDetectionsAPIRuleExecutionSummary `json:"execution_summary,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. 
+ FalsePositives SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From SecurityDetectionsAPIRuleIntervalFrom `json:"from"` + + // Id A universally unique identifier + Id SecurityDetectionsAPIRuleObjectId `json:"id"` + + // Immutable This field determines whether the rule is a prebuilt Elastic rule. It will be replaced with the `rule_source` field. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Immutable SecurityDetectionsAPIIsRuleImmutable `json:"immutable"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval SecurityDetectionsAPIRuleInterval `json:"interval"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language SecurityDetectionsAPIKqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals SecurityDetectionsAPIMaxSignals `json:"max_signals"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. 
+ Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References SecurityDetectionsAPIRuleReferenceArray `json:"references"` + RelatedIntegrations SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations"` + RequiredFields SecurityDetectionsAPIRequiredFieldArray `json:"required_fields"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // Revision The rule's revision number. + // + // It represents the version of rule's object in Kibana. It is set to `0` when the rule is installed or created and then gets incremented on each update. + // > info + // > Not all updates to any rule fields will increment the revision. Only those fields that are considered static `rule parameters` can trigger revision increments. For example, an update to a rule's query or index fields will increment the rule's revision by `1`. However, changes to dynamic or technical fields like enabled or execution_summary will not cause revision increments. + Revision SecurityDetectionsAPIRuleRevision `json:"revision"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId SecurityDetectionsAPIRuleSignatureId `json:"rule_id"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. 
+ RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // RuleSource Discriminated union that determines whether the rule is internally sourced (created within the Kibana app) or has an external source, such as the Elastic Prebuilt rules repo. + RuleSource SecurityDetectionsAPIRuleSource `json:"rule_source"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup SecurityDetectionsAPISetupGuide `json:"setup"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping SecurityDetectionsAPISeverityMapping `json:"severity_mapping"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags SecurityDetectionsAPIRuleTagArray `json:"tags"` + Threat SecurityDetectionsAPIThreatArray `json:"threat"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To SecurityDetectionsAPIRuleIntervalTo `json:"to"` + + // Type Rule type + Type SecurityDetectionsAPIQueryRuleType `json:"type"` + UpdatedAt time.Time `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. 
+ Version SecurityDetectionsAPIRuleVersion `json:"version"` +} + +// SecurityDetectionsAPIQueryRuleType Rule type +type SecurityDetectionsAPIQueryRuleType string + +// SecurityDetectionsAPIQueryRuleCreateFields defines model for Security_Detections_API_QueryRuleCreateFields. +type SecurityDetectionsAPIQueryRuleCreateFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIQueryRuleCreateFieldsType `json:"type"` +} + +// SecurityDetectionsAPIQueryRuleCreateFieldsType Rule type +type SecurityDetectionsAPIQueryRuleCreateFieldsType string + +// SecurityDetectionsAPIQueryRuleCreateProps defines model for Security_Detections_API_QueryRuleCreateProps. +type SecurityDetectionsAPIQueryRuleCreateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). 
+ BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. 
+ Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. 
+ SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIQueryRuleCreatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIQueryRuleCreatePropsType Rule type +type SecurityDetectionsAPIQueryRuleCreatePropsType string + +// SecurityDetectionsAPIQueryRuleDefaultableFields defines model for Security_Detections_API_QueryRuleDefaultableFields. +type SecurityDetectionsAPIQueryRuleDefaultableFields struct { + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. 
Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` +} + +// SecurityDetectionsAPIQueryRuleOptionalFields defines model for Security_Detections_API_QueryRuleOptionalFields. +type SecurityDetectionsAPIQueryRuleOptionalFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` +} + +// SecurityDetectionsAPIQueryRulePatchFields defines model for Security_Detections_API_QueryRulePatchFields. +type SecurityDetectionsAPIQueryRulePatchFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. 
+ SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIQueryRulePatchFieldsType `json:"type,omitempty"` +} + +// SecurityDetectionsAPIQueryRulePatchFieldsType Rule type +type SecurityDetectionsAPIQueryRulePatchFieldsType string + +// SecurityDetectionsAPIQueryRulePatchProps defines model for Security_Detections_API_QueryRulePatchProps. +type SecurityDetectionsAPIQueryRulePatchProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description *SecurityDetectionsAPIRuleDescription `json:"description,omitempty"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). 
+ Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name *SecurityDetectionsAPIRuleName `json:"name,omitempty"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore *SecurityDetectionsAPIRiskScore `json:"risk_score,omitempty"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity *SecurityDetectionsAPISeverity `json:"severity,omitempty"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. 
The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIQueryRulePatchPropsType `json:"type,omitempty"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIQueryRulePatchPropsType Rule type +type SecurityDetectionsAPIQueryRulePatchPropsType string + +// SecurityDetectionsAPIQueryRuleRequiredFields defines model for Security_Detections_API_QueryRuleRequiredFields. +type SecurityDetectionsAPIQueryRuleRequiredFields struct { + // Type Rule type + Type SecurityDetectionsAPIQueryRuleRequiredFieldsType `json:"type"` +} + +// SecurityDetectionsAPIQueryRuleRequiredFieldsType Rule type +type SecurityDetectionsAPIQueryRuleRequiredFieldsType string + +// SecurityDetectionsAPIQueryRuleResponseFields defines model for Security_Detections_API_QueryRuleResponseFields. +type SecurityDetectionsAPIQueryRuleResponseFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language SecurityDetectionsAPIKqlQueryLanguage `json:"language"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. 
+ SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIQueryRuleResponseFieldsType `json:"type"` +} + +// SecurityDetectionsAPIQueryRuleResponseFieldsType Rule type +type SecurityDetectionsAPIQueryRuleResponseFieldsType string + +// SecurityDetectionsAPIQueryRuleUpdateProps defines model for Security_Detections_API_QueryRuleUpdateProps. +type SecurityDetectionsAPIQueryRuleUpdateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). 
+ Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. 
+ TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIQueryRuleUpdatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIQueryRuleUpdatePropsType Rule type +type SecurityDetectionsAPIQueryRuleUpdatePropsType string + +// SecurityDetectionsAPIRelatedIntegration Related integration is a potential dependency of a rule. It's assumed that if the user installs +// one of the related integrations of a rule, the rule might start to work properly because it will +// have source events (generated by this integration) potentially matching the rule's query. +// +// NOTE: Proper work is not guaranteed, because a related integration, if installed, can be +// configured differently or generate data that is not necessarily relevant for this rule. +// +// Related integration is a combination of a Fleet package and (optionally) one of the +// package's "integrations" that this package contains. It is represented by 3 properties: +// +// - `package`: name of the package (required, unique id) +// - `version`: version of the package (required, semver-compatible) +// - `integration`: name of the integration of this package (optional, id within the package) +// +// There are Fleet packages like `windows` that contain only one integration; in this case, +// `integration` should be unspecified. There are also packages like `aws` and `azure` that contain +// several integrations; in this case, `integration` should be specified. +type SecurityDetectionsAPIRelatedIntegration struct { + // Integration A string that does not contain only whitespace characters + Integration *SecurityDetectionsAPINonEmptyString `json:"integration,omitempty"` + + // Package A string that does not contain only whitespace characters + Package SecurityDetectionsAPINonEmptyString `json:"package"` + + // Version A string that does not contain only whitespace characters + Version SecurityDetectionsAPINonEmptyString `json:"version"` +} + +// SecurityDetectionsAPIRelatedIntegrationArray defines model for Security_Detections_API_RelatedIntegrationArray. +type SecurityDetectionsAPIRelatedIntegrationArray = []SecurityDetectionsAPIRelatedIntegration + +// SecurityDetectionsAPIRequiredField Describes an Elasticsearch field that is needed for the rule to function. +// +// Almost all types of Security rules check source event documents for a match to some kind of +// query or filter. If a document has certain field with certain values, then it's a match and +// the rule will generate an alert. +// +// Required field is an event field that must be present in the source indices of a given rule. 
+// +// @example +// +// const standardEcsField: RequiredField = { +// name: 'event.action', +// type: 'keyword', +// ecs: true, +// }; +// +// @example +// +// const nonEcsField: RequiredField = { +// name: 'winlog.event_data.AttributeLDAPDisplayName', +// type: 'keyword', +// ecs: false, +// }; +type SecurityDetectionsAPIRequiredField struct { + // Ecs Indicates whether the field is ECS-compliant. This property is only present in responses. Its value is computed based on field’s name and type. + Ecs bool `json:"ecs"` + + // Name Name of an Elasticsearch field + Name string `json:"name"` + + // Type Type of the Elasticsearch field + Type string `json:"type"` +} + +// SecurityDetectionsAPIRequiredFieldArray defines model for Security_Detections_API_RequiredFieldArray. +type SecurityDetectionsAPIRequiredFieldArray = []SecurityDetectionsAPIRequiredField + +// SecurityDetectionsAPIRequiredFieldInput Input parameters to create a RequiredField. Does not include the `ecs` field, because `ecs` is calculated on the backend based on the field name and type. +type SecurityDetectionsAPIRequiredFieldInput struct { + // Name Name of an Elasticsearch field + Name string `json:"name"` + + // Type Type of the Elasticsearch field + Type string `json:"type"` +} + +// SecurityDetectionsAPIResponseAction defines model for Security_Detections_API_ResponseAction. +type SecurityDetectionsAPIResponseAction struct { + union json.RawMessage +} + +// SecurityDetectionsAPIResponseFields defines model for Security_Detections_API_ResponseFields. +type SecurityDetectionsAPIResponseFields struct { + CreatedAt time.Time `json:"created_at"` + CreatedBy string `json:"created_by"` + + // ExecutionSummary Summary of the last execution of a rule. + // > info + // > This field is under development and its usage or schema may change + ExecutionSummary *SecurityDetectionsAPIRuleExecutionSummary `json:"execution_summary,omitempty"` + + // Id A universally unique identifier + Id SecurityDetectionsAPIRuleObjectId `json:"id"` + + // Immutable This field determines whether the rule is a prebuilt Elastic rule. It will be replaced with the `rule_source` field. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Immutable SecurityDetectionsAPIIsRuleImmutable `json:"immutable"` + RequiredFields SecurityDetectionsAPIRequiredFieldArray `json:"required_fields"` + + // Revision The rule's revision number. + // + // It represents the version of rule's object in Kibana. It is set to `0` when the rule is installed or created and then gets incremented on each update. + // > info + // > Not all updates to any rule fields will increment the revision. Only those fields that are considered static `rule parameters` can trigger revision increments. For example, an update to a rule's query or index fields will increment the rule's revision by `1`. However, changes to dynamic or technical fields like enabled or execution_summary will not cause revision increments. + Revision SecurityDetectionsAPIRuleRevision `json:"revision"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. 
+ RuleId SecurityDetectionsAPIRuleSignatureId `json:"rule_id"` + + // RuleSource Discriminated union that determines whether the rule is internally sourced (created within the Kibana app) or has an external source, such as the Elastic Prebuilt rules repo. + RuleSource SecurityDetectionsAPIRuleSource `json:"rule_source"` + UpdatedAt time.Time `json:"updated_at"` + UpdatedBy string `json:"updated_by"` +} + +// SecurityDetectionsAPIRiskScore A numerical representation of the alert's severity from 0 to 100, where: +// * `0` - `21` represents low severity +// * `22` - `47` represents medium severity +// * `48` - `73` represents high severity +// * `74` - `100` represents critical severity +type SecurityDetectionsAPIRiskScore = int + +// SecurityDetectionsAPIRiskScoreMapping Overrides generated alerts' risk_score with a value from the source event +type SecurityDetectionsAPIRiskScoreMapping = []struct { + // Field Source event field used to override the default `risk_score`. + Field string `json:"field"` + Operator SecurityDetectionsAPIRiskScoreMappingOperator `json:"operator"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore *SecurityDetectionsAPIRiskScore `json:"risk_score,omitempty"` + Value string `json:"value"` +} + +// SecurityDetectionsAPIRiskScoreMappingOperator defines model for SecurityDetectionsAPIRiskScoreMapping.Operator. +type SecurityDetectionsAPIRiskScoreMappingOperator string + +// SecurityDetectionsAPIRuleAction defines model for Security_Detections_API_RuleAction. +type SecurityDetectionsAPIRuleAction struct { + // ActionTypeId The action type used for sending notifications, can be: + // + // - `.slack` + // - `.slack_api` + // - `.email` + // - `.index` + // - `.pagerduty` + // - `.swimlane` + // - `.webhook` + // - `.servicenow` + // - `.servicenow-itom` + // - `.servicenow-sir` + // - `.jira` + // - `.resilient` + // - `.opsgenie` + // - `.teams` + // - `.torq` + // - `.tines` + // - `.d3security` + ActionTypeId string `json:"action_type_id"` + + // AlertsFilter Object containing an action’s conditional filters. + // + // - `timeframe` (object, optional): Object containing the time frame for when this action can be run. + // - `days` (array of integers, required): List of days of the week on which this action will be run. Days of the week are expressed as numbers between `1-7`, where `1` is Monday and `7` is Sunday. To select all days of the week, enter an empty array. + // - `hours` (object, required): The hours of the day during which this action will run. Hours of the day are expressed as two strings in the format `hh:mm` in `24` hour time. A start of `00:00` and an end of `24:00` means the action can run all day. + // - start (string, required): Start time in `hh:mm` format. + // - end (string, required): End time in `hh:mm` format. + // - `timezone` (string, required): An ISO timezone name, such as `Europe/Madrid` or `America/New_York`. Specific offsets such as `UTC` or `UTC+1` will also work, but lack built-in DST. + // - `query` (object, optional): Object containing a query filter which gets applied to an action and determines whether the action should run. + // - `kql` (string, required): A KQL string. + // - `filters` (array of objects, required): Array of filter objects, as defined in the `kbn-es-query` package. 
+ AlertsFilter *SecurityDetectionsAPIRuleActionAlertsFilter `json:"alerts_filter,omitempty"` + + // Frequency The action frequency defines when the action runs (for example, only on rule execution or at specific time intervals). + Frequency *SecurityDetectionsAPIRuleActionFrequency `json:"frequency,omitempty"` + + // Group Optionally groups actions by use cases. Use `default` for alert notifications. + Group *SecurityDetectionsAPIRuleActionGroup `json:"group,omitempty"` + + // Id The connector ID. + Id SecurityDetectionsAPIRuleActionId `json:"id"` + + // Params Object containing the allowed connector fields, which varies according to the connector type. + // + // For Slack: + // + // - `message` (string, required): The notification message. + // + // For email: + // + // - `to`, `cc`, `bcc` (string): Email addresses to which the notifications are sent. At least one field must have a value. + // - `subject` (string, optional): Email subject line. + // - `message` (string, required): Email body text. + // + // For Webhook: + // + // - `body` (string, required): JSON payload. + // + // For PagerDuty: + // + // - `severity` (string, required): Severity of on the alert notification, can be: `Critical`, `Error`, `Warning` or `Info`. + // - `eventAction` (string, required): Event [action type](https://v2.developer.pagerduty.com/docs/events-api-v2#event-action), which can be `trigger`, `resolve`, or `acknowledge`. + // - `dedupKey` (string, optional): Groups alert notifications with the same PagerDuty alert. + // - `timestamp` (DateTime, optional): ISO-8601 format [timestamp](https://v2.developer.pagerduty.com/docs/types#datetime). + // - `component` (string, optional): Source machine component responsible for the event, for example `security-solution`. + // - `group` (string, optional): Enables logical grouping of service components. + // - `source` (string, optional): The affected system. Defaults to the Kibana saved object ID of the action. + // - `summary` (string, options): Summary of the event. Defaults to `No summary provided`. Maximum length is 1024 characters. + // - `class` (string, optional): Value indicating the class/type of the event. + Params SecurityDetectionsAPIRuleActionParams `json:"params"` + + // Uuid A string that does not contain only whitespace characters + Uuid *SecurityDetectionsAPINonEmptyString `json:"uuid,omitempty"` +} + +// SecurityDetectionsAPIRuleActionAlertsFilter Object containing an action’s conditional filters. +// +// - `timeframe` (object, optional): Object containing the time frame for when this action can be run. +// - `days` (array of integers, required): List of days of the week on which this action will be run. Days of the week are expressed as numbers between `1-7`, where `1` is Monday and `7` is Sunday. To select all days of the week, enter an empty array. +// - `hours` (object, required): The hours of the day during which this action will run. Hours of the day are expressed as two strings in the format `hh:mm` in `24` hour time. A start of `00:00` and an end of `24:00` means the action can run all day. +// - start (string, required): Start time in `hh:mm` format. +// - end (string, required): End time in `hh:mm` format. +// - `timezone` (string, required): An ISO timezone name, such as `Europe/Madrid` or `America/New_York`. Specific offsets such as `UTC` or `UTC+1` will also work, but lack built-in DST. +// +// - `query` (object, optional): Object containing a query filter which gets applied to an action and determines whether the action should run. 
+// - `kql` (string, required): A KQL string. +// - `filters` (array of objects, required): Array of filter objects, as defined in the `kbn-es-query` package. +type SecurityDetectionsAPIRuleActionAlertsFilter map[string]interface{} + +// SecurityDetectionsAPIRuleActionFrequency The action frequency defines when the action runs (for example, only on rule execution or at specific time intervals). +type SecurityDetectionsAPIRuleActionFrequency struct { + // NotifyWhen Defines how often rules run actions. + NotifyWhen SecurityDetectionsAPIRuleActionNotifyWhen `json:"notifyWhen"` + + // Summary Action summary indicates whether we will send a summary notification about all the generate alerts or notification per individual alert + Summary bool `json:"summary"` + + // Throttle Defines how often rule actions are taken. + Throttle SecurityDetectionsAPIRuleActionThrottle `json:"throttle"` +} + +// SecurityDetectionsAPIRuleActionGroup Optionally groups actions by use cases. Use `default` for alert notifications. +type SecurityDetectionsAPIRuleActionGroup = string + +// SecurityDetectionsAPIRuleActionId The connector ID. +type SecurityDetectionsAPIRuleActionId = string + +// SecurityDetectionsAPIRuleActionNotifyWhen Defines how often rules run actions. +type SecurityDetectionsAPIRuleActionNotifyWhen string + +// SecurityDetectionsAPIRuleActionParams Object containing the allowed connector fields, which varies according to the connector type. +// +// For Slack: +// +// - `message` (string, required): The notification message. +// +// For email: +// +// - `to`, `cc`, `bcc` (string): Email addresses to which the notifications are sent. At least one field must have a value. +// - `subject` (string, optional): Email subject line. +// - `message` (string, required): Email body text. +// +// For Webhook: +// +// - `body` (string, required): JSON payload. +// +// For PagerDuty: +// +// - `severity` (string, required): Severity of on the alert notification, can be: `Critical`, `Error`, `Warning` or `Info`. +// - `eventAction` (string, required): Event [action type](https://v2.developer.pagerduty.com/docs/events-api-v2#event-action), which can be `trigger`, `resolve`, or `acknowledge`. +// - `dedupKey` (string, optional): Groups alert notifications with the same PagerDuty alert. +// - `timestamp` (DateTime, optional): ISO-8601 format [timestamp](https://v2.developer.pagerduty.com/docs/types#datetime). +// - `component` (string, optional): Source machine component responsible for the event, for example `security-solution`. +// - `group` (string, optional): Enables logical grouping of service components. +// - `source` (string, optional): The affected system. Defaults to the Kibana saved object ID of the action. +// - `summary` (string, options): Summary of the event. Defaults to `No summary provided`. Maximum length is 1024 characters. +// - `class` (string, optional): Value indicating the class/type of the event. +type SecurityDetectionsAPIRuleActionParams map[string]interface{} + +// SecurityDetectionsAPIRuleActionThrottle Defines how often rule actions are taken. +type SecurityDetectionsAPIRuleActionThrottle struct { + union json.RawMessage +} + +// SecurityDetectionsAPIRuleActionThrottle0 defines model for SecurityDetectionsAPIRuleActionThrottle.0. +type SecurityDetectionsAPIRuleActionThrottle0 string + +// SecurityDetectionsAPIRuleActionThrottle1 Time interval in seconds, minutes, hours, or days. 
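For orientation, a minimal standalone sketch (not part of the generated code above) of how a throttle value can be inspected on the wire: the union serializes as a single JSON string that is either a keyword or a time interval such as "1h". The keyword value "rule" used below is an assumption for illustration, not something taken from this patch.

package main

import (
	"encoding/json"
	"fmt"
	"regexp"
)

// Matches simple interval strings such as "30s", "5m", "1h", "7d".
var intervalRe = regexp.MustCompile(`^[0-9]+[smhd]$`)

// classifyThrottle decodes a raw throttle value and reports whether it looks
// like a time interval or a keyword form (e.g. "rule", an assumed value).
func classifyThrottle(raw json.RawMessage) (string, error) {
	var s string
	if err := json.Unmarshal(raw, &s); err != nil {
		return "", err
	}
	if intervalRe.MatchString(s) {
		return "interval: " + s, nil
	}
	return "keyword: " + s, nil
}

func main() {
	for _, v := range []string{`"1h"`, `"rule"`} {
		out, err := classifyThrottle(json.RawMessage(v))
		if err != nil {
			fmt.Println("error:", err)
			continue
		}
		fmt.Println(out)
	}
}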
+type SecurityDetectionsAPIRuleActionThrottle1 = string
+
+// SecurityDetectionsAPIRuleAuthorArray The rule’s author.
+type SecurityDetectionsAPIRuleAuthorArray = []string
+
+// SecurityDetectionsAPIRuleCreateProps defines model for Security_Detections_API_RuleCreateProps.
+type SecurityDetectionsAPIRuleCreateProps struct {
+	union json.RawMessage
+}
+
+// SecurityDetectionsAPIRuleDescription The rule’s description.
+type SecurityDetectionsAPIRuleDescription = string
+
+// SecurityDetectionsAPIRuleDetailsInError defines model for Security_Detections_API_RuleDetailsInError.
+type SecurityDetectionsAPIRuleDetailsInError struct {
+	Id string `json:"id"`
+	Name *string `json:"name,omitempty"`
+}
+
+// SecurityDetectionsAPIRuleExceptionList Array of [exception containers](https://www.elastic.co/guide/en/security/current/exceptions-api-overview.html), which define exceptions that prevent the rule from generating alerts even when its other criteria are met.
+type SecurityDetectionsAPIRuleExceptionList struct {
+	// Id ID of the exception container
+	Id string `json:"id"`
+
+	// ListId List ID of the exception container
+	ListId string `json:"list_id"`
+
+	// NamespaceType Determines the exceptions' validity in the rule's Kibana space
+	NamespaceType SecurityDetectionsAPIRuleExceptionListNamespaceType `json:"namespace_type"`
+
+	// Type The exception type
+	Type SecurityDetectionsAPIExceptionListType `json:"type"`
+}
+
+// SecurityDetectionsAPIRuleExceptionListNamespaceType Determines the exceptions' validity in the rule's Kibana space
+type SecurityDetectionsAPIRuleExceptionListNamespaceType string
+
+// SecurityDetectionsAPIRuleExecutionMetrics defines model for Security_Detections_API_RuleExecutionMetrics.
+type SecurityDetectionsAPIRuleExecutionMetrics struct {
+	// ExecutionGapDurationS Duration in seconds of execution gap
+	ExecutionGapDurationS *int `json:"execution_gap_duration_s,omitempty"`
+
+	// FrozenIndicesQueriedCount Count of frozen indices queried during the rule execution. These indices could not be entirely excluded after applying the time range filter.
+	FrozenIndicesQueriedCount *int `json:"frozen_indices_queried_count,omitempty"`
+
+	// GapRange Range of the execution gap
+	GapRange *struct {
+		// Gte Start date of the execution gap
+		Gte string `json:"gte"`
+
+		// Lte End date of the execution gap
+		Lte string `json:"lte"`
+	} `json:"gap_range,omitempty"`
+
+	// TotalEnrichmentDurationMs Total time spent enriching documents during current rule execution cycle
+	TotalEnrichmentDurationMs *int `json:"total_enrichment_duration_ms,omitempty"`
+
+	// TotalIndexingDurationMs Total time spent indexing documents during current rule execution cycle
+	TotalIndexingDurationMs *int `json:"total_indexing_duration_ms,omitempty"`
+
+	// TotalSearchDurationMs Total time spent performing ES searches as measured by Kibana; includes network latency and time spent serializing/deserializing request/response
+	TotalSearchDurationMs *int `json:"total_search_duration_ms,omitempty"`
+}
+
+// SecurityDetectionsAPIRuleExecutionStatus Custom execution status of Security rules that is different from the status used in the Alerting Framework. We merge our custom status with the Framework's status to determine the resulting status of a rule.
+// - going to run - @deprecated Replaced by the 'running' status but left for backwards compatibility with rule execution events already written to Event Log in the prior versions of Kibana. Don't use when writing rule status changes.
+// - running - Rule execution started but has not reached any intermediate or final status.
+// - partial failure - Rule can partially fail for various reasons either in the middle of an execution (in this case we update its status right away) or at the end of it. So currently this status can be both intermediate and final at the same time. A typical reason for a partial failure: not all the indices that the rule searches over actually exist.
+// - failed - Rule failed to execute due to an unhandled exception or a reason defined in the business logic of its executor function.
+// - succeeded - Rule executed successfully without any issues. Note: this status is just an indication of a rule's "health". The rule might or might not generate any alerts despite it.
+type SecurityDetectionsAPIRuleExecutionStatus string
+
+// SecurityDetectionsAPIRuleExecutionStatusOrder defines model for Security_Detections_API_RuleExecutionStatusOrder.
+type SecurityDetectionsAPIRuleExecutionStatusOrder = int
+
+// SecurityDetectionsAPIRuleExecutionSummary Summary of the last execution of a rule.
+// > info
+// > This field is under development and its usage or schema may change
+type SecurityDetectionsAPIRuleExecutionSummary struct {
+	LastExecution struct {
+		// Date Date of the last execution
+		Date time.Time `json:"date"`
+		Message string `json:"message"`
+		Metrics SecurityDetectionsAPIRuleExecutionMetrics `json:"metrics"`
+
+		// Status Custom execution status of Security rules that is different from the status used in the Alerting Framework. We merge our custom status with the Framework's status to determine the resulting status of a rule.
+		// - going to run - @deprecated Replaced by the 'running' status but left for backwards compatibility with rule execution events already written to Event Log in the prior versions of Kibana. Don't use when writing rule status changes.
+		// - running - Rule execution started but has not reached any intermediate or final status.
+		// - partial failure - Rule can partially fail for various reasons either in the middle of an execution (in this case we update its status right away) or at the end of it. So currently this status can be both intermediate and final at the same time. A typical reason for a partial failure: not all the indices that the rule searches over actually exist.
+		// - failed - Rule failed to execute due to an unhandled exception or a reason defined in the business logic of its executor function.
+		// - succeeded - Rule executed successfully without any issues. Note: this status is just an indication of a rule's "health". The rule might or might not generate any alerts despite it.
+		Status SecurityDetectionsAPIRuleExecutionStatus `json:"status"`
+		StatusOrder SecurityDetectionsAPIRuleExecutionStatusOrder `json:"status_order"`
+	} `json:"last_execution"`
+}
+
+// SecurityDetectionsAPIRuleFalsePositiveArray String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array.
+type SecurityDetectionsAPIRuleFalsePositiveArray = []string
+
+// SecurityDetectionsAPIRuleFilterArray The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array.
+// > info
+// > This field is not supported for ES|QL rules.
+type SecurityDetectionsAPIRuleFilterArray = []interface{}
+
+// SecurityDetectionsAPIRuleInterval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes).
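The interval, from, and to values described above are plain date-math strings. A minimal standalone sketch, assuming only the simple "now-<n><unit>" form is handled (real Elasticsearch date math supports more), showing how a from of "now-4200s" corresponds to the 70-minute lookback mentioned in the comments:

package main

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// lookback converts a simple "now-<n><unit>" date-math string into a
// time.Duration. Only the s/m/h/d units are handled here.
func lookback(from string) (time.Duration, error) {
	if !strings.HasPrefix(from, "now-") {
		return 0, fmt.Errorf("unsupported date math: %q", from)
	}
	s := strings.TrimPrefix(from, "now-")
	if len(s) < 2 {
		return 0, fmt.Errorf("unsupported date math: %q", from)
	}
	n, err := strconv.Atoi(s[:len(s)-1])
	if err != nil {
		return 0, err
	}
	units := map[string]time.Duration{"s": time.Second, "m": time.Minute, "h": time.Hour, "d": 24 * time.Hour}
	mult, ok := units[s[len(s)-1:]]
	if !ok {
		return 0, fmt.Errorf("unsupported unit in %q", from)
	}
	return time.Duration(n) * mult, nil
}

func main() {
	d, err := lookback("now-4200s")
	if err != nil {
		panic(err)
	}
	fmt.Println(d) // 1h10m0s, i.e. data from 70 minutes before each run
}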
+type SecurityDetectionsAPIRuleInterval = string + +// SecurityDetectionsAPIRuleIntervalFrom Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). +type SecurityDetectionsAPIRuleIntervalFrom = string + +// SecurityDetectionsAPIRuleIntervalTo defines model for Security_Detections_API_RuleIntervalTo. +type SecurityDetectionsAPIRuleIntervalTo = string + +// SecurityDetectionsAPIRuleLicense The rule's license. +type SecurityDetectionsAPIRuleLicense = string + +// SecurityDetectionsAPIRuleMetadata Placeholder for metadata about the rule. +// > info +// > This field is overwritten when you save changes to the rule’s settings. +type SecurityDetectionsAPIRuleMetadata map[string]interface{} + +// SecurityDetectionsAPIRuleName A human-readable name for the rule. +type SecurityDetectionsAPIRuleName = string + +// SecurityDetectionsAPIRuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. +type SecurityDetectionsAPIRuleNameOverride = string + +// SecurityDetectionsAPIRuleObjectId A universally unique identifier +type SecurityDetectionsAPIRuleObjectId = SecurityDetectionsAPIUUID + +// SecurityDetectionsAPIRulePatchProps defines model for Security_Detections_API_RulePatchProps. +type SecurityDetectionsAPIRulePatchProps struct { + union json.RawMessage +} + +// SecurityDetectionsAPIRulePreviewLoggedRequest defines model for Security_Detections_API_RulePreviewLoggedRequest. +type SecurityDetectionsAPIRulePreviewLoggedRequest struct { + // Description A string that does not contain only whitespace characters + Description *SecurityDetectionsAPINonEmptyString `json:"description,omitempty"` + Duration *int `json:"duration,omitempty"` + + // Request A string that does not contain only whitespace characters + Request *SecurityDetectionsAPINonEmptyString `json:"request,omitempty"` + + // RequestType A string that does not contain only whitespace characters + RequestType *SecurityDetectionsAPINonEmptyString `json:"request_type,omitempty"` +} + +// SecurityDetectionsAPIRulePreviewLogs defines model for Security_Detections_API_RulePreviewLogs. +type SecurityDetectionsAPIRulePreviewLogs struct { + // Duration Execution duration in milliseconds + Duration int `json:"duration"` + Errors []SecurityDetectionsAPINonEmptyString `json:"errors"` + Requests *[]SecurityDetectionsAPIRulePreviewLoggedRequest `json:"requests,omitempty"` + + // StartedAt A string that does not contain only whitespace characters + StartedAt *SecurityDetectionsAPINonEmptyString `json:"startedAt,omitempty"` + Warnings []SecurityDetectionsAPINonEmptyString `json:"warnings"` +} + +// SecurityDetectionsAPIRulePreviewParams defines model for Security_Detections_API_RulePreviewParams. +type SecurityDetectionsAPIRulePreviewParams struct { + InvocationCount int `json:"invocationCount"` + TimeframeEnd time.Time `json:"timeframeEnd"` +} + +// SecurityDetectionsAPIRuleQuery [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. +// +// - For indicator match rules, only the query’s results are used to determine whether an alert is generated. +// - ES|QL rules have additional query requirements. 
Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. +type SecurityDetectionsAPIRuleQuery = string + +// SecurityDetectionsAPIRuleReferenceArray Array containing notes about or references to relevant information about the rule. Defaults to an empty array. +type SecurityDetectionsAPIRuleReferenceArray = []string + +// SecurityDetectionsAPIRuleResponse defines model for Security_Detections_API_RuleResponse. +type SecurityDetectionsAPIRuleResponse struct { + union json.RawMessage +} + +// SecurityDetectionsAPIRuleRevision The rule's revision number. +// +// It represents the version of rule's object in Kibana. It is set to `0` when the rule is installed or created and then gets incremented on each update. +// > info +// > Not all updates to any rule fields will increment the revision. Only those fields that are considered static `rule parameters` can trigger revision increments. For example, an update to a rule's query or index fields will increment the rule's revision by `1`. However, changes to dynamic or technical fields like enabled or execution_summary will not cause revision increments. +type SecurityDetectionsAPIRuleRevision = int + +// SecurityDetectionsAPIRuleSignatureId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. +type SecurityDetectionsAPIRuleSignatureId = string + +// SecurityDetectionsAPIRuleSource Discriminated union that determines whether the rule is internally sourced (created within the Kibana app) or has an external source, such as the Elastic Prebuilt rules repo. +type SecurityDetectionsAPIRuleSource struct { + union json.RawMessage +} + +// SecurityDetectionsAPIRuleTagArray String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. +type SecurityDetectionsAPIRuleTagArray = []string + +// SecurityDetectionsAPIRuleUpdateProps defines model for Security_Detections_API_RuleUpdateProps. +type SecurityDetectionsAPIRuleUpdateProps struct { + union json.RawMessage +} + +// SecurityDetectionsAPIRuleVersion The rule's version number. +// +// - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). +// - For custom rules it is set to `1` when the rule is created. +// > info +// > It is not incremented on each update. Compare this to the `revision` field. +type SecurityDetectionsAPIRuleVersion = int + +// SecurityDetectionsAPISavedObjectResolveAliasPurpose defines model for Security_Detections_API_SavedObjectResolveAliasPurpose. +type SecurityDetectionsAPISavedObjectResolveAliasPurpose string + +// SecurityDetectionsAPISavedObjectResolveAliasTargetId defines model for Security_Detections_API_SavedObjectResolveAliasTargetId. +type SecurityDetectionsAPISavedObjectResolveAliasTargetId = string + +// SecurityDetectionsAPISavedObjectResolveOutcome defines model for Security_Detections_API_SavedObjectResolveOutcome. 
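To make the id/rule_id and revision/version distinction above concrete, a trimmed-down standalone sketch using a local struct with made-up sample values (not the generated SecurityDetectionsAPIRuleResponse union):

package main

import (
	"encoding/json"
	"fmt"
)

// ruleIdentity mirrors only the identity and versioning fields described above.
type ruleIdentity struct {
	Id       string `json:"id"`       // Kibana object id (UUID)
	RuleId   string `json:"rule_id"`  // stable signature id, shared across spaces and environments
	Revision int    `json:"revision"` // Kibana object revision, starts at 0 and increments on updates to rule parameters
	Version  int    `json:"version"`  // rule content version, starts at 1 for custom rules
}

func main() {
	raw := []byte(`{"id":"3c3f6c2e-0000-4000-8000-000000000000","rule_id":"my-custom-rule","revision":0,"version":1}`)
	var r ruleIdentity
	if err := json.Unmarshal(raw, &r); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", r)
}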
+type SecurityDetectionsAPISavedObjectResolveOutcome string + +// SecurityDetectionsAPISavedQueryId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. +type SecurityDetectionsAPISavedQueryId = string + +// SecurityDetectionsAPISavedQueryRule defines model for Security_Detections_API_SavedQueryRule. +type SecurityDetectionsAPISavedQueryRule struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions []SecurityDetectionsAPIRuleAction `json:"actions"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author SecurityDetectionsAPIRuleAuthorArray `json:"author"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + CreatedAt time.Time `json:"created_at"` + CreatedBy string `json:"created_by"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled SecurityDetectionsAPIIsRuleEnabled `json:"enabled"` + ExceptionsList []SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list"` + + // ExecutionSummary Summary of the last execution of a rule. + // > info + // > This field is under development and its usage or schema may change + ExecutionSummary *SecurityDetectionsAPIRuleExecutionSummary `json:"execution_summary,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From SecurityDetectionsAPIRuleIntervalFrom `json:"from"` + + // Id A universally unique identifier + Id SecurityDetectionsAPIRuleObjectId `json:"id"` + + // Immutable This field determines whether the rule is a prebuilt Elastic rule. It will be replaced with the `rule_source` field. 
+ // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Immutable SecurityDetectionsAPIIsRuleImmutable `json:"immutable"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval SecurityDetectionsAPIRuleInterval `json:"interval"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language SecurityDetectionsAPIKqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals SecurityDetectionsAPIMaxSignals `json:"max_signals"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. 
+ References SecurityDetectionsAPIRuleReferenceArray `json:"references"` + RelatedIntegrations SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations"` + RequiredFields SecurityDetectionsAPIRequiredFieldArray `json:"required_fields"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // Revision The rule's revision number. + // + // It represents the version of rule's object in Kibana. It is set to `0` when the rule is installed or created and then gets incremented on each update. + // > info + // > Not all updates to any rule fields will increment the revision. Only those fields that are considered static `rule parameters` can trigger revision increments. For example, an update to a rule's query or index fields will increment the rule's revision by `1`. However, changes to dynamic or technical fields like enabled or execution_summary will not cause revision increments. + Revision SecurityDetectionsAPIRuleRevision `json:"revision"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId SecurityDetectionsAPIRuleSignatureId `json:"rule_id"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // RuleSource Discriminated union that determines whether the rule is internally sourced (created within the Kibana app) or has an external source, such as the Elastic Prebuilt rules repo. + RuleSource SecurityDetectionsAPIRuleSource `json:"rule_source"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId SecurityDetectionsAPISavedQueryId `json:"saved_id"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. 
+ Setup SecurityDetectionsAPISetupGuide `json:"setup"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping SecurityDetectionsAPISeverityMapping `json:"severity_mapping"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags SecurityDetectionsAPIRuleTagArray `json:"tags"` + Threat SecurityDetectionsAPIThreatArray `json:"threat"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To SecurityDetectionsAPIRuleIntervalTo `json:"to"` + + // Type Rule type + Type SecurityDetectionsAPISavedQueryRuleType `json:"type"` + UpdatedAt time.Time `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version SecurityDetectionsAPIRuleVersion `json:"version"` +} + +// SecurityDetectionsAPISavedQueryRuleType Rule type +type SecurityDetectionsAPISavedQueryRuleType string + +// SecurityDetectionsAPISavedQueryRuleCreateFields defines model for Security_Detections_API_SavedQueryRuleCreateFields. +type SecurityDetectionsAPISavedQueryRuleCreateFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. 
Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId SecurityDetectionsAPISavedQueryId `json:"saved_id"` + + // Type Rule type + Type SecurityDetectionsAPISavedQueryRuleCreateFieldsType `json:"type"` +} + +// SecurityDetectionsAPISavedQueryRuleCreateFieldsType Rule type +type SecurityDetectionsAPISavedQueryRuleCreateFieldsType string + +// SecurityDetectionsAPISavedQueryRuleCreateProps defines model for Security_Detections_API_SavedQueryRuleCreateProps. +type SecurityDetectionsAPISavedQueryRuleCreateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. 
+ Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. 
Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId SecurityDetectionsAPISavedQueryId `json:"saved_id"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. 
+ Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPISavedQueryRuleCreatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPISavedQueryRuleCreatePropsType Rule type +type SecurityDetectionsAPISavedQueryRuleCreatePropsType string + +// SecurityDetectionsAPISavedQueryRuleDefaultableFields defines model for Security_Detections_API_SavedQueryRuleDefaultableFields. +type SecurityDetectionsAPISavedQueryRuleDefaultableFields struct { + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` +} + +// SecurityDetectionsAPISavedQueryRuleOptionalFields defines model for Security_Detections_API_SavedQueryRuleOptionalFields. +type SecurityDetectionsAPISavedQueryRuleOptionalFields struct { + // AlertSuppression Defines alert suppression configuration. 
+ AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` +} + +// SecurityDetectionsAPISavedQueryRulePatchFields defines model for Security_Detections_API_SavedQueryRulePatchFields. +type SecurityDetectionsAPISavedQueryRulePatchFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPISavedQueryRulePatchFieldsType `json:"type,omitempty"` +} + +// SecurityDetectionsAPISavedQueryRulePatchFieldsType Rule type +type SecurityDetectionsAPISavedQueryRulePatchFieldsType string + +// SecurityDetectionsAPISavedQueryRulePatchProps defines model for Security_Detections_API_SavedQueryRulePatchProps. 
+type SecurityDetectionsAPISavedQueryRulePatchProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description *SecurityDetectionsAPIRuleDescription `json:"description,omitempty"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. 
+ InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name *SecurityDetectionsAPIRuleName `json:"name,omitempty"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore *SecurityDetectionsAPIRiskScore `json:"risk_score,omitempty"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity *SecurityDetectionsAPISeverity `json:"severity,omitempty"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. 
The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPISavedQueryRulePatchPropsType `json:"type,omitempty"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPISavedQueryRulePatchPropsType Rule type +type SecurityDetectionsAPISavedQueryRulePatchPropsType string + +// SecurityDetectionsAPISavedQueryRuleRequiredFields defines model for Security_Detections_API_SavedQueryRuleRequiredFields. +type SecurityDetectionsAPISavedQueryRuleRequiredFields struct { + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId SecurityDetectionsAPISavedQueryId `json:"saved_id"` + + // Type Rule type + Type SecurityDetectionsAPISavedQueryRuleRequiredFieldsType `json:"type"` +} + +// SecurityDetectionsAPISavedQueryRuleRequiredFieldsType Rule type +type SecurityDetectionsAPISavedQueryRuleRequiredFieldsType string + +// SecurityDetectionsAPISavedQueryRuleResponseFields defines model for Security_Detections_API_SavedQueryRuleResponseFields. +type SecurityDetectionsAPISavedQueryRuleResponseFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language SecurityDetectionsAPIKqlQueryLanguage `json:"language"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. 
+ Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId SecurityDetectionsAPISavedQueryId `json:"saved_id"` + + // Type Rule type + Type SecurityDetectionsAPISavedQueryRuleResponseFieldsType `json:"type"` +} + +// SecurityDetectionsAPISavedQueryRuleResponseFieldsType Rule type +type SecurityDetectionsAPISavedQueryRuleResponseFieldsType string + +// SecurityDetectionsAPISavedQueryRuleUpdateProps defines model for Security_Detections_API_SavedQueryRuleUpdateProps. +type SecurityDetectionsAPISavedQueryRuleUpdateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. 
+ Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. 
Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId SecurityDetectionsAPISavedQueryId `json:"saved_id"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. 
When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPISavedQueryRuleUpdatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPISavedQueryRuleUpdatePropsType Rule type +type SecurityDetectionsAPISavedQueryRuleUpdatePropsType string + +// SecurityDetectionsAPISetAlertTags Object with list of tags to add and remove. +type SecurityDetectionsAPISetAlertTags struct { + // TagsToAdd List of keywords to organize related alerts into categories that you can filter and group. + TagsToAdd SecurityDetectionsAPIAlertTags `json:"tags_to_add"` + + // TagsToRemove List of keywords to organize related alerts into categories that you can filter and group. + TagsToRemove SecurityDetectionsAPIAlertTags `json:"tags_to_remove"` +} + +// SecurityDetectionsAPISetAlertsStatusByIds defines model for Security_Detections_API_SetAlertsStatusByIds. +type SecurityDetectionsAPISetAlertsStatusByIds struct { + // SignalIds List of alert ids. Use field `_id` on alert document or `kibana.alert.uuid`. Note: signals are a deprecated term for alerts. + SignalIds []string `json:"signal_ids"` + + // Status The status of an alert, which can be `open`, `acknowledged`, `in-progress`, or `closed`. + Status SecurityDetectionsAPIAlertStatus `json:"status"` +} + +// SecurityDetectionsAPISetAlertsStatusByQuery defines model for Security_Detections_API_SetAlertsStatusByQuery. +type SecurityDetectionsAPISetAlertsStatusByQuery struct { + Conflicts *SecurityDetectionsAPISetAlertsStatusByQueryConflicts `json:"conflicts,omitempty"` + Query map[string]interface{} `json:"query"` + + // Status The status of an alert, which can be `open`, `acknowledged`, `in-progress`, or `closed`. + Status SecurityDetectionsAPIAlertStatus `json:"status"` +} + +// SecurityDetectionsAPISetAlertsStatusByQueryConflicts defines model for SecurityDetectionsAPISetAlertsStatusByQuery.Conflicts. +type SecurityDetectionsAPISetAlertsStatusByQueryConflicts string + +// SecurityDetectionsAPISetupGuide Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. 
+type SecurityDetectionsAPISetupGuide = string + +// SecurityDetectionsAPISeverity Severity level of alerts produced by the rule, which must be one of the following: +// * `low`: Alerts that are of interest but generally not considered to be security incidents +// * `medium`: Alerts that require investigation +// * `high`: Alerts that require immediate investigation +// * `critical`: Alerts that indicate it is highly likely a security incident has occurred +type SecurityDetectionsAPISeverity string + +// SecurityDetectionsAPISeverityMapping Overrides generated alerts' severity with values from the source event +type SecurityDetectionsAPISeverityMapping = []struct { + // Field Source event field used to override the default `severity`. + Field string `json:"field"` + Operator SecurityDetectionsAPISeverityMappingOperator `json:"operator"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + Value string `json:"value"` +} + +// SecurityDetectionsAPISeverityMappingOperator defines model for SecurityDetectionsAPISeverityMapping.Operator. +type SecurityDetectionsAPISeverityMappingOperator string + +// SecurityDetectionsAPISiemErrorResponse defines model for Security_Detections_API_SiemErrorResponse. +type SecurityDetectionsAPISiemErrorResponse struct { + Message string `json:"message"` + StatusCode int `json:"status_code"` +} + +// SecurityDetectionsAPISkippedAlertsIndexMigration defines model for Security_Detections_API_SkippedAlertsIndexMigration. +type SecurityDetectionsAPISkippedAlertsIndexMigration struct { + Index string `json:"index"` +} + +// SecurityDetectionsAPISortOrder defines model for Security_Detections_API_SortOrder. +type SecurityDetectionsAPISortOrder string + +// SecurityDetectionsAPIThreat > info +// > Currently, only threats described using the MITRE ATT&CK™ framework are supported. +type SecurityDetectionsAPIThreat struct { + // Framework Relevant attack framework + Framework string `json:"framework"` + + // Tactic Object containing information on the attack type + Tactic SecurityDetectionsAPIThreatTactic `json:"tactic"` + + // Technique Array containing information on the attack techniques (optional) + Technique *[]SecurityDetectionsAPIThreatTechnique `json:"technique,omitempty"` +} + +// SecurityDetectionsAPIThreatArray defines model for Security_Detections_API_ThreatArray. +type SecurityDetectionsAPIThreatArray = []SecurityDetectionsAPIThreat + +// SecurityDetectionsAPIThreatFilters defines model for Security_Detections_API_ThreatFilters. +type SecurityDetectionsAPIThreatFilters = []interface{} + +// SecurityDetectionsAPIThreatIndex Elasticsearch indices used to check which field values generate alerts. +type SecurityDetectionsAPIThreatIndex = []string + +// SecurityDetectionsAPIThreatIndicatorPath Defines the path to the threat indicator in the indicator documents (optional) +type SecurityDetectionsAPIThreatIndicatorPath = string + +// SecurityDetectionsAPIThreatMapping Array of entries objects that define mappings between the source event fields and the values in the Elasticsearch threat index. 
Each entries object must contain these fields:
+//
+// - field: field from the event indices on which the rule runs
+// - type: must be mapping
+// - value: field from the Elasticsearch threat index
+//
+// You can use Boolean `and` and `or` logic to define the conditions for when matching fields and values generate alerts. Sibling entries objects are evaluated using `or` logic, whereas multiple entries in a single entries object use `and` logic. See Example of Threat Match rule which uses both `and` and `or` logic.
+type SecurityDetectionsAPIThreatMapping = []struct {
+ Entries []SecurityDetectionsAPIThreatMappingEntry `json:"entries"`
+}
+
+// SecurityDetectionsAPIThreatMappingEntry defines model for Security_Detections_API_ThreatMappingEntry.
+type SecurityDetectionsAPIThreatMappingEntry struct {
+ // Field A string that does not contain only whitespace characters
+ Field SecurityDetectionsAPINonEmptyString `json:"field"`
+ Negate *bool `json:"negate,omitempty"`
+ Type SecurityDetectionsAPIThreatMappingEntryType `json:"type"`
+
+ // Value A string that does not contain only whitespace characters
+ Value SecurityDetectionsAPINonEmptyString `json:"value"`
+}
+
+// SecurityDetectionsAPIThreatMappingEntryType defines model for SecurityDetectionsAPIThreatMappingEntry.Type.
+type SecurityDetectionsAPIThreatMappingEntryType string
+
+// SecurityDetectionsAPIThreatMatchRule defines model for Security_Detections_API_ThreatMatchRule.
+type SecurityDetectionsAPIThreatMatchRule struct {
+ // Actions Array defining the automated actions (notifications) taken when alerts are generated.
+ Actions []SecurityDetectionsAPIRuleAction `json:"actions"`
+
+ // AlertSuppression Defines alert suppression configuration.
+ AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"`
+ AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"`
+ AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"`
+
+ // Author The rule’s author.
+ Author SecurityDetectionsAPIRuleAuthorArray `json:"author"`
+
+ // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`.
+ // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts.
+ // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html).
+ BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"`
+ ConcurrentSearches *SecurityDetectionsAPIConcurrentSearches `json:"concurrent_searches,omitempty"`
+ CreatedAt time.Time `json:"created_at"`
+ CreatedBy string `json:"created_by"`
+ DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"`
+
+ // Description The rule’s description.
+ Description SecurityDetectionsAPIRuleDescription `json:"description"`
+
+ // Enabled Determines whether the rule is enabled. Defaults to true.
+ Enabled SecurityDetectionsAPIIsRuleEnabled `json:"enabled"`
+ ExceptionsList []SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list"`
+
+ // ExecutionSummary Summary of the last execution of a rule.
+ // > info + // > This field is under development and its usage or schema may change + ExecutionSummary *SecurityDetectionsAPIRuleExecutionSummary `json:"execution_summary,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From SecurityDetectionsAPIRuleIntervalFrom `json:"from"` + + // Id A universally unique identifier + Id SecurityDetectionsAPIRuleObjectId `json:"id"` + + // Immutable This field determines whether the rule is a prebuilt Elastic rule. It will be replaced with the `rule_source` field. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Immutable SecurityDetectionsAPIIsRuleImmutable `json:"immutable"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval SecurityDetectionsAPIRuleInterval `json:"interval"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + ItemsPerSearch *SecurityDetectionsAPIItemsPerSearch `json:"items_per_search,omitempty"` + Language SecurityDetectionsAPIKqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals SecurityDetectionsAPIMaxSignals `json:"max_signals"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. 
+ Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References SecurityDetectionsAPIRuleReferenceArray `json:"references"` + RelatedIntegrations SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations"` + RequiredFields SecurityDetectionsAPIRequiredFieldArray `json:"required_fields"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // Revision The rule's revision number. + // + // It represents the version of rule's object in Kibana. It is set to `0` when the rule is installed or created and then gets incremented on each update. + // > info + // > Not all updates to any rule fields will increment the revision. Only those fields that are considered static `rule parameters` can trigger revision increments. For example, an update to a rule's query or index fields will increment the rule's revision by `1`. However, changes to dynamic or technical fields like enabled or execution_summary will not cause revision increments. + Revision SecurityDetectionsAPIRuleRevision `json:"revision"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId SecurityDetectionsAPIRuleSignatureId `json:"rule_id"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). 
When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // RuleSource Discriminated union that determines whether the rule is internally sourced (created within the Kibana app) or has an external source, such as the Elastic Prebuilt rules repo. + RuleSource SecurityDetectionsAPIRuleSource `json:"rule_source"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup SecurityDetectionsAPISetupGuide `json:"setup"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping SecurityDetectionsAPISeverityMapping `json:"severity_mapping"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags SecurityDetectionsAPIRuleTagArray `json:"tags"` + Threat SecurityDetectionsAPIThreatArray `json:"threat"` + ThreatFilters *SecurityDetectionsAPIThreatFilters `json:"threat_filters,omitempty"` + + // ThreatIndex Elasticsearch indices used to check which field values generate alerts. + ThreatIndex SecurityDetectionsAPIThreatIndex `json:"threat_index"` + + // ThreatIndicatorPath Defines the path to the threat indicator in the indicator documents (optional) + ThreatIndicatorPath *SecurityDetectionsAPIThreatIndicatorPath `json:"threat_indicator_path,omitempty"` + ThreatLanguage *SecurityDetectionsAPIKqlQueryLanguage `json:"threat_language,omitempty"` + + // ThreatMapping Array of entries objects that define mappings between the source event fields and the values in the Elasticsearch threat index. Each entries object must contain these fields: + // + // - field: field from the event indices on which the rule runs + // - type: must be mapping + // - value: field from the Elasticsearch threat index + // + // You can use Boolean and and or logic to define the conditions for when matching fields and values generate alerts. Sibling entries objects are evaluated using or logic, whereas multiple entries in a single entries object use and logic. See Example of Threat Match rule which uses both `and` and `or` logic. + ThreatMapping SecurityDetectionsAPIThreatMapping `json:"threat_mapping"` + + // ThreatQuery Query used to determine which fields in the Elasticsearch index are used for generating alerts. + ThreatQuery SecurityDetectionsAPIThreatQuery `json:"threat_query"` + + // Throttle Defines how often rule actions are taken. 
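+	// As an illustration only (value assumed, not quoted from the spec text above),
+	// a request might set "throttle": "1h" so that rule actions run at most once per hour.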
+ Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To SecurityDetectionsAPIRuleIntervalTo `json:"to"` + + // Type Rule type + Type SecurityDetectionsAPIThreatMatchRuleType `json:"type"` + UpdatedAt time.Time `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version SecurityDetectionsAPIRuleVersion `json:"version"` +} + +// SecurityDetectionsAPIThreatMatchRuleType Rule type +type SecurityDetectionsAPIThreatMatchRuleType string + +// SecurityDetectionsAPIThreatMatchRuleCreateFields defines model for Security_Detections_API_ThreatMatchRuleCreateFields. +type SecurityDetectionsAPIThreatMatchRuleCreateFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + ConcurrentSearches *SecurityDetectionsAPIConcurrentSearches `json:"concurrent_searches,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + ItemsPerSearch *SecurityDetectionsAPIItemsPerSearch `json:"items_per_search,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. 
+ Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + ThreatFilters *SecurityDetectionsAPIThreatFilters `json:"threat_filters,omitempty"` + + // ThreatIndex Elasticsearch indices used to check which field values generate alerts. + ThreatIndex SecurityDetectionsAPIThreatIndex `json:"threat_index"` + + // ThreatIndicatorPath Defines the path to the threat indicator in the indicator documents (optional) + ThreatIndicatorPath *SecurityDetectionsAPIThreatIndicatorPath `json:"threat_indicator_path,omitempty"` + ThreatLanguage *SecurityDetectionsAPIKqlQueryLanguage `json:"threat_language,omitempty"` + + // ThreatMapping Array of entries objects that define mappings between the source event fields and the values in the Elasticsearch threat index. Each entries object must contain these fields: + // + // - field: field from the event indices on which the rule runs + // - type: must be mapping + // - value: field from the Elasticsearch threat index + // + // You can use Boolean and and or logic to define the conditions for when matching fields and values generate alerts. Sibling entries objects are evaluated using or logic, whereas multiple entries in a single entries object use and logic. See Example of Threat Match rule which uses both `and` and `or` logic. + ThreatMapping SecurityDetectionsAPIThreatMapping `json:"threat_mapping"` + + // ThreatQuery Query used to determine which fields in the Elasticsearch index are used for generating alerts. + ThreatQuery SecurityDetectionsAPIThreatQuery `json:"threat_query"` + + // Type Rule type + Type SecurityDetectionsAPIThreatMatchRuleCreateFieldsType `json:"type"` +} + +// SecurityDetectionsAPIThreatMatchRuleCreateFieldsType Rule type +type SecurityDetectionsAPIThreatMatchRuleCreateFieldsType string + +// SecurityDetectionsAPIThreatMatchRuleCreateProps defines model for Security_Detections_API_ThreatMatchRuleCreateProps. +type SecurityDetectionsAPIThreatMatchRuleCreateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + ConcurrentSearches *SecurityDetectionsAPIConcurrentSearches `json:"concurrent_searches,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. 
+ Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + ItemsPerSearch *SecurityDetectionsAPIItemsPerSearch `json:"items_per_search,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. 
+ Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. 
+ SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + ThreatFilters *SecurityDetectionsAPIThreatFilters `json:"threat_filters,omitempty"` + + // ThreatIndex Elasticsearch indices used to check which field values generate alerts. + ThreatIndex SecurityDetectionsAPIThreatIndex `json:"threat_index"` + + // ThreatIndicatorPath Defines the path to the threat indicator in the indicator documents (optional) + ThreatIndicatorPath *SecurityDetectionsAPIThreatIndicatorPath `json:"threat_indicator_path,omitempty"` + ThreatLanguage *SecurityDetectionsAPIKqlQueryLanguage `json:"threat_language,omitempty"` + + // ThreatMapping Array of entries objects that define mappings between the source event fields and the values in the Elasticsearch threat index. Each entries object must contain these fields: + // + // - field: field from the event indices on which the rule runs + // - type: must be mapping + // - value: field from the Elasticsearch threat index + // + // You can use Boolean and and or logic to define the conditions for when matching fields and values generate alerts. Sibling entries objects are evaluated using or logic, whereas multiple entries in a single entries object use and logic. See Example of Threat Match rule which uses both `and` and `or` logic. + ThreatMapping SecurityDetectionsAPIThreatMapping `json:"threat_mapping"` + + // ThreatQuery Query used to determine which fields in the Elasticsearch index are used for generating alerts. + ThreatQuery SecurityDetectionsAPIThreatQuery `json:"threat_query"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. 
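+	// As an illustration only (assuming the source events carry an ingest timestamp),
+	// a common choice is: "timestamp_override": "event.ingested"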
+ TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIThreatMatchRuleCreatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIThreatMatchRuleCreatePropsType Rule type +type SecurityDetectionsAPIThreatMatchRuleCreatePropsType string + +// SecurityDetectionsAPIThreatMatchRuleDefaultableFields defines model for Security_Detections_API_ThreatMatchRuleDefaultableFields. +type SecurityDetectionsAPIThreatMatchRuleDefaultableFields struct { + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` +} + +// SecurityDetectionsAPIThreatMatchRuleOptionalFields defines model for Security_Detections_API_ThreatMatchRuleOptionalFields. +type SecurityDetectionsAPIThreatMatchRuleOptionalFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + ConcurrentSearches *SecurityDetectionsAPIConcurrentSearches `json:"concurrent_searches,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + ItemsPerSearch *SecurityDetectionsAPIItemsPerSearch `json:"items_per_search,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + ThreatFilters *SecurityDetectionsAPIThreatFilters `json:"threat_filters,omitempty"` + + // ThreatIndicatorPath Defines the path to the threat indicator in the indicator documents (optional) + ThreatIndicatorPath *SecurityDetectionsAPIThreatIndicatorPath `json:"threat_indicator_path,omitempty"` + ThreatLanguage *SecurityDetectionsAPIKqlQueryLanguage `json:"threat_language,omitempty"` +} + +// SecurityDetectionsAPIThreatMatchRulePatchFields defines model for Security_Detections_API_ThreatMatchRulePatchFields. 
+type SecurityDetectionsAPIThreatMatchRulePatchFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + ConcurrentSearches *SecurityDetectionsAPIConcurrentSearches `json:"concurrent_searches,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + ItemsPerSearch *SecurityDetectionsAPIItemsPerSearch `json:"items_per_search,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + ThreatFilters *SecurityDetectionsAPIThreatFilters `json:"threat_filters,omitempty"` + + // ThreatIndex Elasticsearch indices used to check which field values generate alerts. + ThreatIndex *SecurityDetectionsAPIThreatIndex `json:"threat_index,omitempty"` + + // ThreatIndicatorPath Defines the path to the threat indicator in the indicator documents (optional) + ThreatIndicatorPath *SecurityDetectionsAPIThreatIndicatorPath `json:"threat_indicator_path,omitempty"` + ThreatLanguage *SecurityDetectionsAPIKqlQueryLanguage `json:"threat_language,omitempty"` + + // ThreatMapping Array of entries objects that define mappings between the source event fields and the values in the Elasticsearch threat index. Each entries object must contain these fields: + // + // - field: field from the event indices on which the rule runs + // - type: must be mapping + // - value: field from the Elasticsearch threat index + // + // You can use Boolean and and or logic to define the conditions for when matching fields and values generate alerts. Sibling entries objects are evaluated using or logic, whereas multiple entries in a single entries object use and logic. See Example of Threat Match rule which uses both `and` and `or` logic. + ThreatMapping *SecurityDetectionsAPIThreatMapping `json:"threat_mapping,omitempty"` + + // ThreatQuery Query used to determine which fields in the Elasticsearch index are used for generating alerts. 
+ ThreatQuery *SecurityDetectionsAPIThreatQuery `json:"threat_query,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIThreatMatchRulePatchFieldsType `json:"type,omitempty"` +} + +// SecurityDetectionsAPIThreatMatchRulePatchFieldsType Rule type +type SecurityDetectionsAPIThreatMatchRulePatchFieldsType string + +// SecurityDetectionsAPIThreatMatchRulePatchProps defines model for Security_Detections_API_ThreatMatchRulePatchProps. +type SecurityDetectionsAPIThreatMatchRulePatchProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + ConcurrentSearches *SecurityDetectionsAPIConcurrentSearches `json:"concurrent_searches,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description *SecurityDetectionsAPIRuleDescription `json:"description,omitempty"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. 
+ Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + ItemsPerSearch *SecurityDetectionsAPIItemsPerSearch `json:"items_per_search,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name *SecurityDetectionsAPIRuleName `json:"name,omitempty"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. 
+ // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore *SecurityDetectionsAPIRiskScore `json:"risk_score,omitempty"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity *SecurityDetectionsAPISeverity `json:"severity,omitempty"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + ThreatFilters *SecurityDetectionsAPIThreatFilters `json:"threat_filters,omitempty"` + + // ThreatIndex Elasticsearch indices used to check which field values generate alerts. 
+ ThreatIndex *SecurityDetectionsAPIThreatIndex `json:"threat_index,omitempty"` + + // ThreatIndicatorPath Defines the path to the threat indicator in the indicator documents (optional) + ThreatIndicatorPath *SecurityDetectionsAPIThreatIndicatorPath `json:"threat_indicator_path,omitempty"` + ThreatLanguage *SecurityDetectionsAPIKqlQueryLanguage `json:"threat_language,omitempty"` + + // ThreatMapping Array of entries objects that define mappings between the source event fields and the values in the Elasticsearch threat index. Each entries object must contain these fields: + // + // - field: field from the event indices on which the rule runs + // - type: must be mapping + // - value: field from the Elasticsearch threat index + // + // You can use Boolean and and or logic to define the conditions for when matching fields and values generate alerts. Sibling entries objects are evaluated using or logic, whereas multiple entries in a single entries object use and logic. See Example of Threat Match rule which uses both `and` and `or` logic. + ThreatMapping *SecurityDetectionsAPIThreatMapping `json:"threat_mapping,omitempty"` + + // ThreatQuery Query used to determine which fields in the Elasticsearch index are used for generating alerts. + ThreatQuery *SecurityDetectionsAPIThreatQuery `json:"threat_query,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIThreatMatchRulePatchPropsType `json:"type,omitempty"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIThreatMatchRulePatchPropsType Rule type +type SecurityDetectionsAPIThreatMatchRulePatchPropsType string + +// SecurityDetectionsAPIThreatMatchRuleRequiredFields defines model for Security_Detections_API_ThreatMatchRuleRequiredFields. +type SecurityDetectionsAPIThreatMatchRuleRequiredFields struct { + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. 
+ // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // ThreatIndex Elasticsearch indices used to check which field values generate alerts. + ThreatIndex SecurityDetectionsAPIThreatIndex `json:"threat_index"` + + // ThreatMapping Array of entries objects that define mappings between the source event fields and the values in the Elasticsearch threat index. Each entries object must contain these fields: + // + // - field: field from the event indices on which the rule runs + // - type: must be mapping + // - value: field from the Elasticsearch threat index + // + // You can use Boolean and and or logic to define the conditions for when matching fields and values generate alerts. Sibling entries objects are evaluated using or logic, whereas multiple entries in a single entries object use and logic. See Example of Threat Match rule which uses both `and` and `or` logic. + ThreatMapping SecurityDetectionsAPIThreatMapping `json:"threat_mapping"` + + // ThreatQuery Query used to determine which fields in the Elasticsearch index are used for generating alerts. + ThreatQuery SecurityDetectionsAPIThreatQuery `json:"threat_query"` + + // Type Rule type + Type SecurityDetectionsAPIThreatMatchRuleRequiredFieldsType `json:"type"` +} + +// SecurityDetectionsAPIThreatMatchRuleRequiredFieldsType Rule type +type SecurityDetectionsAPIThreatMatchRuleRequiredFieldsType string + +// SecurityDetectionsAPIThreatMatchRuleResponseFields defines model for Security_Detections_API_ThreatMatchRuleResponseFields. +type SecurityDetectionsAPIThreatMatchRuleResponseFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + ConcurrentSearches *SecurityDetectionsAPIConcurrentSearches `json:"concurrent_searches,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + ItemsPerSearch *SecurityDetectionsAPIItemsPerSearch `json:"items_per_search,omitempty"` + Language SecurityDetectionsAPIKqlQueryLanguage `json:"language"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. 
+ SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + ThreatFilters *SecurityDetectionsAPIThreatFilters `json:"threat_filters,omitempty"` + + // ThreatIndex Elasticsearch indices used to check which field values generate alerts. + ThreatIndex SecurityDetectionsAPIThreatIndex `json:"threat_index"` + + // ThreatIndicatorPath Defines the path to the threat indicator in the indicator documents (optional) + ThreatIndicatorPath *SecurityDetectionsAPIThreatIndicatorPath `json:"threat_indicator_path,omitempty"` + ThreatLanguage *SecurityDetectionsAPIKqlQueryLanguage `json:"threat_language,omitempty"` + + // ThreatMapping Array of entries objects that define mappings between the source event fields and the values in the Elasticsearch threat index. Each entries object must contain these fields: + // + // - field: field from the event indices on which the rule runs + // - type: must be mapping + // - value: field from the Elasticsearch threat index + // + // You can use Boolean and and or logic to define the conditions for when matching fields and values generate alerts. Sibling entries objects are evaluated using or logic, whereas multiple entries in a single entries object use and logic. See Example of Threat Match rule which uses both `and` and `or` logic. + ThreatMapping SecurityDetectionsAPIThreatMapping `json:"threat_mapping"` + + // ThreatQuery Query used to determine which fields in the Elasticsearch index are used for generating alerts. + ThreatQuery SecurityDetectionsAPIThreatQuery `json:"threat_query"` + + // Type Rule type + Type SecurityDetectionsAPIThreatMatchRuleResponseFieldsType `json:"type"` +} + +// SecurityDetectionsAPIThreatMatchRuleResponseFieldsType Rule type +type SecurityDetectionsAPIThreatMatchRuleResponseFieldsType string + +// SecurityDetectionsAPIThreatMatchRuleUpdateProps defines model for Security_Detections_API_ThreatMatchRuleUpdateProps. +type SecurityDetectionsAPIThreatMatchRuleUpdateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + ConcurrentSearches *SecurityDetectionsAPIConcurrentSearches `json:"concurrent_searches,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. 
+ Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + ItemsPerSearch *SecurityDetectionsAPIItemsPerSearch `json:"items_per_search,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. 
+ Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. 
+ SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + ThreatFilters *SecurityDetectionsAPIThreatFilters `json:"threat_filters,omitempty"` + + // ThreatIndex Elasticsearch indices used to check which field values generate alerts. + ThreatIndex SecurityDetectionsAPIThreatIndex `json:"threat_index"` + + // ThreatIndicatorPath Defines the path to the threat indicator in the indicator documents (optional) + ThreatIndicatorPath *SecurityDetectionsAPIThreatIndicatorPath `json:"threat_indicator_path,omitempty"` + ThreatLanguage *SecurityDetectionsAPIKqlQueryLanguage `json:"threat_language,omitempty"` + + // ThreatMapping Array of entries objects that define mappings between the source event fields and the values in the Elasticsearch threat index. Each entries object must contain these fields: + // + // - field: field from the event indices on which the rule runs + // - type: must be mapping + // - value: field from the Elasticsearch threat index + // + // You can use Boolean and and or logic to define the conditions for when matching fields and values generate alerts. Sibling entries objects are evaluated using or logic, whereas multiple entries in a single entries object use and logic. See Example of Threat Match rule which uses both `and` and `or` logic. + ThreatMapping SecurityDetectionsAPIThreatMapping `json:"threat_mapping"` + + // ThreatQuery Query used to determine which fields in the Elasticsearch index are used for generating alerts. + ThreatQuery SecurityDetectionsAPIThreatQuery `json:"threat_query"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. 
+ TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIThreatMatchRuleUpdatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIThreatMatchRuleUpdatePropsType Rule type +type SecurityDetectionsAPIThreatMatchRuleUpdatePropsType string + +// SecurityDetectionsAPIThreatQuery Query used to determine which fields in the Elasticsearch index are used for generating alerts. +type SecurityDetectionsAPIThreatQuery = string + +// SecurityDetectionsAPIThreatSubtechnique defines model for Security_Detections_API_ThreatSubtechnique. +type SecurityDetectionsAPIThreatSubtechnique struct { + // Id Subtechnique ID + Id string `json:"id"` + + // Name Subtechnique name + Name string `json:"name"` + + // Reference Subtechnique reference + Reference string `json:"reference"` +} + +// SecurityDetectionsAPIThreatTactic Object containing information on the attack type +type SecurityDetectionsAPIThreatTactic struct { + // Id Tactic ID + Id string `json:"id"` + + // Name Tactic name + Name string `json:"name"` + + // Reference Tactic reference + Reference string `json:"reference"` +} + +// SecurityDetectionsAPIThreatTechnique defines model for Security_Detections_API_ThreatTechnique. +type SecurityDetectionsAPIThreatTechnique struct { + // Id Technique ID + Id string `json:"id"` + + // Name Technique name + Name string `json:"name"` + + // Reference Technique reference + Reference string `json:"reference"` + + // Subtechnique Array containing more specific information on the attack technique. + Subtechnique *[]SecurityDetectionsAPIThreatSubtechnique `json:"subtechnique,omitempty"` +} + +// SecurityDetectionsAPIThreshold defines model for Security_Detections_API_Threshold. +type SecurityDetectionsAPIThreshold struct { + // Cardinality The field on which the cardinality is applied. + Cardinality *SecurityDetectionsAPIThresholdCardinality `json:"cardinality,omitempty"` + + // Field The field on which the threshold is applied. If you specify an empty array ([]), alerts are generated when the query returns at least the number of results specified in the value field. + Field SecurityDetectionsAPIThresholdField `json:"field"` + + // Value The threshold value from which an alert is generated. + Value SecurityDetectionsAPIThresholdValue `json:"value"` +} + +// SecurityDetectionsAPIThresholdAlertSuppression Defines alert suppression configuration. +type SecurityDetectionsAPIThresholdAlertSuppression struct { + Duration SecurityDetectionsAPIAlertSuppressionDuration `json:"duration"` +} + +// SecurityDetectionsAPIThresholdCardinality The field on which the cardinality is applied. 
+type SecurityDetectionsAPIThresholdCardinality = []struct { + // Field The field on which to calculate and compare the cardinality. + Field string `json:"field"` + + // Value The threshold value from which an alert is generated based on unique number of values of cardinality.field. + Value int `json:"value"` +} + +// SecurityDetectionsAPIThresholdField The field on which the threshold is applied. If you specify an empty array ([]), alerts are generated when the query returns at least the number of results specified in the value field. +type SecurityDetectionsAPIThresholdField struct { + union json.RawMessage +} + +// SecurityDetectionsAPIThresholdField0 defines model for . +type SecurityDetectionsAPIThresholdField0 = string + +// SecurityDetectionsAPIThresholdField1 defines model for . +type SecurityDetectionsAPIThresholdField1 = []string + +// SecurityDetectionsAPIThresholdRule defines model for Security_Detections_API_ThresholdRule. +type SecurityDetectionsAPIThresholdRule struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions []SecurityDetectionsAPIRuleAction `json:"actions"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIThresholdAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author SecurityDetectionsAPIRuleAuthorArray `json:"author"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + CreatedAt time.Time `json:"created_at"` + CreatedBy string `json:"created_by"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled SecurityDetectionsAPIIsRuleEnabled `json:"enabled"` + ExceptionsList []SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list"` + + // ExecutionSummary Summary of the last execution of a rule. + // > info + // > This field is under development and its usage or schema may change + ExecutionSummary *SecurityDetectionsAPIRuleExecutionSummary `json:"execution_summary,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. 
For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From SecurityDetectionsAPIRuleIntervalFrom `json:"from"` + + // Id A universally unique identifier + Id SecurityDetectionsAPIRuleObjectId `json:"id"` + + // Immutable This field determines whether the rule is a prebuilt Elastic rule. It will be replaced with the `rule_source` field. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Immutable SecurityDetectionsAPIIsRuleImmutable `json:"immutable"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval SecurityDetectionsAPIRuleInterval `json:"interval"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language SecurityDetectionsAPIKqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals SecurityDetectionsAPIMaxSignals `json:"max_signals"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. 
+ // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References SecurityDetectionsAPIRuleReferenceArray `json:"references"` + RelatedIntegrations SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations"` + RequiredFields SecurityDetectionsAPIRequiredFieldArray `json:"required_fields"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // Revision The rule's revision number. + // + // It represents the version of rule's object in Kibana. It is set to `0` when the rule is installed or created and then gets incremented on each update. + // > info + // > Not all updates to any rule fields will increment the revision. Only those fields that are considered static `rule parameters` can trigger revision increments. For example, an update to a rule's query or index fields will increment the rule's revision by `1`. However, changes to dynamic or technical fields like enabled or execution_summary will not cause revision increments. + Revision SecurityDetectionsAPIRuleRevision `json:"revision"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId SecurityDetectionsAPIRuleSignatureId `json:"rule_id"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // RuleSource Discriminated union that determines whether the rule is internally sourced (created within the Kibana app) or has an external source, such as the Elastic Prebuilt rules repo. + RuleSource SecurityDetectionsAPIRuleSource `json:"rule_source"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. 
+ Setup SecurityDetectionsAPISetupGuide `json:"setup"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping SecurityDetectionsAPISeverityMapping `json:"severity_mapping"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags SecurityDetectionsAPIRuleTagArray `json:"tags"` + Threat SecurityDetectionsAPIThreatArray `json:"threat"` + Threshold SecurityDetectionsAPIThreshold `json:"threshold"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To SecurityDetectionsAPIRuleIntervalTo `json:"to"` + + // Type Rule type + Type SecurityDetectionsAPIThresholdRuleType `json:"type"` + UpdatedAt time.Time `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version SecurityDetectionsAPIRuleVersion `json:"version"` +} + +// SecurityDetectionsAPIThresholdRuleType Rule type +type SecurityDetectionsAPIThresholdRuleType string + +// SecurityDetectionsAPIThresholdRuleCreateFields defines model for Security_Detections_API_ThresholdRuleCreateFields. +type SecurityDetectionsAPIThresholdRuleCreateFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIThresholdAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. 
Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + Threshold SecurityDetectionsAPIThreshold `json:"threshold"` + + // Type Rule type + Type SecurityDetectionsAPIThresholdRuleCreateFieldsType `json:"type"` +} + +// SecurityDetectionsAPIThresholdRuleCreateFieldsType Rule type +type SecurityDetectionsAPIThresholdRuleCreateFieldsType string + +// SecurityDetectionsAPIThresholdRuleCreateProps defines model for Security_Detections_API_ThresholdRuleCreateProps. +type SecurityDetectionsAPIThresholdRuleCreateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIThresholdAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. 
+ // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. 
Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. 
+ Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + Threshold SecurityDetectionsAPIThreshold `json:"threshold"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIThresholdRuleCreatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIThresholdRuleCreatePropsType Rule type +type SecurityDetectionsAPIThresholdRuleCreatePropsType string + +// SecurityDetectionsAPIThresholdRuleDefaultableFields defines model for Security_Detections_API_ThresholdRuleDefaultableFields. +type SecurityDetectionsAPIThresholdRuleDefaultableFields struct { + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` +} + +// SecurityDetectionsAPIThresholdRuleOptionalFields defines model for Security_Detections_API_ThresholdRuleOptionalFields. +type SecurityDetectionsAPIThresholdRuleOptionalFields struct { + // AlertSuppression Defines alert suppression configuration. 
+ AlertSuppression *SecurityDetectionsAPIThresholdAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` +} + +// SecurityDetectionsAPIThresholdRulePatchFields defines model for Security_Detections_API_ThresholdRulePatchFields. +type SecurityDetectionsAPIThresholdRulePatchFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIThresholdAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + Threshold *SecurityDetectionsAPIThreshold `json:"threshold,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIThresholdRulePatchFieldsType `json:"type,omitempty"` +} + +// SecurityDetectionsAPIThresholdRulePatchFieldsType Rule type +type SecurityDetectionsAPIThresholdRulePatchFieldsType string + +// SecurityDetectionsAPIThresholdRulePatchProps defines model for Security_Detections_API_ThresholdRulePatchProps. +type SecurityDetectionsAPIThresholdRulePatchProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. 
+ Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIThresholdAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description *SecurityDetectionsAPIRuleDescription `json:"description,omitempty"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. 
+ License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name *SecurityDetectionsAPIRuleName `json:"name,omitempty"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore *SecurityDetectionsAPIRiskScore `json:"risk_score,omitempty"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity *SecurityDetectionsAPISeverity `json:"severity,omitempty"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + Threshold *SecurityDetectionsAPIThreshold `json:"threshold,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. 
When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type *SecurityDetectionsAPIThresholdRulePatchPropsType `json:"type,omitempty"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIThresholdRulePatchPropsType Rule type +type SecurityDetectionsAPIThresholdRulePatchPropsType string + +// SecurityDetectionsAPIThresholdRuleRequiredFields defines model for Security_Detections_API_ThresholdRuleRequiredFields. +type SecurityDetectionsAPIThresholdRuleRequiredFields struct { + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + Threshold SecurityDetectionsAPIThreshold `json:"threshold"` + + // Type Rule type + Type SecurityDetectionsAPIThresholdRuleRequiredFieldsType `json:"type"` +} + +// SecurityDetectionsAPIThresholdRuleRequiredFieldsType Rule type +type SecurityDetectionsAPIThresholdRuleRequiredFieldsType string + +// SecurityDetectionsAPIThresholdRuleResponseFields defines model for Security_Detections_API_ThresholdRuleResponseFields. +type SecurityDetectionsAPIThresholdRuleResponseFields struct { + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIThresholdAlertSuppression `json:"alert_suppression,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + Language SecurityDetectionsAPIKqlQueryLanguage `json:"language"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. 
+ // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + Threshold SecurityDetectionsAPIThreshold `json:"threshold"` + + // Type Rule type + Type SecurityDetectionsAPIThresholdRuleResponseFieldsType `json:"type"` +} + +// SecurityDetectionsAPIThresholdRuleResponseFieldsType Rule type +type SecurityDetectionsAPIThresholdRuleResponseFieldsType string + +// SecurityDetectionsAPIThresholdRuleUpdateProps defines model for Security_Detections_API_ThresholdRuleUpdateProps. +type SecurityDetectionsAPIThresholdRuleUpdateProps struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIThresholdAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). 
+ From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Id A universally unique identifier + Id *SecurityDetectionsAPIRuleObjectId `json:"id,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. 
+ References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. 
+ Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + Threshold SecurityDetectionsAPIThreshold `json:"threshold"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type SecurityDetectionsAPIThresholdRuleUpdatePropsType `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// SecurityDetectionsAPIThresholdRuleUpdatePropsType Rule type +type SecurityDetectionsAPIThresholdRuleUpdatePropsType string + +// SecurityDetectionsAPIThresholdValue The threshold value from which an alert is generated. +type SecurityDetectionsAPIThresholdValue = int + +// SecurityDetectionsAPIThrottleForBulkActions Defines the maximum interval in which a rule’s actions are executed. +// > info +// > The rule level `throttle` field is deprecated in Elastic Security 8.8 and will remain active for at least the next 12 months. +// > In Elastic Security 8.8 and later, you can use the `frequency` field to define frequencies for individual actions. Actions without frequencies will acquire a converted version of the rule’s `throttle` field. In the response, the converted `throttle` setting appears in the individual actions' `frequency` field. +type SecurityDetectionsAPIThrottleForBulkActions string + +// SecurityDetectionsAPITiebreakerField Sets a secondary field for sorting events +type SecurityDetectionsAPITiebreakerField = string + +// SecurityDetectionsAPITimelineTemplateId Timeline template ID +type SecurityDetectionsAPITimelineTemplateId = string + +// SecurityDetectionsAPITimelineTemplateTitle Timeline template title +type SecurityDetectionsAPITimelineTemplateTitle = string + +// SecurityDetectionsAPITimestampField Specifies the name of the event timestamp field used for sorting a sequence of events. Not to be confused with `timestamp_override`, which specifies the more general field used for querying events within a range. Defaults to the @timestamp ECS field. 
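+// (Editorial note, not generated from the OpenAPI spec: as an illustration only, an EQL sequence
+// rule could point this at a field such as `event.ingested` to sort events by ingest time instead
+// of the default `@timestamp`.)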
+type SecurityDetectionsAPITimestampField = string + +// SecurityDetectionsAPITimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. +type SecurityDetectionsAPITimestampOverride = string + +// SecurityDetectionsAPITimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field +type SecurityDetectionsAPITimestampOverrideFallbackDisabled = bool + +// SecurityDetectionsAPIUUID A universally unique identifier +type SecurityDetectionsAPIUUID = openapi_types.UUID + +// SecurityDetectionsAPIWarningSchema defines model for Security_Detections_API_WarningSchema. +type SecurityDetectionsAPIWarningSchema struct { + ActionPath string `json:"actionPath"` + ButtonLabel *string `json:"buttonLabel,omitempty"` + Message string `json:"message"` + Type string `json:"type"` +} + +// SecurityEndpointExceptionsAPIEndpointList defines model for Security_Endpoint_Exceptions_API_EndpointList. +type SecurityEndpointExceptionsAPIEndpointList struct { + union json.RawMessage +} + +// SecurityEndpointExceptionsAPIEndpointList1 defines model for . +type SecurityEndpointExceptionsAPIEndpointList1 = map[string]interface{} + +// SecurityEndpointExceptionsAPIEndpointListItem defines model for Security_Endpoint_Exceptions_API_EndpointListItem. +type SecurityEndpointExceptionsAPIEndpointListItem = SecurityEndpointExceptionsAPIExceptionListItem + +// SecurityEndpointExceptionsAPIExceptionList defines model for Security_Endpoint_Exceptions_API_ExceptionList. +type SecurityEndpointExceptionsAPIExceptionList struct { + // UnderscoreVersion The version id, normally returned by the API when the item was retrieved. Use it ensure updates are done against the latest version. + UnderscoreVersion *string `json:"_version,omitempty"` + + // CreatedAt Autogenerated date of object creation. + CreatedAt time.Time `json:"created_at"` + + // CreatedBy Autogenerated value - user that created object. + CreatedBy string `json:"created_by"` + + // Description Describes the exception list. + Description SecurityEndpointExceptionsAPIExceptionListDescription `json:"description"` + + // Id Exception list's identifier. + Id SecurityEndpointExceptionsAPIExceptionListId `json:"id"` + Immutable bool `json:"immutable"` + + // ListId The exception list's human readable string identifier, `endpoint_list`. + ListId SecurityEndpointExceptionsAPIExceptionListHumanId `json:"list_id"` + + // Meta Placeholder for metadata about the list container. + Meta *SecurityEndpointExceptionsAPIExceptionListMeta `json:"meta,omitempty"` + + // Name The name of the exception list. + Name SecurityEndpointExceptionsAPIExceptionListName `json:"name"` + + // NamespaceType Determines whether the exception container is available in all Kibana spaces or just the space + // in which it is created, where: + // + // - `single`: Only available in the Kibana space in which it is created. + // - `agnostic`: Available in all Kibana spaces. + NamespaceType SecurityEndpointExceptionsAPIExceptionNamespaceType `json:"namespace_type"` + + // OsTypes Use this field to specify the operating system. Only enter one value. + OsTypes *SecurityEndpointExceptionsAPIExceptionListOsTypeArray `json:"os_types,omitempty"` + + // Tags String array containing words and phrases to help categorize exception containers. 
+ Tags *SecurityEndpointExceptionsAPIExceptionListTags `json:"tags,omitempty"` + + // TieBreakerId Field used in search to ensure all containers are sorted and returned correctly. + TieBreakerId string `json:"tie_breaker_id"` + + // Type The type of exception list to be created. Different list types may denote where they can be utilized. + Type SecurityEndpointExceptionsAPIExceptionListType `json:"type"` + + // UpdatedAt Autogenerated date of last object update. + UpdatedAt time.Time `json:"updated_at"` + + // UpdatedBy Autogenerated value - user that last updated object. + UpdatedBy string `json:"updated_by"` + + // Version The document version, automatically increasd on updates. + Version SecurityEndpointExceptionsAPIExceptionListVersion `json:"version"` +} + +// SecurityEndpointExceptionsAPIExceptionListDescription Describes the exception list. +type SecurityEndpointExceptionsAPIExceptionListDescription = string + +// SecurityEndpointExceptionsAPIExceptionListHumanId The exception list's human readable string identifier, `endpoint_list`. +type SecurityEndpointExceptionsAPIExceptionListHumanId = string + +// SecurityEndpointExceptionsAPIExceptionListId Exception list's identifier. +type SecurityEndpointExceptionsAPIExceptionListId = string + +// SecurityEndpointExceptionsAPIExceptionListItem defines model for Security_Endpoint_Exceptions_API_ExceptionListItem. +type SecurityEndpointExceptionsAPIExceptionListItem struct { + // UnderscoreVersion The version id, normally returned by the API when the item was retrieved. Use it ensure updates are done against the latest version. + UnderscoreVersion *string `json:"_version,omitempty"` + + // Comments Array of comment fields: + // + // - comment (string): Comments about the exception item. + Comments SecurityEndpointExceptionsAPIExceptionListItemCommentArray `json:"comments"` + + // CreatedAt Autogenerated date of object creation. + CreatedAt time.Time `json:"created_at"` + + // CreatedBy Autogenerated value - user that created object. + CreatedBy string `json:"created_by"` + + // Description Describes the exception list. + Description SecurityEndpointExceptionsAPIExceptionListItemDescription `json:"description"` + Entries SecurityEndpointExceptionsAPIExceptionListItemEntryArray `json:"entries"` + + // ExpireTime The exception item’s expiration date, in ISO format. This field is only available for regular exception items, not endpoint exceptions. + ExpireTime *SecurityEndpointExceptionsAPIExceptionListItemExpireTime `json:"expire_time,omitempty"` + + // Id Exception's identifier. + Id SecurityEndpointExceptionsAPIExceptionListItemId `json:"id"` + + // ItemId Human readable string identifier, e.g. `trusted-linux-processes` + ItemId SecurityEndpointExceptionsAPIExceptionListItemHumanId `json:"item_id"` + + // ListId The exception list's human readable string identifier, `endpoint_list`. + ListId SecurityEndpointExceptionsAPIExceptionListHumanId `json:"list_id"` + Meta *SecurityEndpointExceptionsAPIExceptionListItemMeta `json:"meta,omitempty"` + + // Name Exception list name. + Name SecurityEndpointExceptionsAPIExceptionListItemName `json:"name"` + + // NamespaceType Determines whether the exception container is available in all Kibana spaces or just the space + // in which it is created, where: + // + // - `single`: Only available in the Kibana space in which it is created. + // - `agnostic`: Available in all Kibana spaces. 
+ NamespaceType SecurityEndpointExceptionsAPIExceptionNamespaceType `json:"namespace_type"` + OsTypes *SecurityEndpointExceptionsAPIExceptionListItemOsTypeArray `json:"os_types,omitempty"` + Tags *SecurityEndpointExceptionsAPIExceptionListItemTags `json:"tags,omitempty"` + + // TieBreakerId Field used in search to ensure all containers are sorted and returned correctly. + TieBreakerId string `json:"tie_breaker_id"` + Type SecurityEndpointExceptionsAPIExceptionListItemType `json:"type"` + + // UpdatedAt Autogenerated date of last object update. + UpdatedAt time.Time `json:"updated_at"` + + // UpdatedBy Autogenerated value - user that last updated object. + UpdatedBy string `json:"updated_by"` +} + +// SecurityEndpointExceptionsAPIExceptionListItemComment defines model for Security_Endpoint_Exceptions_API_ExceptionListItemComment. +type SecurityEndpointExceptionsAPIExceptionListItemComment struct { + // Comment A string that does not contain only whitespace characters + Comment SecurityEndpointExceptionsAPINonEmptyString `json:"comment"` + + // CreatedAt Autogenerated date of object creation. + CreatedAt time.Time `json:"created_at"` + + // CreatedBy A string that does not contain only whitespace characters + CreatedBy SecurityEndpointExceptionsAPINonEmptyString `json:"created_by"` + + // Id A string that does not contain only whitespace characters + Id SecurityEndpointExceptionsAPINonEmptyString `json:"id"` + + // UpdatedAt Autogenerated date of last object update. + UpdatedAt *time.Time `json:"updated_at,omitempty"` + + // UpdatedBy A string that does not contain only whitespace characters + UpdatedBy *SecurityEndpointExceptionsAPINonEmptyString `json:"updated_by,omitempty"` +} + +// SecurityEndpointExceptionsAPIExceptionListItemCommentArray Array of comment fields: +// +// - comment (string): Comments about the exception item. +type SecurityEndpointExceptionsAPIExceptionListItemCommentArray = []SecurityEndpointExceptionsAPIExceptionListItemComment + +// SecurityEndpointExceptionsAPIExceptionListItemDescription Describes the exception list. +type SecurityEndpointExceptionsAPIExceptionListItemDescription = string + +// SecurityEndpointExceptionsAPIExceptionListItemEntry defines model for Security_Endpoint_Exceptions_API_ExceptionListItemEntry. +type SecurityEndpointExceptionsAPIExceptionListItemEntry struct { + union json.RawMessage +} + +// SecurityEndpointExceptionsAPIExceptionListItemEntryArray defines model for Security_Endpoint_Exceptions_API_ExceptionListItemEntryArray. +type SecurityEndpointExceptionsAPIExceptionListItemEntryArray = []SecurityEndpointExceptionsAPIExceptionListItemEntry + +// SecurityEndpointExceptionsAPIExceptionListItemEntryExists defines model for Security_Endpoint_Exceptions_API_ExceptionListItemEntryExists. +type SecurityEndpointExceptionsAPIExceptionListItemEntryExists struct { + // Field A string that does not contain only whitespace characters + Field SecurityEndpointExceptionsAPINonEmptyString `json:"field"` + Operator SecurityEndpointExceptionsAPIExceptionListItemEntryOperator `json:"operator"` + Type SecurityEndpointExceptionsAPIExceptionListItemEntryExistsType `json:"type"` +} + +// SecurityEndpointExceptionsAPIExceptionListItemEntryExistsType defines model for SecurityEndpointExceptionsAPIExceptionListItemEntryExists.Type. +type SecurityEndpointExceptionsAPIExceptionListItemEntryExistsType string + +// SecurityEndpointExceptionsAPIExceptionListItemEntryList defines model for Security_Endpoint_Exceptions_API_ExceptionListItemEntryList. 
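+// (Illustrative example, inferred from the field shapes below rather than taken from the spec; the
+// value list ID is hypothetical:
+//   {"field": "source.ip", "list": {"id": "internal-ip-list", "type": "ip"}, "operator": "included", "type": "list"})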
+type SecurityEndpointExceptionsAPIExceptionListItemEntryList struct { + // Field A string that does not contain only whitespace characters + Field SecurityEndpointExceptionsAPINonEmptyString `json:"field"` + List struct { + // Id Value list's identifier. + Id SecurityEndpointExceptionsAPIListId `json:"id"` + + // Type Specifies the Elasticsearch data type of excludes the list container holds. Some common examples: + // + // - `keyword`: Many ECS fields are Elasticsearch keywords + // - `ip`: IP addresses + // - `ip_range`: Range of IP addresses (supports IPv4, IPv6, and CIDR notation) + Type SecurityEndpointExceptionsAPIListType `json:"type"` + } `json:"list"` + Operator SecurityEndpointExceptionsAPIExceptionListItemEntryOperator `json:"operator"` + Type SecurityEndpointExceptionsAPIExceptionListItemEntryListType `json:"type"` +} + +// SecurityEndpointExceptionsAPIExceptionListItemEntryListType defines model for SecurityEndpointExceptionsAPIExceptionListItemEntryList.Type. +type SecurityEndpointExceptionsAPIExceptionListItemEntryListType string + +// SecurityEndpointExceptionsAPIExceptionListItemEntryMatch defines model for Security_Endpoint_Exceptions_API_ExceptionListItemEntryMatch. +type SecurityEndpointExceptionsAPIExceptionListItemEntryMatch struct { + // Field A string that does not contain only whitespace characters + Field SecurityEndpointExceptionsAPINonEmptyString `json:"field"` + Operator SecurityEndpointExceptionsAPIExceptionListItemEntryOperator `json:"operator"` + Type SecurityEndpointExceptionsAPIExceptionListItemEntryMatchType `json:"type"` + + // Value A string that does not contain only whitespace characters + Value SecurityEndpointExceptionsAPINonEmptyString `json:"value"` +} + +// SecurityEndpointExceptionsAPIExceptionListItemEntryMatchType defines model for SecurityEndpointExceptionsAPIExceptionListItemEntryMatch.Type. +type SecurityEndpointExceptionsAPIExceptionListItemEntryMatchType string + +// SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny defines model for Security_Endpoint_Exceptions_API_ExceptionListItemEntryMatchAny. +type SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny struct { + // Field A string that does not contain only whitespace characters + Field SecurityEndpointExceptionsAPINonEmptyString `json:"field"` + Operator SecurityEndpointExceptionsAPIExceptionListItemEntryOperator `json:"operator"` + Type SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAnyType `json:"type"` + Value []SecurityEndpointExceptionsAPINonEmptyString `json:"value"` +} + +// SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAnyType defines model for SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny.Type. +type SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAnyType string + +// SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard defines model for Security_Endpoint_Exceptions_API_ExceptionListItemEntryMatchWildcard. 
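+// (Illustrative example, inferred from the field shapes below rather than taken from the spec; the
+// field name and pattern are placeholders:
+//   {"field": "file.path.text", "operator": "included", "type": "wildcard", "value": "C:\\Windows\\Temp\\*"})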
+type SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard struct { + // Field A string that does not contain only whitespace characters + Field SecurityEndpointExceptionsAPINonEmptyString `json:"field"` + Operator SecurityEndpointExceptionsAPIExceptionListItemEntryOperator `json:"operator"` + Type SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcardType `json:"type"` + + // Value A string that does not contain only whitespace characters + Value SecurityEndpointExceptionsAPINonEmptyString `json:"value"` +} + +// SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcardType defines model for SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard.Type. +type SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcardType string + +// SecurityEndpointExceptionsAPIExceptionListItemEntryNested defines model for Security_Endpoint_Exceptions_API_ExceptionListItemEntryNested. +type SecurityEndpointExceptionsAPIExceptionListItemEntryNested struct { + Entries []SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem `json:"entries"` + + // Field A string that does not contain only whitespace characters + Field SecurityEndpointExceptionsAPINonEmptyString `json:"field"` + Type SecurityEndpointExceptionsAPIExceptionListItemEntryNestedType `json:"type"` +} + +// SecurityEndpointExceptionsAPIExceptionListItemEntryNestedType defines model for SecurityEndpointExceptionsAPIExceptionListItemEntryNested.Type. +type SecurityEndpointExceptionsAPIExceptionListItemEntryNestedType string + +// SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem defines model for Security_Endpoint_Exceptions_API_ExceptionListItemEntryNestedEntryItem. +type SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem struct { + union json.RawMessage +} + +// SecurityEndpointExceptionsAPIExceptionListItemEntryOperator defines model for Security_Endpoint_Exceptions_API_ExceptionListItemEntryOperator. +type SecurityEndpointExceptionsAPIExceptionListItemEntryOperator string + +// SecurityEndpointExceptionsAPIExceptionListItemExpireTime The exception item’s expiration date, in ISO format. This field is only available for regular exception items, not endpoint exceptions. +type SecurityEndpointExceptionsAPIExceptionListItemExpireTime = time.Time + +// SecurityEndpointExceptionsAPIExceptionListItemHumanId Human readable string identifier, e.g. `trusted-linux-processes` +type SecurityEndpointExceptionsAPIExceptionListItemHumanId = string + +// SecurityEndpointExceptionsAPIExceptionListItemId Exception's identifier. +type SecurityEndpointExceptionsAPIExceptionListItemId = string + +// SecurityEndpointExceptionsAPIExceptionListItemMeta defines model for Security_Endpoint_Exceptions_API_ExceptionListItemMeta. +type SecurityEndpointExceptionsAPIExceptionListItemMeta map[string]interface{} + +// SecurityEndpointExceptionsAPIExceptionListItemName Exception list name. +type SecurityEndpointExceptionsAPIExceptionListItemName = string + +// SecurityEndpointExceptionsAPIExceptionListItemOsTypeArray defines model for Security_Endpoint_Exceptions_API_ExceptionListItemOsTypeArray. +type SecurityEndpointExceptionsAPIExceptionListItemOsTypeArray = []SecurityEndpointExceptionsAPIExceptionListOsType + +// SecurityEndpointExceptionsAPIExceptionListItemTags defines model for Security_Endpoint_Exceptions_API_ExceptionListItemTags. 
+type SecurityEndpointExceptionsAPIExceptionListItemTags = []string + +// SecurityEndpointExceptionsAPIExceptionListItemType defines model for Security_Endpoint_Exceptions_API_ExceptionListItemType. +type SecurityEndpointExceptionsAPIExceptionListItemType string + +// SecurityEndpointExceptionsAPIExceptionListMeta Placeholder for metadata about the list container. +type SecurityEndpointExceptionsAPIExceptionListMeta map[string]interface{} + +// SecurityEndpointExceptionsAPIExceptionListName The name of the exception list. +type SecurityEndpointExceptionsAPIExceptionListName = string + +// SecurityEndpointExceptionsAPIExceptionListOsType Use this field to specify the operating system. +type SecurityEndpointExceptionsAPIExceptionListOsType string + +// SecurityEndpointExceptionsAPIExceptionListOsTypeArray Use this field to specify the operating system. Only enter one value. +type SecurityEndpointExceptionsAPIExceptionListOsTypeArray = []SecurityEndpointExceptionsAPIExceptionListOsType + +// SecurityEndpointExceptionsAPIExceptionListTags String array containing words and phrases to help categorize exception containers. +type SecurityEndpointExceptionsAPIExceptionListTags = []string + +// SecurityEndpointExceptionsAPIExceptionListType The type of exception list to be created. Different list types may denote where they can be utilized. +type SecurityEndpointExceptionsAPIExceptionListType string + +// SecurityEndpointExceptionsAPIExceptionListVersion The document version, automatically increasd on updates. +type SecurityEndpointExceptionsAPIExceptionListVersion = int + +// SecurityEndpointExceptionsAPIExceptionNamespaceType Determines whether the exception container is available in all Kibana spaces or just the space +// in which it is created, where: +// +// - `single`: Only available in the Kibana space in which it is created. +// - `agnostic`: Available in all Kibana spaces. +type SecurityEndpointExceptionsAPIExceptionNamespaceType string + +// SecurityEndpointExceptionsAPIFindEndpointListItemsFilter A string that does not contain only whitespace characters +type SecurityEndpointExceptionsAPIFindEndpointListItemsFilter = SecurityEndpointExceptionsAPINonEmptyString + +// SecurityEndpointExceptionsAPIListId Value list's identifier. +type SecurityEndpointExceptionsAPIListId = string + +// SecurityEndpointExceptionsAPIListType Specifies the Elasticsearch data type of excludes the list container holds. Some common examples: +// +// - `keyword`: Many ECS fields are Elasticsearch keywords +// - `ip`: IP addresses +// - `ip_range`: Range of IP addresses (supports IPv4, IPv6, and CIDR notation) +type SecurityEndpointExceptionsAPIListType string + +// SecurityEndpointExceptionsAPINonEmptyString A string that does not contain only whitespace characters +type SecurityEndpointExceptionsAPINonEmptyString = string + +// SecurityEndpointExceptionsAPIPlatformErrorResponse defines model for Security_Endpoint_Exceptions_API_PlatformErrorResponse. +type SecurityEndpointExceptionsAPIPlatformErrorResponse struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode int `json:"statusCode"` +} + +// SecurityEndpointExceptionsAPISiemErrorResponse defines model for Security_Endpoint_Exceptions_API_SiemErrorResponse. +type SecurityEndpointExceptionsAPISiemErrorResponse struct { + Message string `json:"message"` + StatusCode int `json:"status_code"` +} + +// SecurityEndpointManagementAPIActionStateSuccessResponse defines model for Security_Endpoint_Management_API_ActionStateSuccessResponse. 
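+// (Illustrative response shape, assembled from the nested struct below and not taken verbatim from
+// the spec:
+//   {"body": {"data": {"canEncrypt": true}}})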
+type SecurityEndpointManagementAPIActionStateSuccessResponse struct { + Body struct { + Data struct { + CanEncrypt *bool `json:"canEncrypt,omitempty"` + } `json:"data"` + } `json:"body"` +} + +// SecurityEndpointManagementAPIActionStatusSuccessResponse defines model for Security_Endpoint_Management_API_ActionStatusSuccessResponse. +type SecurityEndpointManagementAPIActionStatusSuccessResponse struct { + Body struct { + Data struct { + // AgentId Agent ID + AgentId SecurityEndpointManagementAPIAgentId `json:"agent_id"` + PendingActions SecurityEndpointManagementAPIPendingActionsSchema `json:"pending_actions"` + } `json:"data"` + } `json:"body"` +} + +// SecurityEndpointManagementAPIAgentId Agent ID +type SecurityEndpointManagementAPIAgentId = string + +// SecurityEndpointManagementAPIAgentIds A list of agent IDs. Max of 50. +type SecurityEndpointManagementAPIAgentIds struct { + union json.RawMessage +} + +// SecurityEndpointManagementAPIAgentIds0 defines model for . +type SecurityEndpointManagementAPIAgentIds0 = []string + +// SecurityEndpointManagementAPIAgentIds1 defines model for . +type SecurityEndpointManagementAPIAgentIds1 = string + +// SecurityEndpointManagementAPIAgentTypes List of agent types to retrieve. Defaults to `endpoint`. +type SecurityEndpointManagementAPIAgentTypes string + +// SecurityEndpointManagementAPICloudFileScriptParameters defines model for Security_Endpoint_Management_API_CloudFileScriptParameters. +type SecurityEndpointManagementAPICloudFileScriptParameters struct { + // CloudFile Script name in cloud storage. + CloudFile string `json:"cloudFile"` + + // CommandLine Command line arguments. + CommandLine *string `json:"commandLine,omitempty"` + + // Timeout Timeout in seconds. + Timeout *int `json:"timeout,omitempty"` +} + +// SecurityEndpointManagementAPICommand The command to be executed (cannot be an empty string) +type SecurityEndpointManagementAPICommand string + +// SecurityEndpointManagementAPICommands A list of response action command names. +type SecurityEndpointManagementAPICommands = []SecurityEndpointManagementAPICommand + +// SecurityEndpointManagementAPIComment Optional comment +type SecurityEndpointManagementAPIComment = string + +// SecurityEndpointManagementAPIEndDate An end date in ISO format or Date Math format. +type SecurityEndpointManagementAPIEndDate = string + +// SecurityEndpointManagementAPIEndpointIds List of endpoint IDs (cannot contain empty strings) +type SecurityEndpointManagementAPIEndpointIds = []string + +// SecurityEndpointManagementAPIEndpointMetadataResponse defines model for Security_Endpoint_Management_API_EndpointMetadataResponse. +type SecurityEndpointManagementAPIEndpointMetadataResponse = map[string]interface{} + +// SecurityEndpointManagementAPIExecuteRouteRequestBody defines model for Security_Endpoint_Management_API_ExecuteRouteRequestBody. +type SecurityEndpointManagementAPIExecuteRouteRequestBody struct { + // AgentType List of agent types to retrieve. Defaults to `endpoint`. + AgentType *SecurityEndpointManagementAPIAgentTypes `json:"agent_type,omitempty"` + + // AlertIds If this action is associated with any alerts, they can be specified here. The action will be logged in any cases associated with the specified alerts. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds The IDs of cases where the action taken will be logged. 
+ CaseIds *[]string `json:"case_ids,omitempty"` + + // Comment Optional comment + Comment *SecurityEndpointManagementAPIComment `json:"comment,omitempty"` + + // EndpointIds List of endpoint IDs (cannot contain empty strings) + EndpointIds SecurityEndpointManagementAPIEndpointIds `json:"endpoint_ids"` + Parameters struct { + // Command The command to be executed (cannot be an empty string) + Command SecurityEndpointManagementAPICommand `json:"command"` + + // Timeout The maximum timeout value in milliseconds (optional) + Timeout *SecurityEndpointManagementAPITimeout `json:"timeout,omitempty"` + } `json:"parameters"` +} + +// SecurityEndpointManagementAPIExecuteRouteResponse defines model for Security_Endpoint_Management_API_ExecuteRouteResponse. +type SecurityEndpointManagementAPIExecuteRouteResponse = map[string]interface{} + +// SecurityEndpointManagementAPIGetEndpointActionListResponse defines model for Security_Endpoint_Management_API_GetEndpointActionListResponse. +type SecurityEndpointManagementAPIGetEndpointActionListResponse = map[string]interface{} + +// SecurityEndpointManagementAPIGetEndpointActionResponse defines model for Security_Endpoint_Management_API_GetEndpointActionResponse. +type SecurityEndpointManagementAPIGetEndpointActionResponse = map[string]interface{} + +// SecurityEndpointManagementAPIGetFileRouteRequestBody defines model for Security_Endpoint_Management_API_GetFileRouteRequestBody. +type SecurityEndpointManagementAPIGetFileRouteRequestBody struct { + // AgentType List of agent types to retrieve. Defaults to `endpoint`. + AgentType *SecurityEndpointManagementAPIAgentTypes `json:"agent_type,omitempty"` + + // AlertIds If this action is associated with any alerts, they can be specified here. The action will be logged in any cases associated with the specified alerts. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds The IDs of cases where the action taken will be logged. + CaseIds *[]string `json:"case_ids,omitempty"` + + // Comment Optional comment + Comment *SecurityEndpointManagementAPIComment `json:"comment,omitempty"` + + // EndpointIds List of endpoint IDs (cannot contain empty strings) + EndpointIds SecurityEndpointManagementAPIEndpointIds `json:"endpoint_ids"` + Parameters struct { + Path string `json:"path"` + } `json:"parameters"` +} + +// SecurityEndpointManagementAPIGetFileRouteResponse defines model for Security_Endpoint_Management_API_GetFileRouteResponse. +type SecurityEndpointManagementAPIGetFileRouteResponse = map[string]interface{} + +// SecurityEndpointManagementAPIGetProcessesRouteRequestBody defines model for Security_Endpoint_Management_API_GetProcessesRouteRequestBody. +type SecurityEndpointManagementAPIGetProcessesRouteRequestBody struct { + // AgentType List of agent types to retrieve. Defaults to `endpoint`. + AgentType *SecurityEndpointManagementAPIAgentTypes `json:"agent_type,omitempty"` + + // AlertIds If this action is associated with any alerts, they can be specified here. The action will be logged in any cases associated with the specified alerts. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds The IDs of cases where the action taken will be logged. 
+ CaseIds *[]string `json:"case_ids,omitempty"` + + // Comment Optional comment + Comment *SecurityEndpointManagementAPIComment `json:"comment,omitempty"` + + // EndpointIds List of endpoint IDs (cannot contain empty strings) + EndpointIds SecurityEndpointManagementAPIEndpointIds `json:"endpoint_ids"` + + // Parameters Optional parameters object + Parameters *SecurityEndpointManagementAPIParameters `json:"parameters,omitempty"` +} + +// SecurityEndpointManagementAPIGetProcessesRouteResponse defines model for Security_Endpoint_Management_API_GetProcessesRouteResponse. +type SecurityEndpointManagementAPIGetProcessesRouteResponse = map[string]interface{} + +// SecurityEndpointManagementAPIHostPathScriptParameters defines model for Security_Endpoint_Management_API_HostPathScriptParameters. +type SecurityEndpointManagementAPIHostPathScriptParameters struct { + // CommandLine Command line arguments. + CommandLine *string `json:"commandLine,omitempty"` + + // HostPath Absolute or relative path of script on host machine. + HostPath string `json:"hostPath"` + + // Timeout Timeout in seconds. + Timeout *int `json:"timeout,omitempty"` +} + +// SecurityEndpointManagementAPIHostStatuses A set of agent health statuses to filter by. +type SecurityEndpointManagementAPIHostStatuses = []string + +// SecurityEndpointManagementAPIIsolateRouteResponse defines model for Security_Endpoint_Management_API_IsolateRouteResponse. +type SecurityEndpointManagementAPIIsolateRouteResponse = map[string]interface{} + +// SecurityEndpointManagementAPIKillProcessRouteRequestBody defines model for Security_Endpoint_Management_API_KillProcessRouteRequestBody. +type SecurityEndpointManagementAPIKillProcessRouteRequestBody struct { + // AgentType List of agent types to retrieve. Defaults to `endpoint`. + AgentType *SecurityEndpointManagementAPIAgentTypes `json:"agent_type,omitempty"` + + // AlertIds If this action is associated with any alerts, they can be specified here. The action will be logged in any cases associated with the specified alerts. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds The IDs of cases where the action taken will be logged. + CaseIds *[]string `json:"case_ids,omitempty"` + + // Comment Optional comment + Comment *SecurityEndpointManagementAPIComment `json:"comment,omitempty"` + + // EndpointIds List of endpoint IDs (cannot contain empty strings) + EndpointIds SecurityEndpointManagementAPIEndpointIds `json:"endpoint_ids"` + Parameters SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters `json:"parameters"` +} + +// SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0 defines model for . +type SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0 struct { + // Pid The process ID (PID) of the process to terminate. + Pid *int `json:"pid,omitempty"` +} + +// SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1 defines model for . +type SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1 struct { + // EntityId The entity ID of the process to terminate. + EntityId *string `json:"entity_id,omitempty"` +} + +// SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2 defines model for . +type SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2 struct { + // ProcessName The name of the process to terminate. Valid for SentinelOne agent type only. 
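+	// (Illustrative only: a SentinelOne request could send `"parameters": {"process_name": "notepad.exe"}`;
+	// the sibling variants above use `pid` or `entity_id` instead.)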
+ ProcessName *string `json:"process_name,omitempty"` +} + +// SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters defines model for SecurityEndpointManagementAPIKillProcessRouteRequestBody.Parameters. +type SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters struct { + union json.RawMessage +} + +// SecurityEndpointManagementAPIKillProcessRouteResponse defines model for Security_Endpoint_Management_API_KillProcessRouteResponse. +type SecurityEndpointManagementAPIKillProcessRouteResponse = map[string]interface{} + +// SecurityEndpointManagementAPIKuery A KQL string. +type SecurityEndpointManagementAPIKuery = string + +// SecurityEndpointManagementAPIMetadataListResponse defines model for Security_Endpoint_Management_API_MetadataListResponse. +type SecurityEndpointManagementAPIMetadataListResponse = map[string]interface{} + +// SecurityEndpointManagementAPIPage Page number +type SecurityEndpointManagementAPIPage = int + +// SecurityEndpointManagementAPIPageSize Number of items per page +type SecurityEndpointManagementAPIPageSize = int + +// SecurityEndpointManagementAPIParameters Optional parameters object +type SecurityEndpointManagementAPIParameters = map[string]interface{} + +// SecurityEndpointManagementAPIPendingActionDataType defines model for Security_Endpoint_Management_API_PendingActionDataType. +type SecurityEndpointManagementAPIPendingActionDataType = int + +// SecurityEndpointManagementAPIPendingActionsSchema defines model for Security_Endpoint_Management_API_PendingActionsSchema. +type SecurityEndpointManagementAPIPendingActionsSchema struct { + union json.RawMessage +} + +// SecurityEndpointManagementAPIPendingActionsSchema0 defines model for . +type SecurityEndpointManagementAPIPendingActionsSchema0 struct { + Execute *SecurityEndpointManagementAPIPendingActionDataType `json:"execute,omitempty"` + GetFile *SecurityEndpointManagementAPIPendingActionDataType `json:"get-file,omitempty"` + Isolate *SecurityEndpointManagementAPIPendingActionDataType `json:"isolate,omitempty"` + KillProcess *SecurityEndpointManagementAPIPendingActionDataType `json:"kill-process,omitempty"` + RunningProcesses *SecurityEndpointManagementAPIPendingActionDataType `json:"running-processes,omitempty"` + Scan *SecurityEndpointManagementAPIPendingActionDataType `json:"scan,omitempty"` + SuspendProcess *SecurityEndpointManagementAPIPendingActionDataType `json:"suspend-process,omitempty"` + Unisolate *SecurityEndpointManagementAPIPendingActionDataType `json:"unisolate,omitempty"` + Upload *SecurityEndpointManagementAPIPendingActionDataType `json:"upload,omitempty"` +} + +// SecurityEndpointManagementAPIPendingActionsSchema1 defines model for . +type SecurityEndpointManagementAPIPendingActionsSchema1 map[string]interface{} + +// SecurityEndpointManagementAPIProtectionUpdatesNoteResponse defines model for Security_Endpoint_Management_API_ProtectionUpdatesNoteResponse. +type SecurityEndpointManagementAPIProtectionUpdatesNoteResponse struct { + Note *string `json:"note,omitempty"` +} + +// SecurityEndpointManagementAPIRawScriptParameters defines model for Security_Endpoint_Management_API_RawScriptParameters. +type SecurityEndpointManagementAPIRawScriptParameters struct { + // CommandLine Command line arguments. + CommandLine *string `json:"commandLine,omitempty"` + + // Raw Raw script content. + Raw string `json:"raw"` + + // Timeout Timeout in seconds. 
+ Timeout *int `json:"timeout,omitempty"` +} + +// SecurityEndpointManagementAPIResponseActionCreateSuccessResponse defines model for Security_Endpoint_Management_API_ResponseActionCreateSuccessResponse. +type SecurityEndpointManagementAPIResponseActionCreateSuccessResponse struct { + // Data The created response action details + Data *struct { + // AgentState The state of the response action for each agent ID that it was sent to + AgentState *map[string]struct { + // CompletedAt The date and time the response action was completed for the agent ID + CompletedAt *string `json:"completedAt,omitempty"` + + // IsCompleted Whether the response action is completed for the agent ID + IsCompleted *bool `json:"isCompleted,omitempty"` + + // WasSuccessful Whether the response action was successful for the agent ID + WasSuccessful *bool `json:"wasSuccessful,omitempty"` + } `json:"agentState,omitempty"` + + // AgentType The response action agent type + AgentType *string `json:"agentType,omitempty"` + + // Agents The agent IDs for the hosts that the response action was sent to + Agents *map[string]interface{} `json:"agents,omitempty"` + + // Command The response action command + Command *string `json:"command,omitempty"` + + // CreatedBy The user who created the response action + CreatedBy *string `json:"createdBy,omitempty"` + + // Hosts An object containing the host names associated with the agent IDs the response action was sent to + Hosts *map[string]struct { + // Name The host name + Name *string `json:"name,omitempty"` + } `json:"hosts,omitempty"` + + // Id The response action ID + Id *string `json:"id,omitempty"` + + // IsComplete Whether the response action is complete + IsComplete *bool `json:"isComplete,omitempty"` + + // IsExpired Whether the response action is expired + IsExpired *bool `json:"isExpired,omitempty"` + + // Outputs The outputs of the response action for each agent ID that it was sent to + Outputs *map[string]struct { + // Content The response action output content for the agent ID. Exact format depends on the response action command. + Content SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content `json:"content"` + Type SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsType `json:"type"` + } `json:"outputs,omitempty"` + + // Parameters The parameters of the response action. Content different depending on the response action command + Parameters *map[string]interface{} `json:"parameters,omitempty"` + + // StartedAt The response action start time + StartedAt *string `json:"startedAt,omitempty"` + + // Status The response action status + Status *string `json:"status,omitempty"` + + // WasSuccessful Whether the response action was successful + WasSuccessful *bool `json:"wasSuccessful,omitempty"` + } `json:"data,omitempty"` +} + +// SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0 defines model for . +type SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0 = map[string]interface{} + +// SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1 defines model for . +type SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1 = string + +// SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content The response action output content for the agent ID. Exact format depends on the response action command. 
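+// (Editorial note, not generated: the union resolves to either the object form (...Content0, a map)
+// or the plain-string form (...Content1) defined above, depending on which response action command
+// produced the output.)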
+type SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content struct { + union json.RawMessage +} + +// SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsType defines model for SecurityEndpointManagementAPIResponseActionCreateSuccessResponse.Data.Outputs.Type. +type SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsType string + +// SecurityEndpointManagementAPIRunScriptRouteRequestBody defines model for Security_Endpoint_Management_API_RunScriptRouteRequestBody. +type SecurityEndpointManagementAPIRunScriptRouteRequestBody struct { + // AgentType List of agent types to retrieve. Defaults to `endpoint`. + AgentType *SecurityEndpointManagementAPIAgentTypes `json:"agent_type,omitempty"` + + // AlertIds If this action is associated with any alerts, they can be specified here. The action will be logged in any cases associated with the specified alerts. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds The IDs of cases where the action taken will be logged. + CaseIds *[]string `json:"case_ids,omitempty"` + + // Comment Optional comment + Comment *SecurityEndpointManagementAPIComment `json:"comment,omitempty"` + + // EndpointIds List of endpoint IDs (cannot contain empty strings) + EndpointIds SecurityEndpointManagementAPIEndpointIds `json:"endpoint_ids"` + + // Parameters One of the following set of parameters must be provided + Parameters SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters `json:"parameters"` +} + +// SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters One of the following set of parameters must be provided +type SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters struct { + union json.RawMessage +} + +// SecurityEndpointManagementAPIScanRouteRequestBody defines model for Security_Endpoint_Management_API_ScanRouteRequestBody. +type SecurityEndpointManagementAPIScanRouteRequestBody struct { + // AgentType List of agent types to retrieve. Defaults to `endpoint`. + AgentType *SecurityEndpointManagementAPIAgentTypes `json:"agent_type,omitempty"` + + // AlertIds If this action is associated with any alerts, they can be specified here. The action will be logged in any cases associated with the specified alerts. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds The IDs of cases where the action taken will be logged. + CaseIds *[]string `json:"case_ids,omitempty"` + + // Comment Optional comment + Comment *SecurityEndpointManagementAPIComment `json:"comment,omitempty"` + + // EndpointIds List of endpoint IDs (cannot contain empty strings) + EndpointIds SecurityEndpointManagementAPIEndpointIds `json:"endpoint_ids"` + Parameters struct { + // Path The folder or file’s full path (including the file name). + Path string `json:"path"` + } `json:"parameters"` +} + +// SecurityEndpointManagementAPIScanRouteResponse defines model for Security_Endpoint_Management_API_ScanRouteResponse. +type SecurityEndpointManagementAPIScanRouteResponse = map[string]interface{} + +// SecurityEndpointManagementAPISentinelOneRunScriptParameters Parameters for Run Script response action against SentinelOne agent type. +type SecurityEndpointManagementAPISentinelOneRunScriptParameters struct { + // InputParams The input parameter arguments for the script that was selected. + InputParams *string `json:"inputParams,omitempty"` + + // Script The script ID from SentinelOne scripts library that will be executed. 
+ Script string `json:"script"` +} + +// SecurityEndpointManagementAPISortDirection Determines the sort order. +type SecurityEndpointManagementAPISortDirection string + +// SecurityEndpointManagementAPISortField Determines which field is used to sort the results. +type SecurityEndpointManagementAPISortField string + +// SecurityEndpointManagementAPIStartDate A start date in ISO 8601 format or Date Math format. +type SecurityEndpointManagementAPIStartDate = string + +// SecurityEndpointManagementAPISuccessResponse defines model for Security_Endpoint_Management_API_SuccessResponse. +type SecurityEndpointManagementAPISuccessResponse = map[string]interface{} + +// SecurityEndpointManagementAPISuspendProcessRouteRequestBody defines model for Security_Endpoint_Management_API_SuspendProcessRouteRequestBody. +type SecurityEndpointManagementAPISuspendProcessRouteRequestBody struct { + // AgentType List of agent types to retrieve. Defaults to `endpoint`. + AgentType *SecurityEndpointManagementAPIAgentTypes `json:"agent_type,omitempty"` + + // AlertIds If this action is associated with any alerts, they can be specified here. The action will be logged in any cases associated with the specified alerts. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds The IDs of cases where the action taken will be logged. + CaseIds *[]string `json:"case_ids,omitempty"` + + // Comment Optional comment + Comment *SecurityEndpointManagementAPIComment `json:"comment,omitempty"` + + // EndpointIds List of endpoint IDs (cannot contain empty strings) + EndpointIds SecurityEndpointManagementAPIEndpointIds `json:"endpoint_ids"` + Parameters SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters `json:"parameters"` +} + +// SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0 defines model for . +type SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0 struct { + // Pid The process ID (PID) of the process to suspend. + Pid *int `json:"pid,omitempty"` +} + +// SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1 defines model for . +type SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1 struct { + // EntityId The entity ID of the process to suspend. + EntityId *string `json:"entity_id,omitempty"` +} + +// SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters defines model for SecurityEndpointManagementAPISuspendProcessRouteRequestBody.Parameters. +type SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters struct { + union json.RawMessage +} + +// SecurityEndpointManagementAPISuspendProcessRouteResponse defines model for Security_Endpoint_Management_API_SuspendProcessRouteResponse. +type SecurityEndpointManagementAPISuspendProcessRouteResponse = map[string]interface{} + +// SecurityEndpointManagementAPITimeout The maximum timeout value in milliseconds (optional) +type SecurityEndpointManagementAPITimeout = int + +// SecurityEndpointManagementAPIType Type of response action +type SecurityEndpointManagementAPIType string + +// SecurityEndpointManagementAPITypes List of types of response actions +type SecurityEndpointManagementAPITypes = []SecurityEndpointManagementAPIType + +// SecurityEndpointManagementAPIUnisolateRouteResponse defines model for Security_Endpoint_Management_API_UnisolateRouteResponse. 
+type SecurityEndpointManagementAPIUnisolateRouteResponse = map[string]interface{} + +// SecurityEndpointManagementAPIUploadRouteRequestBody defines model for Security_Endpoint_Management_API_UploadRouteRequestBody. +type SecurityEndpointManagementAPIUploadRouteRequestBody struct { + // AgentType List of agent types to retrieve. Defaults to `endpoint`. + AgentType *SecurityEndpointManagementAPIAgentTypes `json:"agent_type,omitempty"` + + // AlertIds If this action is associated with any alerts, they can be specified here. The action will be logged in any cases associated with the specified alerts. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds The IDs of cases where the action taken will be logged. + CaseIds *[]string `json:"case_ids,omitempty"` + + // Comment Optional comment + Comment *SecurityEndpointManagementAPIComment `json:"comment,omitempty"` + + // EndpointIds List of endpoint IDs (cannot contain empty strings) + EndpointIds SecurityEndpointManagementAPIEndpointIds `json:"endpoint_ids"` + + // File The binary content of the file. + File openapi_types.File `json:"file"` + Parameters struct { + // Overwrite Overwrite the file on the host if it already exists. + Overwrite *bool `json:"overwrite,omitempty"` + } `json:"parameters"` +} + +// SecurityEndpointManagementAPIUploadRouteResponse defines model for Security_Endpoint_Management_API_UploadRouteResponse. +type SecurityEndpointManagementAPIUploadRouteResponse = map[string]interface{} + +// SecurityEndpointManagementAPIUserIds A list of user IDs. +type SecurityEndpointManagementAPIUserIds struct { + union json.RawMessage +} + +// SecurityEndpointManagementAPIUserIds0 defines model for . +type SecurityEndpointManagementAPIUserIds0 = []string + +// SecurityEndpointManagementAPIUserIds1 defines model for . +type SecurityEndpointManagementAPIUserIds1 = string + +// SecurityEndpointManagementAPIWithOutputs A list of action IDs that should include the complete output of the action. +type SecurityEndpointManagementAPIWithOutputs struct { + union json.RawMessage +} + +// SecurityEndpointManagementAPIWithOutputs0 defines model for . +type SecurityEndpointManagementAPIWithOutputs0 = []string + +// SecurityEndpointManagementAPIWithOutputs1 defines model for . +type SecurityEndpointManagementAPIWithOutputs1 = string + +// SecurityEntityAnalyticsAPIAssetCriticalityBulkUploadErrorItem defines model for Security_Entity_Analytics_API_AssetCriticalityBulkUploadErrorItem. +type SecurityEntityAnalyticsAPIAssetCriticalityBulkUploadErrorItem struct { + Index int `json:"index"` + Message string `json:"message"` +} + +// SecurityEntityAnalyticsAPIAssetCriticalityBulkUploadStats defines model for Security_Entity_Analytics_API_AssetCriticalityBulkUploadStats. +type SecurityEntityAnalyticsAPIAssetCriticalityBulkUploadStats struct { + Failed int `json:"failed"` + Successful int `json:"successful"` + Total int `json:"total"` +} + +// SecurityEntityAnalyticsAPIAssetCriticalityLevel The criticality level of the asset. +type SecurityEntityAnalyticsAPIAssetCriticalityLevel string + +// SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUpload The criticality level of the asset for bulk upload. The value `unassigned` is used to indicate that the criticality level is not assigned and is only used for bulk upload. +type SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUpload string + +// SecurityEntityAnalyticsAPIAssetCriticalityRecord defines model for Security_Entity_Analytics_API_AssetCriticalityRecord. 
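+// (Illustrative record, with assumed but typical values for `id_field` and `criticality_level`; the
+// JSON keys mirror the struct below:
+//   {"@timestamp": "2025-01-01T00:00:00Z", "id_field": "host.name", "id_value": "web-01", "criticality_level": "high_impact", "asset": {"criticality": "high_impact"}})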
+type SecurityEntityAnalyticsAPIAssetCriticalityRecord struct { + // Timestamp The time the record was created or updated. + Timestamp time.Time `json:"@timestamp"` + Asset struct { + // Criticality The criticality level of the asset. + Criticality *SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality,omitempty"` + } `json:"asset"` + + // CriticalityLevel The criticality level of the asset. + CriticalityLevel SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality_level"` + Entity *struct { + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Id string `json:"id"` + } `json:"entity,omitempty"` + Host *struct { + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Name string `json:"name"` + } `json:"host,omitempty"` + IdField SecurityEntityAnalyticsAPIIdField `json:"id_field"` + + // IdValue The ID value of the asset. + IdValue string `json:"id_value"` + Service *struct { + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Name string `json:"name"` + } `json:"service,omitempty"` + User *struct { + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Name string `json:"name"` + } `json:"user,omitempty"` +} + +// SecurityEntityAnalyticsAPIAssetCriticalityRecordEcsParts defines model for Security_Entity_Analytics_API_AssetCriticalityRecordEcsParts. +type SecurityEntityAnalyticsAPIAssetCriticalityRecordEcsParts struct { + Asset struct { + // Criticality The criticality level of the asset. + Criticality *SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality,omitempty"` + } `json:"asset"` + Entity *struct { + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Id string `json:"id"` + } `json:"entity,omitempty"` + Host *struct { + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Name string `json:"name"` + } `json:"host,omitempty"` + Service *struct { + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Name string `json:"name"` + } `json:"service,omitempty"` + User *struct { + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Name string `json:"name"` + } `json:"user,omitempty"` +} + +// SecurityEntityAnalyticsAPIAssetCriticalityRecordIdParts defines model for Security_Entity_Analytics_API_AssetCriticalityRecordIdParts. +type SecurityEntityAnalyticsAPIAssetCriticalityRecordIdParts struct { + IdField SecurityEntityAnalyticsAPIIdField `json:"id_field"` + + // IdValue The ID value of the asset. 
+ IdValue string `json:"id_value"` +} + +// SecurityEntityAnalyticsAPICleanUpRiskEngineErrorResponse defines model for Security_Entity_Analytics_API_CleanUpRiskEngineErrorResponse. +type SecurityEntityAnalyticsAPICleanUpRiskEngineErrorResponse struct { + CleanupSuccessful bool `json:"cleanup_successful"` + Errors []struct { + Error string `json:"error"` + Seq int `json:"seq"` + } `json:"errors"` +} + +// SecurityEntityAnalyticsAPIConfigureRiskEngineSavedObjectErrorResponse defines model for Security_Entity_Analytics_API_ConfigureRiskEngineSavedObjectErrorResponse. +type SecurityEntityAnalyticsAPIConfigureRiskEngineSavedObjectErrorResponse struct { + Errors []struct { + Error string `json:"error"` + Seq int `json:"seq"` + } `json:"errors"` + RiskEngineSavedObjectConfigured bool `json:"risk_engine_saved_object_configured"` +} + +// SecurityEntityAnalyticsAPICreateAssetCriticalityRecord defines model for Security_Entity_Analytics_API_CreateAssetCriticalityRecord. +type SecurityEntityAnalyticsAPICreateAssetCriticalityRecord struct { + // CriticalityLevel The criticality level of the asset. + CriticalityLevel SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality_level"` + IdField SecurityEntityAnalyticsAPIIdField `json:"id_field"` + + // IdValue The ID value of the asset. + IdValue string `json:"id_value"` +} + +// SecurityEntityAnalyticsAPIEngineComponentResource defines model for Security_Entity_Analytics_API_EngineComponentResource. +type SecurityEntityAnalyticsAPIEngineComponentResource string + +// SecurityEntityAnalyticsAPIEngineComponentStatus defines model for Security_Entity_Analytics_API_EngineComponentStatus. +type SecurityEntityAnalyticsAPIEngineComponentStatus struct { + Errors *[]struct { + Message *string `json:"message,omitempty"` + Title *string `json:"title,omitempty"` + } `json:"errors,omitempty"` + Health *SecurityEntityAnalyticsAPIEngineComponentStatusHealth `json:"health,omitempty"` + Id string `json:"id"` + Installed bool `json:"installed"` + Metadata *SecurityEntityAnalyticsAPIMetadata `json:"metadata,omitempty"` + Resource SecurityEntityAnalyticsAPIEngineComponentResource `json:"resource"` +} + +// SecurityEntityAnalyticsAPIEngineComponentStatusHealth defines model for SecurityEntityAnalyticsAPIEngineComponentStatus.Health. +type SecurityEntityAnalyticsAPIEngineComponentStatusHealth string + +// SecurityEntityAnalyticsAPIEngineDataviewUpdateResult defines model for Security_Entity_Analytics_API_EngineDataviewUpdateResult. +type SecurityEntityAnalyticsAPIEngineDataviewUpdateResult struct { + Changes *struct { + IndexPatterns *[]string `json:"indexPatterns,omitempty"` + } `json:"changes,omitempty"` + Type string `json:"type"` +} + +// SecurityEntityAnalyticsAPIEngineDescriptor defines model for Security_Entity_Analytics_API_EngineDescriptor. 
+type SecurityEntityAnalyticsAPIEngineDescriptor struct { + Delay *string `json:"delay,omitempty"` + DocsPerSecond *int `json:"docsPerSecond,omitempty"` + Error *struct { + Action SecurityEntityAnalyticsAPIEngineDescriptorErrorAction `json:"action"` + Message string `json:"message"` + } `json:"error,omitempty"` + FieldHistoryLength int `json:"fieldHistoryLength"` + Filter *string `json:"filter,omitempty"` + Frequency *string `json:"frequency,omitempty"` + IndexPattern SecurityEntityAnalyticsAPIIndexPattern `json:"indexPattern"` + LookbackPeriod *string `json:"lookbackPeriod,omitempty"` + Status SecurityEntityAnalyticsAPIEngineStatus `json:"status"` + Timeout *string `json:"timeout,omitempty"` + TimestampField *string `json:"timestampField,omitempty"` + Type SecurityEntityAnalyticsAPIEntityType `json:"type"` +} + +// SecurityEntityAnalyticsAPIEngineDescriptorErrorAction defines model for SecurityEntityAnalyticsAPIEngineDescriptor.Error.Action. +type SecurityEntityAnalyticsAPIEngineDescriptorErrorAction string + +// SecurityEntityAnalyticsAPIEngineMetadata defines model for Security_Entity_Analytics_API_EngineMetadata. +type SecurityEntityAnalyticsAPIEngineMetadata struct { + Type string `json:"Type"` +} + +// SecurityEntityAnalyticsAPIEngineStatus defines model for Security_Entity_Analytics_API_EngineStatus. +type SecurityEntityAnalyticsAPIEngineStatus string + +// SecurityEntityAnalyticsAPIEntity defines model for Security_Entity_Analytics_API_Entity. +type SecurityEntityAnalyticsAPIEntity struct { + union json.RawMessage +} + +// SecurityEntityAnalyticsAPIEntityAnalyticsPrivileges defines model for Security_Entity_Analytics_API_EntityAnalyticsPrivileges. +type SecurityEntityAnalyticsAPIEntityAnalyticsPrivileges struct { + HasAllRequired bool `json:"has_all_required"` + HasReadPermissions *bool `json:"has_read_permissions,omitempty"` + HasWritePermissions *bool `json:"has_write_permissions,omitempty"` + Privileges struct { + Elasticsearch struct { + Cluster *map[string]bool `json:"cluster,omitempty"` + Index *map[string]map[string]bool `json:"index,omitempty"` + } `json:"elasticsearch"` + Kibana *map[string]bool `json:"kibana,omitempty"` + } `json:"privileges"` +} + +// SecurityEntityAnalyticsAPIEntityRiskLevels defines model for Security_Entity_Analytics_API_EntityRiskLevels. +type SecurityEntityAnalyticsAPIEntityRiskLevels string + +// SecurityEntityAnalyticsAPIEntityRiskScoreRecord defines model for Security_Entity_Analytics_API_EntityRiskScoreRecord. +type SecurityEntityAnalyticsAPIEntityRiskScoreRecord struct { + // Timestamp The time at which the risk score was calculated. + Timestamp time.Time `json:"@timestamp"` + CalculatedLevel SecurityEntityAnalyticsAPIEntityRiskLevels `json:"calculated_level"` + + // CalculatedScore The raw numeric value of the given entity's risk score. + CalculatedScore float64 `json:"calculated_score"` + + // CalculatedScoreNorm The normalized numeric value of the given entity's risk score. Useful for comparing with other entities. + CalculatedScoreNorm float64 `json:"calculated_score_norm"` + + // Category1Count The number of risk input documents that contributed to the Category 1 score (`category_1_score`). + Category1Count int `json:"category_1_count"` + + // Category1Score The contribution of Category 1 to the overall risk score (`calculated_score`). Category 1 contains Detection Engine Alerts. 
+ Category1Score float64 `json:"category_1_score"` + Category2Count *int `json:"category_2_count,omitempty"` + Category2Score *float64 `json:"category_2_score,omitempty"` + + // CriticalityLevel The criticality level of the asset. + CriticalityLevel *SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality_level,omitempty"` + CriticalityModifier *float64 `json:"criticality_modifier,omitempty"` + + // IdField The identifier field defining this risk score. Coupled with `id_value`, uniquely identifies the entity being scored. + IdField string `json:"id_field"` + + // IdValue The identifier value defining this risk score. Coupled with `id_field`, uniquely identifies the entity being scored. + IdValue string `json:"id_value"` + + // Inputs A list of the highest-risk documents contributing to this risk score. Useful for investigative purposes. + Inputs []SecurityEntityAnalyticsAPIRiskScoreInput `json:"inputs"` + Notes []string `json:"notes"` +} + +// SecurityEntityAnalyticsAPIEntityType defines model for Security_Entity_Analytics_API_EntityType. +type SecurityEntityAnalyticsAPIEntityType string + +// SecurityEntityAnalyticsAPIGenericEntity defines model for Security_Entity_Analytics_API_GenericEntity. +type SecurityEntityAnalyticsAPIGenericEntity struct { + Timestamp *time.Time `json:"@timestamp,omitempty"` + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Entity struct { + EngineMetadata *SecurityEntityAnalyticsAPIEngineMetadata `json:"EngineMetadata,omitempty"` + Category *string `json:"category,omitempty"` + Id string `json:"id"` + Name string `json:"name"` + Source *string `json:"source,omitempty"` + Type string `json:"type"` + } `json:"entity"` +} + +// SecurityEntityAnalyticsAPIHostEntity defines model for Security_Entity_Analytics_API_HostEntity. +type SecurityEntityAnalyticsAPIHostEntity struct { + Timestamp *time.Time `json:"@timestamp,omitempty"` + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Entity struct { + EngineMetadata *SecurityEntityAnalyticsAPIEngineMetadata `json:"EngineMetadata,omitempty"` + Name string `json:"name"` + Source string `json:"source"` + Type string `json:"type"` + } `json:"entity"` + Event *struct { + Ingested *time.Time `json:"ingested,omitempty"` + } `json:"event,omitempty"` + Host struct { + Architecture *[]string `json:"architecture,omitempty"` + Domain *[]string `json:"domain,omitempty"` + Hostname *[]string `json:"hostname,omitempty"` + Id *[]string `json:"id,omitempty"` + Ip *[]string `json:"ip,omitempty"` + Mac *[]string `json:"mac,omitempty"` + Name string `json:"name"` + Risk *SecurityEntityAnalyticsAPIEntityRiskScoreRecord `json:"risk,omitempty"` + Type *[]string `json:"type,omitempty"` + } `json:"host"` +} + +// SecurityEntityAnalyticsAPIIdField defines model for Security_Entity_Analytics_API_IdField. +type SecurityEntityAnalyticsAPIIdField string + +// SecurityEntityAnalyticsAPIIndexPattern defines model for Security_Entity_Analytics_API_IndexPattern. +type SecurityEntityAnalyticsAPIIndexPattern = string + +// SecurityEntityAnalyticsAPIInspectQuery defines model for Security_Entity_Analytics_API_InspectQuery. 
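The risk score record above mixes required scalar fields with optional pointer fields, which is the usual oapi-codegen convention for non-required properties. A minimal sketch of reading one safely, assuming it sits in the same package as the generated types:

package kbapi // assumed package name of the generated client; adjust to the real one

import "fmt"

// summarizeRisk is a hypothetical helper; a missing criticality modifier is
// treated as neutral (1.0) purely for display purposes.
func summarizeRisk(rec SecurityEntityAnalyticsAPIEntityRiskScoreRecord) string {
	modifier := 1.0
	if rec.CriticalityModifier != nil {
		modifier = *rec.CriticalityModifier
	}
	return fmt.Sprintf("%s=%s: score %.2f (normalized %.2f, criticality modifier %.2f, %d category-1 inputs)",
		rec.IdField, rec.IdValue, rec.CalculatedScore, rec.CalculatedScoreNorm, modifier, rec.Category1Count)
}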
+type SecurityEntityAnalyticsAPIInspectQuery struct { + Dsl []string `json:"dsl"` + Response []string `json:"response"` +} + +// SecurityEntityAnalyticsAPIInterval Interval in which enrich policy runs. For example, `"1h"` means the rule runs every hour. Must be less than or equal to half the duration of the lookback period, +type SecurityEntityAnalyticsAPIInterval = string + +// SecurityEntityAnalyticsAPIMetadata defines model for Security_Entity_Analytics_API_Metadata. +type SecurityEntityAnalyticsAPIMetadata = SecurityEntityAnalyticsAPITransformStatsMetadata + +// SecurityEntityAnalyticsAPIMonitoredUserDoc defines model for Security_Entity_Analytics_API_MonitoredUserDoc. +type SecurityEntityAnalyticsAPIMonitoredUserDoc struct { + Timestamp *time.Time `json:"@timestamp,omitempty"` + EntityAnalyticsMonitoring *struct { + Labels *[]struct { + Field *string `json:"field,omitempty"` + Source *string `json:"source,omitempty"` + Value *string `json:"value,omitempty"` + } `json:"labels,omitempty"` + } `json:"entity_analytics_monitoring,omitempty"` + Event *struct { + Ingested *time.Time `json:"ingested,omitempty"` + } `json:"event,omitempty"` + Id *string `json:"id,omitempty"` + Labels *struct { + SourceIds *[]string `json:"source_ids,omitempty"` + SourceIntegrations *[]string `json:"source_integrations,omitempty"` + Sources *[]interface{} `json:"sources,omitempty"` + } `json:"labels,omitempty"` + User *struct { + // IsPrivileged Indicates if the user is privileged. + IsPrivileged *bool `json:"is_privileged,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"user,omitempty"` +} + +// SecurityEntityAnalyticsAPIMonitoringEngineDescriptor defines model for Security_Entity_Analytics_API_MonitoringEngineDescriptor. +type SecurityEntityAnalyticsAPIMonitoringEngineDescriptor struct { + Status SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatus `json:"status"` +} + +// SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatus defines model for Security_Entity_Analytics_API_PrivilegeMonitoringEngineStatus. +type SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatus string + +// SecurityEntityAnalyticsAPIPrivmonUserCsvUploadErrorItem defines model for Security_Entity_Analytics_API_PrivmonUserCsvUploadErrorItem. +type SecurityEntityAnalyticsAPIPrivmonUserCsvUploadErrorItem struct { + Index *int `json:"index,omitempty"` + Message string `json:"message"` + Username *string `json:"username,omitempty"` +} + +// SecurityEntityAnalyticsAPIPrivmonUserCsvUploadStats defines model for Security_Entity_Analytics_API_PrivmonUserCsvUploadStats. +type SecurityEntityAnalyticsAPIPrivmonUserCsvUploadStats struct { + Failed int `json:"failed"` + Successful int `json:"successful"` + Total int `json:"total"` +} + +// SecurityEntityAnalyticsAPIRiskEngineScheduleNowErrorResponse defines model for Security_Entity_Analytics_API_RiskEngineScheduleNowErrorResponse. +type SecurityEntityAnalyticsAPIRiskEngineScheduleNowErrorResponse struct { + FullError string `json:"full_error"` + Message string `json:"message"` +} + +// SecurityEntityAnalyticsAPIRiskEngineScheduleNowResponse defines model for Security_Entity_Analytics_API_RiskEngineScheduleNowResponse. +type SecurityEntityAnalyticsAPIRiskEngineScheduleNowResponse struct { + Success *bool `json:"success,omitempty"` +} + +// SecurityEntityAnalyticsAPIRiskScoreInput A generic representation of a document contributing to a Risk Score. +type SecurityEntityAnalyticsAPIRiskScoreInput struct { + // Category The risk category of the risk input document. 
+ Category string `json:"category"` + ContributionScore *float64 `json:"contribution_score,omitempty"` + + // Description A human-readable description of the risk input document. + Description string `json:"description"` + + // Id The unique identifier (`_id`) of the original source document + Id string `json:"id"` + + // Index The unique index (`_index`) of the original source document + Index string `json:"index"` + + // RiskScore The weighted risk score of the risk input document. + RiskScore *float64 `json:"risk_score,omitempty"` + + // Timestamp The @timestamp of the risk input document. + Timestamp *string `json:"timestamp,omitempty"` +} + +// SecurityEntityAnalyticsAPIServiceEntity defines model for Security_Entity_Analytics_API_ServiceEntity. +type SecurityEntityAnalyticsAPIServiceEntity struct { + Timestamp *time.Time `json:"@timestamp,omitempty"` + Asset *struct { + // Criticality The criticality level of the asset. + Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Entity struct { + EngineMetadata *SecurityEntityAnalyticsAPIEngineMetadata `json:"EngineMetadata,omitempty"` + Name string `json:"name"` + Source string `json:"source"` + Type string `json:"type"` + } `json:"entity"` + Event *struct { + Ingested *time.Time `json:"ingested,omitempty"` + } `json:"event,omitempty"` + Service struct { + Name string `json:"name"` + Risk *SecurityEntityAnalyticsAPIEntityRiskScoreRecord `json:"risk,omitempty"` + } `json:"service"` +} + +// SecurityEntityAnalyticsAPIStoreStatus defines model for Security_Entity_Analytics_API_StoreStatus. +type SecurityEntityAnalyticsAPIStoreStatus string + +// SecurityEntityAnalyticsAPITaskManagerUnavailableResponse Task manager is unavailable +type SecurityEntityAnalyticsAPITaskManagerUnavailableResponse struct { + Message string `json:"message"` + StatusCode int `json:"status_code"` +} + +// SecurityEntityAnalyticsAPITransformStatsMetadata defines model for Security_Entity_Analytics_API_TransformStatsMetadata. +type SecurityEntityAnalyticsAPITransformStatsMetadata struct { + DeleteTimeInMs *int `json:"delete_time_in_ms,omitempty"` + DocumentsDeleted *int `json:"documents_deleted,omitempty"` + DocumentsIndexed int `json:"documents_indexed"` + DocumentsProcessed int `json:"documents_processed"` + ExponentialAvgCheckpointDurationMs int `json:"exponential_avg_checkpoint_duration_ms"` + ExponentialAvgDocumentsIndexed int `json:"exponential_avg_documents_indexed"` + ExponentialAvgDocumentsProcessed int `json:"exponential_avg_documents_processed"` + IndexFailures int `json:"index_failures"` + IndexTimeInMs int `json:"index_time_in_ms"` + IndexTotal int `json:"index_total"` + PagesProcessed int `json:"pages_processed"` + ProcessingTimeInMs int `json:"processing_time_in_ms"` + ProcessingTotal int `json:"processing_total"` + SearchFailures int `json:"search_failures"` + SearchTimeInMs int `json:"search_time_in_ms"` + SearchTotal int `json:"search_total"` + TriggerCount int `json:"trigger_count"` +} + +// SecurityEntityAnalyticsAPIUserEntity defines model for Security_Entity_Analytics_API_UserEntity. +type SecurityEntityAnalyticsAPIUserEntity struct { + Timestamp *time.Time `json:"@timestamp,omitempty"` + Asset *struct { + // Criticality The criticality level of the asset. 
+ Criticality SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality"` + } `json:"asset,omitempty"` + Entity struct { + EngineMetadata *SecurityEntityAnalyticsAPIEngineMetadata `json:"EngineMetadata,omitempty"` + Name string `json:"name"` + Source string `json:"source"` + Type string `json:"type"` + } `json:"entity"` + Event *struct { + Ingested *time.Time `json:"ingested,omitempty"` + } `json:"event,omitempty"` + User struct { + Domain *[]string `json:"domain,omitempty"` + Email *[]string `json:"email,omitempty"` + FullName *[]string `json:"full_name,omitempty"` + Hash *[]string `json:"hash,omitempty"` + Id *[]string `json:"id,omitempty"` + Name string `json:"name"` + Risk *SecurityEntityAnalyticsAPIEntityRiskScoreRecord `json:"risk,omitempty"` + Roles *[]string `json:"roles,omitempty"` + } `json:"user"` +} + +// SecurityEntityAnalyticsAPIUserName defines model for Security_Entity_Analytics_API_UserName. +type SecurityEntityAnalyticsAPIUserName struct { + User *struct { + // Name The name of the user. + Name *string `json:"name,omitempty"` + } `json:"user,omitempty"` +} + +// SecurityExceptionsAPICreateExceptionListItemComment defines model for Security_Exceptions_API_CreateExceptionListItemComment. +type SecurityExceptionsAPICreateExceptionListItemComment struct { + // Comment A string that does not contain only whitespace characters + Comment SecurityExceptionsAPINonEmptyString `json:"comment"` +} + +// SecurityExceptionsAPICreateExceptionListItemCommentArray defines model for Security_Exceptions_API_CreateExceptionListItemCommentArray. +type SecurityExceptionsAPICreateExceptionListItemCommentArray = []SecurityExceptionsAPICreateExceptionListItemComment + +// SecurityExceptionsAPICreateRuleExceptionListItemComment defines model for Security_Exceptions_API_CreateRuleExceptionListItemComment. +type SecurityExceptionsAPICreateRuleExceptionListItemComment struct { + // Comment A string that does not contain only whitespace characters + Comment SecurityExceptionsAPINonEmptyString `json:"comment"` +} + +// SecurityExceptionsAPICreateRuleExceptionListItemCommentArray defines model for Security_Exceptions_API_CreateRuleExceptionListItemCommentArray. +type SecurityExceptionsAPICreateRuleExceptionListItemCommentArray = []SecurityExceptionsAPICreateRuleExceptionListItemComment + +// SecurityExceptionsAPICreateRuleExceptionListItemProps defines model for Security_Exceptions_API_CreateRuleExceptionListItemProps. +type SecurityExceptionsAPICreateRuleExceptionListItemProps struct { + Comments *SecurityExceptionsAPICreateRuleExceptionListItemCommentArray `json:"comments,omitempty"` + + // Description Describes the exception list. + Description SecurityExceptionsAPIExceptionListItemDescription `json:"description"` + Entries SecurityExceptionsAPIExceptionListItemEntryArray `json:"entries"` + ExpireTime *time.Time `json:"expire_time,omitempty"` + + // ItemId Human readable string identifier, e.g. `trusted-linux-processes` + ItemId *SecurityExceptionsAPIExceptionListItemHumanId `json:"item_id,omitempty"` + Meta *SecurityExceptionsAPIExceptionListItemMeta `json:"meta,omitempty"` + + // Name Exception list name. + Name SecurityExceptionsAPIExceptionListItemName `json:"name"` + + // NamespaceType Determines whether the exception container is available in all Kibana spaces or just the space + // in which it is created, where: + // + // - `single`: Only available in the Kibana space in which it is created. + // - `agnostic`: Available in all Kibana spaces. 
+ NamespaceType *SecurityExceptionsAPIExceptionNamespaceType `json:"namespace_type,omitempty"`
+ OsTypes *SecurityExceptionsAPIExceptionListItemOsTypeArray `json:"os_types,omitempty"`
+ Tags *SecurityExceptionsAPIExceptionListItemTags `json:"tags,omitempty"`
+ Type SecurityExceptionsAPIExceptionListItemType `json:"type"`
+}
+
+// SecurityExceptionsAPIExceptionList defines model for Security_Exceptions_API_ExceptionList.
+type SecurityExceptionsAPIExceptionList struct {
+ // UnderscoreVersion The version id, normally returned by the API when the item was retrieved. Use it to ensure updates are done against the latest version.
+ UnderscoreVersion *string `json:"_version,omitempty"`
+
+ // CreatedAt Autogenerated date of object creation.
+ CreatedAt time.Time `json:"created_at"`
+
+ // CreatedBy Autogenerated value - user that created object.
+ CreatedBy string `json:"created_by"`
+
+ // Description Describes the exception list.
+ Description SecurityExceptionsAPIExceptionListDescription `json:"description"`
+
+ // Id Exception list's identifier.
+ Id SecurityExceptionsAPIExceptionListId `json:"id"`
+ Immutable bool `json:"immutable"`
+
+ // ListId The exception list's human readable string identifier, `endpoint_list`.
+ ListId SecurityExceptionsAPIExceptionListHumanId `json:"list_id"`
+
+ // Meta Placeholder for metadata about the list container.
+ Meta *SecurityExceptionsAPIExceptionListMeta `json:"meta,omitempty"`
+
+ // Name The name of the exception list.
+ Name SecurityExceptionsAPIExceptionListName `json:"name"`
+
+ // NamespaceType Determines whether the exception container is available in all Kibana spaces or just the space
+ // in which it is created, where:
+ //
+ // - `single`: Only available in the Kibana space in which it is created.
+ // - `agnostic`: Available in all Kibana spaces.
+ NamespaceType SecurityExceptionsAPIExceptionNamespaceType `json:"namespace_type"`
+
+ // OsTypes Use this field to specify the operating system. Only enter one value.
+ OsTypes *SecurityExceptionsAPIExceptionListOsTypeArray `json:"os_types,omitempty"`
+
+ // Tags String array containing words and phrases to help categorize exception containers.
+ Tags *SecurityExceptionsAPIExceptionListTags `json:"tags,omitempty"`
+
+ // TieBreakerId Field used in search to ensure all containers are sorted and returned correctly.
+ TieBreakerId string `json:"tie_breaker_id"`
+
+ // Type The type of exception list to be created. Different list types may denote where they can be utilized.
+ Type SecurityExceptionsAPIExceptionListType `json:"type"`
+
+ // UpdatedAt Autogenerated date of last object update.
+ UpdatedAt time.Time `json:"updated_at"`
+
+ // UpdatedBy Autogenerated value - user that last updated object.
+ UpdatedBy string `json:"updated_by"`
+
+ // Version The document version, automatically increased on updates.
+ Version SecurityExceptionsAPIExceptionListVersion `json:"version"`
+}
+
+// SecurityExceptionsAPIExceptionListDescription Describes the exception list.
+type SecurityExceptionsAPIExceptionListDescription = string
+
+// SecurityExceptionsAPIExceptionListHumanId The exception list's human readable string identifier, `endpoint_list`.
+type SecurityExceptionsAPIExceptionListHumanId = string
+
+// SecurityExceptionsAPIExceptionListId Exception list's identifier.
+type SecurityExceptionsAPIExceptionListId = string
+
+// SecurityExceptionsAPIExceptionListItem defines model for Security_Exceptions_API_ExceptionListItem.
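Because the exception list model is a plain struct with JSON tags, an API response body can be decoded into it directly. A minimal sketch, assuming it lives alongside the generated types:

package kbapi // assumed package name of the generated client; adjust to the real one

import (
	"encoding/json"
	"fmt"
)

// decodeExceptionList is a hypothetical helper for turning a raw response body
// into the generated model and reporting a few identifying fields.
func decodeExceptionList(body []byte) (*SecurityExceptionsAPIExceptionList, error) {
	var list SecurityExceptionsAPIExceptionList
	if err := json.Unmarshal(body, &list); err != nil {
		return nil, fmt.Errorf("decode exception list: %w", err)
	}
	fmt.Printf("list %q (%s) in namespace %q\n", list.Name, list.ListId, list.NamespaceType)
	return &list, nil
}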
+type SecurityExceptionsAPIExceptionListItem struct { + // UnderscoreVersion The version id, normally returned by the API when the item was retrieved. Use it ensure updates are done against the latest version. + UnderscoreVersion *string `json:"_version,omitempty"` + + // Comments Array of comment fields: + // + // - comment (string): Comments about the exception item. + Comments SecurityExceptionsAPIExceptionListItemCommentArray `json:"comments"` + + // CreatedAt Autogenerated date of object creation. + CreatedAt time.Time `json:"created_at"` + + // CreatedBy Autogenerated value - user that created object. + CreatedBy string `json:"created_by"` + + // Description Describes the exception list. + Description SecurityExceptionsAPIExceptionListItemDescription `json:"description"` + Entries SecurityExceptionsAPIExceptionListItemEntryArray `json:"entries"` + + // ExpireTime The exception item’s expiration date, in ISO format. This field is only available for regular exception items, not endpoint exceptions. + ExpireTime *SecurityExceptionsAPIExceptionListItemExpireTime `json:"expire_time,omitempty"` + + // Id Exception's identifier. + Id SecurityExceptionsAPIExceptionListItemId `json:"id"` + + // ItemId Human readable string identifier, e.g. `trusted-linux-processes` + ItemId SecurityExceptionsAPIExceptionListItemHumanId `json:"item_id"` + + // ListId The exception list's human readable string identifier, `endpoint_list`. + ListId SecurityExceptionsAPIExceptionListHumanId `json:"list_id"` + Meta *SecurityExceptionsAPIExceptionListItemMeta `json:"meta,omitempty"` + + // Name Exception list name. + Name SecurityExceptionsAPIExceptionListItemName `json:"name"` + + // NamespaceType Determines whether the exception container is available in all Kibana spaces or just the space + // in which it is created, where: + // + // - `single`: Only available in the Kibana space in which it is created. + // - `agnostic`: Available in all Kibana spaces. + NamespaceType SecurityExceptionsAPIExceptionNamespaceType `json:"namespace_type"` + OsTypes *SecurityExceptionsAPIExceptionListItemOsTypeArray `json:"os_types,omitempty"` + Tags *SecurityExceptionsAPIExceptionListItemTags `json:"tags,omitempty"` + + // TieBreakerId Field used in search to ensure all containers are sorted and returned correctly. + TieBreakerId string `json:"tie_breaker_id"` + Type SecurityExceptionsAPIExceptionListItemType `json:"type"` + + // UpdatedAt Autogenerated date of last object update. + UpdatedAt time.Time `json:"updated_at"` + + // UpdatedBy Autogenerated value - user that last updated object. + UpdatedBy string `json:"updated_by"` +} + +// SecurityExceptionsAPIExceptionListItemComment defines model for Security_Exceptions_API_ExceptionListItemComment. +type SecurityExceptionsAPIExceptionListItemComment struct { + // Comment A string that does not contain only whitespace characters + Comment SecurityExceptionsAPINonEmptyString `json:"comment"` + + // CreatedAt Autogenerated date of object creation. + CreatedAt time.Time `json:"created_at"` + + // CreatedBy A string that does not contain only whitespace characters + CreatedBy SecurityExceptionsAPINonEmptyString `json:"created_by"` + + // Id A string that does not contain only whitespace characters + Id SecurityExceptionsAPINonEmptyString `json:"id"` + + // UpdatedAt Autogenerated date of last object update. 
+ UpdatedAt *time.Time `json:"updated_at,omitempty"` + + // UpdatedBy A string that does not contain only whitespace characters + UpdatedBy *SecurityExceptionsAPINonEmptyString `json:"updated_by,omitempty"` +} + +// SecurityExceptionsAPIExceptionListItemCommentArray Array of comment fields: +// +// - comment (string): Comments about the exception item. +type SecurityExceptionsAPIExceptionListItemCommentArray = []SecurityExceptionsAPIExceptionListItemComment + +// SecurityExceptionsAPIExceptionListItemDescription Describes the exception list. +type SecurityExceptionsAPIExceptionListItemDescription = string + +// SecurityExceptionsAPIExceptionListItemEntry defines model for Security_Exceptions_API_ExceptionListItemEntry. +type SecurityExceptionsAPIExceptionListItemEntry struct { + union json.RawMessage +} + +// SecurityExceptionsAPIExceptionListItemEntryArray defines model for Security_Exceptions_API_ExceptionListItemEntryArray. +type SecurityExceptionsAPIExceptionListItemEntryArray = []SecurityExceptionsAPIExceptionListItemEntry + +// SecurityExceptionsAPIExceptionListItemEntryExists defines model for Security_Exceptions_API_ExceptionListItemEntryExists. +type SecurityExceptionsAPIExceptionListItemEntryExists struct { + // Field A string that does not contain only whitespace characters + Field SecurityExceptionsAPINonEmptyString `json:"field"` + Operator SecurityExceptionsAPIExceptionListItemEntryOperator `json:"operator"` + Type SecurityExceptionsAPIExceptionListItemEntryExistsType `json:"type"` +} + +// SecurityExceptionsAPIExceptionListItemEntryExistsType defines model for SecurityExceptionsAPIExceptionListItemEntryExists.Type. +type SecurityExceptionsAPIExceptionListItemEntryExistsType string + +// SecurityExceptionsAPIExceptionListItemEntryList defines model for Security_Exceptions_API_ExceptionListItemEntryList. +type SecurityExceptionsAPIExceptionListItemEntryList struct { + // Field A string that does not contain only whitespace characters + Field SecurityExceptionsAPINonEmptyString `json:"field"` + List struct { + // Id Value list's identifier. + Id SecurityExceptionsAPIListId `json:"id"` + + // Type Specifies the Elasticsearch data type of excludes the list container holds. Some common examples: + // + // - `keyword`: Many ECS fields are Elasticsearch keywords + // - `ip`: IP addresses + // - `ip_range`: Range of IP addresses (supports IPv4, IPv6, and CIDR notation) + Type SecurityExceptionsAPIListType `json:"type"` + } `json:"list"` + Operator SecurityExceptionsAPIExceptionListItemEntryOperator `json:"operator"` + Type SecurityExceptionsAPIExceptionListItemEntryListType `json:"type"` +} + +// SecurityExceptionsAPIExceptionListItemEntryListType defines model for SecurityExceptionsAPIExceptionListItemEntryList.Type. +type SecurityExceptionsAPIExceptionListItemEntryListType string + +// SecurityExceptionsAPIExceptionListItemEntryMatch defines model for Security_Exceptions_API_ExceptionListItemEntryMatch. 
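The entry variants are concrete structs, while SecurityExceptionsAPIExceptionListItemEntry itself only wraps the raw JSON of whichever variant was used; populating the wrapper normally goes through generated helper methods that are not part of this hunk. A minimal sketch of marshalling one variant directly, assuming it compiles alongside the generated types (the operator and type literals are illustrative):

package kbapi // assumed package name of the generated client; adjust to the real one

import (
	"encoding/json"
	"fmt"
)

// encodeExistsEntry is a hypothetical helper building an "exists" entry body.
func encodeExistsEntry() ([]byte, error) {
	entry := SecurityExceptionsAPIExceptionListItemEntryExists{
		Field:    SecurityExceptionsAPINonEmptyString("process.executable"),
		Operator: SecurityExceptionsAPIExceptionListItemEntryOperator("included"),
		Type:     SecurityExceptionsAPIExceptionListItemEntryExistsType("exists"),
	}
	b, err := json.Marshal(entry)
	if err != nil {
		return nil, fmt.Errorf("marshal exists entry: %w", err)
	}
	return b, nil
}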
+type SecurityExceptionsAPIExceptionListItemEntryMatch struct { + // Field A string that does not contain only whitespace characters + Field SecurityExceptionsAPINonEmptyString `json:"field"` + Operator SecurityExceptionsAPIExceptionListItemEntryOperator `json:"operator"` + Type SecurityExceptionsAPIExceptionListItemEntryMatchType `json:"type"` + + // Value A string that does not contain only whitespace characters + Value SecurityExceptionsAPINonEmptyString `json:"value"` +} + +// SecurityExceptionsAPIExceptionListItemEntryMatchType defines model for SecurityExceptionsAPIExceptionListItemEntryMatch.Type. +type SecurityExceptionsAPIExceptionListItemEntryMatchType string + +// SecurityExceptionsAPIExceptionListItemEntryMatchAny defines model for Security_Exceptions_API_ExceptionListItemEntryMatchAny. +type SecurityExceptionsAPIExceptionListItemEntryMatchAny struct { + // Field A string that does not contain only whitespace characters + Field SecurityExceptionsAPINonEmptyString `json:"field"` + Operator SecurityExceptionsAPIExceptionListItemEntryOperator `json:"operator"` + Type SecurityExceptionsAPIExceptionListItemEntryMatchAnyType `json:"type"` + Value []SecurityExceptionsAPINonEmptyString `json:"value"` +} + +// SecurityExceptionsAPIExceptionListItemEntryMatchAnyType defines model for SecurityExceptionsAPIExceptionListItemEntryMatchAny.Type. +type SecurityExceptionsAPIExceptionListItemEntryMatchAnyType string + +// SecurityExceptionsAPIExceptionListItemEntryMatchWildcard defines model for Security_Exceptions_API_ExceptionListItemEntryMatchWildcard. +type SecurityExceptionsAPIExceptionListItemEntryMatchWildcard struct { + // Field A string that does not contain only whitespace characters + Field SecurityExceptionsAPINonEmptyString `json:"field"` + Operator SecurityExceptionsAPIExceptionListItemEntryOperator `json:"operator"` + Type SecurityExceptionsAPIExceptionListItemEntryMatchWildcardType `json:"type"` + + // Value A string that does not contain only whitespace characters + Value SecurityExceptionsAPINonEmptyString `json:"value"` +} + +// SecurityExceptionsAPIExceptionListItemEntryMatchWildcardType defines model for SecurityExceptionsAPIExceptionListItemEntryMatchWildcard.Type. +type SecurityExceptionsAPIExceptionListItemEntryMatchWildcardType string + +// SecurityExceptionsAPIExceptionListItemEntryNested defines model for Security_Exceptions_API_ExceptionListItemEntryNested. +type SecurityExceptionsAPIExceptionListItemEntryNested struct { + Entries []SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem `json:"entries"` + + // Field A string that does not contain only whitespace characters + Field SecurityExceptionsAPINonEmptyString `json:"field"` + Type SecurityExceptionsAPIExceptionListItemEntryNestedType `json:"type"` +} + +// SecurityExceptionsAPIExceptionListItemEntryNestedType defines model for SecurityExceptionsAPIExceptionListItemEntryNested.Type. +type SecurityExceptionsAPIExceptionListItemEntryNestedType string + +// SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem defines model for Security_Exceptions_API_ExceptionListItemEntryNestedEntryItem. +type SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem struct { + union json.RawMessage +} + +// SecurityExceptionsAPIExceptionListItemEntryOperator defines model for Security_Exceptions_API_ExceptionListItemEntryOperator. +type SecurityExceptionsAPIExceptionListItemEntryOperator string + +// SecurityExceptionsAPIExceptionListItemExpireTime The exception item’s expiration date, in ISO format. 
This field is only available for regular exception items, not endpoint exceptions.
+type SecurityExceptionsAPIExceptionListItemExpireTime = time.Time
+
+// SecurityExceptionsAPIExceptionListItemHumanId Human readable string identifier, e.g. `trusted-linux-processes`
+type SecurityExceptionsAPIExceptionListItemHumanId = string
+
+// SecurityExceptionsAPIExceptionListItemId Exception's identifier.
+type SecurityExceptionsAPIExceptionListItemId = string
+
+// SecurityExceptionsAPIExceptionListItemMeta defines model for Security_Exceptions_API_ExceptionListItemMeta.
+type SecurityExceptionsAPIExceptionListItemMeta map[string]interface{}
+
+// SecurityExceptionsAPIExceptionListItemName Exception list name.
+type SecurityExceptionsAPIExceptionListItemName = string
+
+// SecurityExceptionsAPIExceptionListItemOsTypeArray defines model for Security_Exceptions_API_ExceptionListItemOsTypeArray.
+type SecurityExceptionsAPIExceptionListItemOsTypeArray = []SecurityExceptionsAPIExceptionListOsType
+
+// SecurityExceptionsAPIExceptionListItemTags defines model for Security_Exceptions_API_ExceptionListItemTags.
+type SecurityExceptionsAPIExceptionListItemTags = []string
+
+// SecurityExceptionsAPIExceptionListItemType defines model for Security_Exceptions_API_ExceptionListItemType.
+type SecurityExceptionsAPIExceptionListItemType string
+
+// SecurityExceptionsAPIExceptionListMeta Placeholder for metadata about the list container.
+type SecurityExceptionsAPIExceptionListMeta map[string]interface{}
+
+// SecurityExceptionsAPIExceptionListName The name of the exception list.
+type SecurityExceptionsAPIExceptionListName = string
+
+// SecurityExceptionsAPIExceptionListOsType Use this field to specify the operating system.
+type SecurityExceptionsAPIExceptionListOsType string
+
+// SecurityExceptionsAPIExceptionListOsTypeArray Use this field to specify the operating system. Only enter one value.
+type SecurityExceptionsAPIExceptionListOsTypeArray = []SecurityExceptionsAPIExceptionListOsType
+
+// SecurityExceptionsAPIExceptionListTags String array containing words and phrases to help categorize exception containers.
+type SecurityExceptionsAPIExceptionListTags = []string
+
+// SecurityExceptionsAPIExceptionListType The type of exception list to be created. Different list types may denote where they can be utilized.
+type SecurityExceptionsAPIExceptionListType string
+
+// SecurityExceptionsAPIExceptionListVersion The document version, automatically increased on updates.
+type SecurityExceptionsAPIExceptionListVersion = int
+
+// SecurityExceptionsAPIExceptionListsImportBulkError defines model for Security_Exceptions_API_ExceptionListsImportBulkError.
+type SecurityExceptionsAPIExceptionListsImportBulkError struct {
+ Error struct {
+ Message string `json:"message"`
+ StatusCode int `json:"status_code"`
+ } `json:"error"`
+
+ // Id Exception list's identifier.
+ Id *SecurityExceptionsAPIExceptionListId `json:"id,omitempty"`
+
+ // ItemId Human readable string identifier, e.g. `trusted-linux-processes`
+ ItemId *SecurityExceptionsAPIExceptionListItemHumanId `json:"item_id,omitempty"`
+
+ // ListId The exception list's human readable string identifier, `endpoint_list`.
+ ListId *SecurityExceptionsAPIExceptionListHumanId `json:"list_id,omitempty"`
+}
+
+// SecurityExceptionsAPIExceptionListsImportBulkErrorArray defines model for Security_Exceptions_API_ExceptionListsImportBulkErrorArray.
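The import bulk error model above carries its identifiers as optional pointers, so a caller has to nil-check before use. A minimal sketch of collapsing a batch of them into one error, assuming it sits alongside the generated types:

package kbapi // assumed package name of the generated client; adjust to the real one

import "fmt"

// summarizeImportErrors is a hypothetical helper for surfacing import failures.
func summarizeImportErrors(errs []SecurityExceptionsAPIExceptionListsImportBulkError) error {
	if len(errs) == 0 {
		return nil
	}
	msgs := make([]string, 0, len(errs))
	for _, e := range errs {
		id := "unknown"
		if e.ListId != nil {
			id = string(*e.ListId)
		}
		msgs = append(msgs, fmt.Sprintf("%s: %d %s", id, e.Error.StatusCode, e.Error.Message))
	}
	return fmt.Errorf("exception list import failed for %d item(s): %v", len(msgs), msgs)
}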
+type SecurityExceptionsAPIExceptionListsImportBulkErrorArray = []SecurityExceptionsAPIExceptionListsImportBulkError + +// SecurityExceptionsAPIExceptionNamespaceType Determines whether the exception container is available in all Kibana spaces or just the space +// in which it is created, where: +// +// - `single`: Only available in the Kibana space in which it is created. +// - `agnostic`: Available in all Kibana spaces. +type SecurityExceptionsAPIExceptionNamespaceType string + +// SecurityExceptionsAPIFindExceptionListItemsFilter A string that does not contain only whitespace characters +type SecurityExceptionsAPIFindExceptionListItemsFilter = SecurityExceptionsAPINonEmptyString + +// SecurityExceptionsAPIFindExceptionListsFilter defines model for Security_Exceptions_API_FindExceptionListsFilter. +type SecurityExceptionsAPIFindExceptionListsFilter = string + +// SecurityExceptionsAPIListId Value list's identifier. +type SecurityExceptionsAPIListId = string + +// SecurityExceptionsAPIListType Specifies the Elasticsearch data type of excludes the list container holds. Some common examples: +// +// - `keyword`: Many ECS fields are Elasticsearch keywords +// - `ip`: IP addresses +// - `ip_range`: Range of IP addresses (supports IPv4, IPv6, and CIDR notation) +type SecurityExceptionsAPIListType string + +// SecurityExceptionsAPINonEmptyString A string that does not contain only whitespace characters +type SecurityExceptionsAPINonEmptyString = string + +// SecurityExceptionsAPIPlatformErrorResponse defines model for Security_Exceptions_API_PlatformErrorResponse. +type SecurityExceptionsAPIPlatformErrorResponse struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode int `json:"statusCode"` +} + +// SecurityExceptionsAPIRuleId A universally unique identifier +type SecurityExceptionsAPIRuleId = SecurityExceptionsAPIUUID + +// SecurityExceptionsAPISiemErrorResponse defines model for Security_Exceptions_API_SiemErrorResponse. +type SecurityExceptionsAPISiemErrorResponse struct { + Message string `json:"message"` + StatusCode int `json:"status_code"` +} + +// SecurityExceptionsAPIUUID A universally unique identifier +type SecurityExceptionsAPIUUID = openapi_types.UUID + +// SecurityExceptionsAPIUpdateExceptionListItemComment defines model for Security_Exceptions_API_UpdateExceptionListItemComment. +type SecurityExceptionsAPIUpdateExceptionListItemComment struct { + // Comment A string that does not contain only whitespace characters + Comment SecurityExceptionsAPINonEmptyString `json:"comment"` + + // Id A string that does not contain only whitespace characters + Id *SecurityExceptionsAPINonEmptyString `json:"id,omitempty"` +} + +// SecurityExceptionsAPIUpdateExceptionListItemCommentArray defines model for Security_Exceptions_API_UpdateExceptionListItemCommentArray. +type SecurityExceptionsAPIUpdateExceptionListItemCommentArray = []SecurityExceptionsAPIUpdateExceptionListItemComment + +// SecurityListsAPIFindListItemsCursor Returns the items that come after the last item returned in the previous call (use the `cursor` value returned in the previous call). This parameter uses the `tie_breaker_id` field to ensure all items are sorted and returned correctly. +type SecurityListsAPIFindListItemsCursor = string + +// SecurityListsAPIFindListItemsFilter defines model for Security_Lists_API_FindListItemsFilter. +type SecurityListsAPIFindListItemsFilter = string + +// SecurityListsAPIFindListsCursor defines model for Security_Lists_API_FindListsCursor. 
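The two error shapes above differ mainly in their status-code field (`statusCode` vs `status_code`), so decoding into both and keeping whichever populated a code is one simple way to surface a readable message. A minimal sketch, assuming the same package placement as above:

package kbapi // assumed package name of the generated client; adjust to the real one

import (
	"encoding/json"
	"fmt"
)

// decodeExceptionsError is a hypothetical helper that tries both generated
// error models against an unsuccessful response body.
func decodeExceptionsError(body []byte) error {
	var platform SecurityExceptionsAPIPlatformErrorResponse
	if err := json.Unmarshal(body, &platform); err == nil && platform.StatusCode != 0 {
		return fmt.Errorf("kibana error %d: %s", platform.StatusCode, platform.Message)
	}
	var siem SecurityExceptionsAPISiemErrorResponse
	if err := json.Unmarshal(body, &siem); err == nil && siem.StatusCode != 0 {
		return fmt.Errorf("siem error %d: %s", siem.StatusCode, siem.Message)
	}
	return fmt.Errorf("unrecognised error payload: %s", string(body))
}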
+type SecurityListsAPIFindListsCursor = string
+
+// SecurityListsAPIFindListsFilter defines model for Security_Lists_API_FindListsFilter.
+type SecurityListsAPIFindListsFilter = string
+
+// SecurityListsAPIList defines model for Security_Lists_API_List.
+type SecurityListsAPIList struct {
+ Timestamp *time.Time `json:"@timestamp,omitempty"`
+
+ // UnderscoreVersion The version id, normally returned by the API when the document is retrieved. Use it to ensure updates are done against the latest version.
+ UnderscoreVersion *SecurityListsAPIListVersionId `json:"_version,omitempty"`
+
+ // CreatedAt Autogenerated date of object creation.
+ CreatedAt time.Time `json:"created_at"`
+
+ // CreatedBy Autogenerated value - user that created object.
+ CreatedBy string `json:"created_by"`
+
+ // Description Describes the value list.
+ Description SecurityListsAPIListDescription `json:"description"`
+
+ // Deserializer Determines how retrieved list item values are presented. By default list items are presented using these Handlebars expressions:
+ //
+ // - `{{{value}}}` - Single value item types, such as `ip`, `long`, `date`, `keyword`, and `text`.
+ // - `{{{gte}}}-{{{lte}}}` - Range value item types, such as `ip_range`, `double_range`, `float_range`, `integer_range`, and `long_range`.
+ // - `{{{gte}}},{{{lte}}}` - Date range values.
+ Deserializer *SecurityListsAPIListDeserializer `json:"deserializer,omitempty"`
+
+ // Id Value list's identifier.
+ Id SecurityListsAPIListId `json:"id"`
+ Immutable bool `json:"immutable"`
+
+ // Meta Placeholder for metadata about the value list.
+ Meta *SecurityListsAPIListMetadata `json:"meta,omitempty"`
+
+ // Name Value list's name.
+ Name SecurityListsAPIListName `json:"name"`
+
+ // Serializer Determines how uploaded list item values are parsed. By default, list items are parsed using these named regex groups:
+ //
+ // - `(?.+)` - Single value item types, such as ip, long, date, keyword, and text.
+ // - `(?.+)-(?.+)|(?.+)` - Range value item types, such as `date_range`, `ip_range`, `double_range`, `float_range`, `integer_range`, and `long_range`.
+ Serializer *SecurityListsAPIListSerializer `json:"serializer,omitempty"`
+
+ // TieBreakerId Field used in search to ensure all containers are sorted and returned correctly.
+ TieBreakerId string `json:"tie_breaker_id"`
+
+ // Type Specifies the Elasticsearch data type of excludes the list container holds. Some common examples:
+ //
+ // - `keyword`: Many ECS fields are Elasticsearch keywords
+ // - `ip`: IP addresses
+ // - `ip_range`: Range of IP addresses (supports IPv4, IPv6, and CIDR notation)
+ Type SecurityListsAPIListType `json:"type"`
+
+ // UpdatedAt Autogenerated date of last object update.
+ UpdatedAt time.Time `json:"updated_at"`
+
+ // UpdatedBy Autogenerated value - user that last updated object.
+ UpdatedBy string `json:"updated_by"`
+
+ // Version The document version number.
+ Version SecurityListsAPIListVersion `json:"version"`
+}
+
+// SecurityListsAPIListDescription Describes the value list.
+type SecurityListsAPIListDescription = string
+
+// SecurityListsAPIListDeserializer Determines how retrieved list item values are presented. By default list items are presented using these Handlebars expressions:
+//
+// - `{{{value}}}` - Single value item types, such as `ip`, `long`, `date`, `keyword`, and `text`.
+// - `{{{gte}}}-{{{lte}}}` - Range value item types, such as `ip_range`, `double_range`, `float_range`, `integer_range`, and `long_range`.
+// - `{{{gte}}},{{{lte}}}` - Date range values.
+type SecurityListsAPIListDeserializer = string
+
+// SecurityListsAPIListId Value list's identifier.
+type SecurityListsAPIListId = string
+
+// SecurityListsAPIListItem defines model for Security_Lists_API_ListItem.
+type SecurityListsAPIListItem struct {
+ Timestamp *time.Time `json:"@timestamp,omitempty"`
+
+ // UnderscoreVersion The version id, normally returned by the API when the document is retrieved. Use it to ensure updates are done against the latest version.
+ UnderscoreVersion *SecurityListsAPIListVersionId `json:"_version,omitempty"`
+
+ // CreatedAt Autogenerated date of object creation.
+ CreatedAt time.Time `json:"created_at"`
+
+ // CreatedBy Autogenerated value - user that created object.
+ CreatedBy string `json:"created_by"`
+
+ // Deserializer Determines how retrieved list item values are presented. By default list items are presented using these Handlebars expressions:
+ //
+ // - `{{{value}}}` - Single value item types, such as `ip`, `long`, `date`, `keyword`, and `text`.
+ // - `{{{gte}}}-{{{lte}}}` - Range value item types, such as `ip_range`, `double_range`, `float_range`, `integer_range`, and `long_range`.
+ // - `{{{gte}}},{{{lte}}}` - Date range values.
+ Deserializer *SecurityListsAPIListDeserializer `json:"deserializer,omitempty"`
+
+ // Id Value list item's identifier.
+ Id SecurityListsAPIListItemId `json:"id"`
+
+ // ListId Value list's identifier.
+ ListId SecurityListsAPIListId `json:"list_id"`
+
+ // Meta Placeholder for metadata about the value list item.
+ Meta *SecurityListsAPIListItemMetadata `json:"meta,omitempty"`
+
+ // Serializer Determines how uploaded list item values are parsed. By default, list items are parsed using these named regex groups:
+ //
+ // - `(?.+)` - Single value item types, such as ip, long, date, keyword, and text.
+ // - `(?.+)-(?.+)|(?.+)` - Range value item types, such as `date_range`, `ip_range`, `double_range`, `float_range`, `integer_range`, and `long_range`.
+ Serializer *SecurityListsAPIListSerializer `json:"serializer,omitempty"`
+
+ // TieBreakerId Field used in search to ensure all containers are sorted and returned correctly.
+ TieBreakerId string `json:"tie_breaker_id"`
+
+ // Type Specifies the Elasticsearch data type of excludes the list container holds. Some common examples:
+ //
+ // - `keyword`: Many ECS fields are Elasticsearch keywords
+ // - `ip`: IP addresses
+ // - `ip_range`: Range of IP addresses (supports IPv4, IPv6, and CIDR notation)
+ Type SecurityListsAPIListType `json:"type"`
+
+ // UpdatedAt Autogenerated date of last object update.
+ UpdatedAt time.Time `json:"updated_at"`
+
+ // UpdatedBy Autogenerated value - user that last updated object.
+ UpdatedBy string `json:"updated_by"`
+
+ // Value The value used to evaluate exceptions.
+ Value SecurityListsAPIListItemValue `json:"value"`
+}
+
+// SecurityListsAPIListItemId Value list item's identifier.
+type SecurityListsAPIListItemId = string
+
+// SecurityListsAPIListItemMetadata Placeholder for metadata about the value list item.
+type SecurityListsAPIListItemMetadata map[string]interface{}
+
+// SecurityListsAPIListItemPrivileges defines model for Security_Lists_API_ListItemPrivileges.
+type SecurityListsAPIListItemPrivileges struct {
+ Application map[string]bool `json:"application"`
+ Cluster map[string]bool `json:"cluster"`
+ HasAllRequested bool `json:"has_all_requested"`
+ Index map[string]map[string]bool `json:"index"`
+ Username string `json:"username"`
+}
+
+// SecurityListsAPIListItemValue The value used to evaluate exceptions.
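Most of the value list aliases resolve to plain strings, so extracting data out of the generated structs is direct. A minimal sketch of collecting the raw values from a page of list items, e.g. to compare them with Terraform state, assuming it lives alongside the generated types:

package kbapi // assumed package name of the generated client; adjust to the real one

// listItemValues is a hypothetical helper returning the raw item values.
func listItemValues(items []SecurityListsAPIListItem) []string {
	values := make([]string, 0, len(items))
	for _, item := range items {
		// SecurityListsAPIListItemValue is a string alias, so the conversion is a no-op.
		values = append(values, string(item.Value))
	}
	return values
}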
+type SecurityListsAPIListItemValue = string + +// SecurityListsAPIListMetadata Placeholder for metadata about the value list. +type SecurityListsAPIListMetadata map[string]interface{} + +// SecurityListsAPIListName Value list's name. +type SecurityListsAPIListName = string + +// SecurityListsAPIListPrivileges defines model for Security_Lists_API_ListPrivileges. +type SecurityListsAPIListPrivileges struct { + Application map[string]bool `json:"application"` + Cluster map[string]bool `json:"cluster"` + HasAllRequested bool `json:"has_all_requested"` + Index map[string]map[string]bool `json:"index"` + Username string `json:"username"` +} + +// SecurityListsAPIListSerializer Determines how uploaded list item values are parsed. By default, list items are parsed using these named regex groups: +// +// - `(?.+)` - Single value item types, such as ip, long, date, keyword, and text. +// - `(?.+)-(?.+)|(?.+)` - Range value item types, such as `date_range`, `ip_range`, `double_range`, `float_range`, `integer_range`, and `long_range`. +type SecurityListsAPIListSerializer = string + +// SecurityListsAPIListType Specifies the Elasticsearch data type of excludes the list container holds. Some common examples: +// +// - `keyword`: Many ECS fields are Elasticsearch keywords +// - `ip`: IP addresses +// - `ip_range`: Range of IP addresses (supports IPv4, IPv6, and CIDR notation) +type SecurityListsAPIListType string + +// SecurityListsAPIListVersion The document version number. +type SecurityListsAPIListVersion = int + +// SecurityListsAPIListVersionId The version id, normally returned by the API when the document is retrieved. Use it ensure updates are done against the latest version. +type SecurityListsAPIListVersionId = string + +// SecurityListsAPIPlatformErrorResponse defines model for Security_Lists_API_PlatformErrorResponse. +type SecurityListsAPIPlatformErrorResponse struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode int `json:"statusCode"` +} + +// SecurityListsAPISiemErrorResponse defines model for Security_Lists_API_SiemErrorResponse. +type SecurityListsAPISiemErrorResponse struct { + Message string `json:"message"` + StatusCode int `json:"status_code"` +} + +// SecurityOsqueryAPIArrayQueries An array of queries to run. +type SecurityOsqueryAPIArrayQueries = []SecurityOsqueryAPIArrayQueriesItem + +// SecurityOsqueryAPIArrayQueriesItem defines model for Security_Osquery_API_ArrayQueriesItem. +type SecurityOsqueryAPIArrayQueriesItem struct { + // EcsMapping Map osquery results columns or static values to Elastic Common Schema (ECS) fields + EcsMapping *SecurityOsqueryAPIECSMappingOrUndefined `json:"ecs_mapping,omitempty"` + + // Id The ID of the query. + Id *SecurityOsqueryAPIQueryId `json:"id,omitempty"` + + // Platform Restricts the query to a specified platform. The default is all platforms. To specify multiple platforms, use commas. For example, `linux,darwin`. + Platform *SecurityOsqueryAPIPlatformOrUndefined `json:"platform,omitempty"` + + // Query The SQL query you want to run. + Query *SecurityOsqueryAPIQuery `json:"query,omitempty"` + + // Removed Indicates whether the query is removed. + Removed *SecurityOsqueryAPIRemovedOrUndefined `json:"removed,omitempty"` + + // Snapshot Indicates whether the query is a snapshot. + Snapshot *SecurityOsqueryAPISnapshotOrUndefined `json:"snapshot,omitempty"` + + // Version Uses the Osquery versions greater than or equal to the specified version string. 
+ Version *SecurityOsqueryAPIVersionOrUndefined `json:"version,omitempty"` +} + +// SecurityOsqueryAPICreateLiveQueryRequestBody defines model for Security_Osquery_API_CreateLiveQueryRequestBody. +type SecurityOsqueryAPICreateLiveQueryRequestBody struct { + // AgentAll When `true`, the query runs on all agents. + AgentAll *bool `json:"agent_all,omitempty"` + + // AgentIds A list of agent IDs to run the query on. + AgentIds *[]string `json:"agent_ids,omitempty"` + + // AgentPlatforms A list of agent platforms to run the query on. + AgentPlatforms *[]string `json:"agent_platforms,omitempty"` + + // AgentPolicyIds A list of agent policy IDs to run the query on. + AgentPolicyIds *[]string `json:"agent_policy_ids,omitempty"` + + // AlertIds A list of alert IDs associated with the live query. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds A list of case IDs associated with the live query. + CaseIds *[]string `json:"case_ids,omitempty"` + + // EcsMapping Map osquery results columns or static values to Elastic Common Schema (ECS) fields + EcsMapping *SecurityOsqueryAPIECSMappingOrUndefined `json:"ecs_mapping,omitempty"` + + // EventIds A list of event IDs associated with the live query. + EventIds *[]string `json:"event_ids,omitempty"` + + // Metadata Custom metadata object associated with the live query. + Metadata *map[string]interface{} `json:"metadata,omitempty"` + + // PackId The ID of the pack you want to run, retrieve, update, or delete. + PackId *SecurityOsqueryAPIPackIdOrUndefined `json:"pack_id,omitempty"` + + // Queries An array of queries to run. + Queries *SecurityOsqueryAPIArrayQueries `json:"queries,omitempty"` + + // Query The SQL query you want to run. + Query *SecurityOsqueryAPIQueryOrUndefined `json:"query,omitempty"` + + // SavedQueryId The ID of a saved query. + SavedQueryId *SecurityOsqueryAPISavedQueryIdOrUndefined `json:"saved_query_id,omitempty"` +} + +// SecurityOsqueryAPICreateLiveQueryResponse defines model for Security_Osquery_API_CreateLiveQueryResponse. +type SecurityOsqueryAPICreateLiveQueryResponse = map[string]interface{} + +// SecurityOsqueryAPICreatePacksRequestBody defines model for Security_Osquery_API_CreatePacksRequestBody. +type SecurityOsqueryAPICreatePacksRequestBody struct { + // Description The pack description. + Description *SecurityOsqueryAPIPackDescriptionOrUndefined `json:"description,omitempty"` + + // Enabled Enables the pack. + Enabled *SecurityOsqueryAPIEnabledOrUndefined `json:"enabled,omitempty"` + + // Name The pack name. + Name *SecurityOsqueryAPIPackName `json:"name,omitempty"` + + // PolicyIds A list of agents policy IDs. + PolicyIds *SecurityOsqueryAPIPolicyIdsOrUndefined `json:"policy_ids,omitempty"` + + // Queries An object of queries. + Queries *SecurityOsqueryAPIObjectQueries `json:"queries,omitempty"` + + // Shards An object with shard configuration for policies included in the pack. For each policy, set the shard configuration to a percentage (1–100) of target hosts. + Shards *SecurityOsqueryAPIShards `json:"shards,omitempty"` +} + +// SecurityOsqueryAPICreatePacksResponse defines model for Security_Osquery_API_CreatePacksResponse. +type SecurityOsqueryAPICreatePacksResponse = map[string]interface{} + +// SecurityOsqueryAPICreateSavedQueryRequestBody defines model for Security_Osquery_API_CreateSavedQueryRequestBody. +type SecurityOsqueryAPICreateSavedQueryRequestBody struct { + // Description The saved query description. 
+ Description *SecurityOsqueryAPISavedQueryDescriptionOrUndefined `json:"description,omitempty"` + + // EcsMapping Map osquery results columns or static values to Elastic Common Schema (ECS) fields + EcsMapping *SecurityOsqueryAPIECSMappingOrUndefined `json:"ecs_mapping,omitempty"` + + // Id The ID of a saved query. + Id *SecurityOsqueryAPISavedQueryId `json:"id,omitempty"` + + // Interval An interval, in seconds, on which to run the query. + Interval *SecurityOsqueryAPIInterval `json:"interval,omitempty"` + + // Platform Restricts the query to a specified platform. The default is all platforms. To specify multiple platforms, use commas. For example, `linux,darwin`. + Platform *SecurityOsqueryAPIPlatformOrUndefined `json:"platform,omitempty"` + + // Query The SQL query you want to run. + Query *SecurityOsqueryAPIQueryOrUndefined `json:"query,omitempty"` + + // Removed Indicates whether the query is removed. + Removed *SecurityOsqueryAPIRemovedOrUndefined `json:"removed,omitempty"` + + // Snapshot Indicates whether the query is a snapshot. + Snapshot *SecurityOsqueryAPISnapshotOrUndefined `json:"snapshot,omitempty"` + + // Version Uses the Osquery versions greater than or equal to the specified version string. + Version *SecurityOsqueryAPIVersionOrUndefined `json:"version,omitempty"` +} + +// SecurityOsqueryAPICreateSavedQueryResponse defines model for Security_Osquery_API_CreateSavedQueryResponse. +type SecurityOsqueryAPICreateSavedQueryResponse = map[string]interface{} + +// SecurityOsqueryAPIDefaultSuccessResponse defines model for Security_Osquery_API_DefaultSuccessResponse. +type SecurityOsqueryAPIDefaultSuccessResponse = map[string]interface{} + +// SecurityOsqueryAPIECSMapping Map osquery results columns or static values to Elastic Common Schema (ECS) fields +type SecurityOsqueryAPIECSMapping map[string]SecurityOsqueryAPIECSMappingItem + +// SecurityOsqueryAPIECSMappingItem defines model for Security_Osquery_API_ECSMappingItem. +type SecurityOsqueryAPIECSMappingItem struct { + // Field The ECS field to map to. + Field *string `json:"field,omitempty"` + + // Value The value to map to the ECS field. + Value *SecurityOsqueryAPIECSMappingItem_Value `json:"value,omitempty"` +} + +// SecurityOsqueryAPIECSMappingItemValue0 defines model for . +type SecurityOsqueryAPIECSMappingItemValue0 = string + +// SecurityOsqueryAPIECSMappingItemValue1 defines model for . +type SecurityOsqueryAPIECSMappingItemValue1 = []string + +// SecurityOsqueryAPIECSMappingItem_Value The value to map to the ECS field. +type SecurityOsqueryAPIECSMappingItem_Value struct { + union json.RawMessage +} + +// SecurityOsqueryAPIECSMappingOrUndefined Map osquery results columns or static values to Elastic Common Schema (ECS) fields +type SecurityOsqueryAPIECSMappingOrUndefined = SecurityOsqueryAPIECSMapping + +// SecurityOsqueryAPIEnabled Enables the pack. +type SecurityOsqueryAPIEnabled = bool + +// SecurityOsqueryAPIEnabledOrUndefined Enables the pack. +type SecurityOsqueryAPIEnabledOrUndefined = SecurityOsqueryAPIEnabled + +// SecurityOsqueryAPIFindLiveQueryDetailsResponse defines model for Security_Osquery_API_FindLiveQueryDetailsResponse. +type SecurityOsqueryAPIFindLiveQueryDetailsResponse = map[string]interface{} + +// SecurityOsqueryAPIFindLiveQueryResponse defines model for Security_Osquery_API_FindLiveQueryResponse. +type SecurityOsqueryAPIFindLiveQueryResponse = map[string]interface{} + +// SecurityOsqueryAPIFindPackResponse defines model for Security_Osquery_API_FindPackResponse. 
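Nearly every field on the osquery request bodies above is an optional pointer to an alias type, so ad-hoc values need addressable variables (or a small pointer helper) before they can be assigned. A minimal sketch of building a live query request, assuming the same package placement; the query text and agent IDs are illustrative:

package kbapi // assumed package name of the generated client; adjust to the real one

import "encoding/json"

// buildLiveQueryBody is a hypothetical helper producing a request body that
// targets two specific agents with a single ad-hoc query.
func buildLiveQueryBody() ([]byte, error) {
	query := SecurityOsqueryAPIQueryOrUndefined("SELECT name, version FROM os_version;")
	agents := []string{"agent-id-1", "agent-id-2"}
	req := SecurityOsqueryAPICreateLiveQueryRequestBody{
		AgentIds: &agents,
		Query:    &query,
	}
	return json.Marshal(req)
}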
+type SecurityOsqueryAPIFindPackResponse = map[string]interface{} + +// SecurityOsqueryAPIFindPacksResponse defines model for Security_Osquery_API_FindPacksResponse. +type SecurityOsqueryAPIFindPacksResponse = map[string]interface{} + +// SecurityOsqueryAPIFindSavedQueryDetailResponse defines model for Security_Osquery_API_FindSavedQueryDetailResponse. +type SecurityOsqueryAPIFindSavedQueryDetailResponse = map[string]interface{} + +// SecurityOsqueryAPIFindSavedQueryResponse defines model for Security_Osquery_API_FindSavedQueryResponse. +type SecurityOsqueryAPIFindSavedQueryResponse = map[string]interface{} + +// SecurityOsqueryAPIGetLiveQueryResultsResponse The response for getting live query results. +type SecurityOsqueryAPIGetLiveQueryResultsResponse = map[string]interface{} + +// SecurityOsqueryAPIInterval An interval, in seconds, on which to run the query. +type SecurityOsqueryAPIInterval = string + +// SecurityOsqueryAPIIntervalOrUndefined An interval, in seconds, on which to run the query. +type SecurityOsqueryAPIIntervalOrUndefined = SecurityOsqueryAPIInterval + +// SecurityOsqueryAPIKueryOrUndefined The kuery to filter the results by. +type SecurityOsqueryAPIKueryOrUndefined = string + +// SecurityOsqueryAPIObjectQueries An object of queries. +type SecurityOsqueryAPIObjectQueries map[string]SecurityOsqueryAPIObjectQueriesItem + +// SecurityOsqueryAPIObjectQueriesItem defines model for Security_Osquery_API_ObjectQueriesItem. +type SecurityOsqueryAPIObjectQueriesItem struct { + // EcsMapping Map osquery results columns or static values to Elastic Common Schema (ECS) fields + EcsMapping *SecurityOsqueryAPIECSMappingOrUndefined `json:"ecs_mapping,omitempty"` + + // Id The ID of the query. + Id *SecurityOsqueryAPIQueryId `json:"id,omitempty"` + + // Platform Restricts the query to a specified platform. The default is all platforms. To specify multiple platforms, use commas. For example, `linux,darwin`. + Platform *SecurityOsqueryAPIPlatformOrUndefined `json:"platform,omitempty"` + + // Query The SQL query you want to run. + Query *SecurityOsqueryAPIQuery `json:"query,omitempty"` + + // Removed Indicates whether the query is removed. + Removed *SecurityOsqueryAPIRemovedOrUndefined `json:"removed,omitempty"` + + // SavedQueryId The ID of a saved query. + SavedQueryId *SecurityOsqueryAPISavedQueryIdOrUndefined `json:"saved_query_id,omitempty"` + + // Snapshot Indicates whether the query is a snapshot. + Snapshot *SecurityOsqueryAPISnapshotOrUndefined `json:"snapshot,omitempty"` + + // Version Uses the Osquery versions greater than or equal to the specified version string. + Version *SecurityOsqueryAPIVersionOrUndefined `json:"version,omitempty"` +} + +// SecurityOsqueryAPIPackDescription The pack description. +type SecurityOsqueryAPIPackDescription = string + +// SecurityOsqueryAPIPackDescriptionOrUndefined The pack description. +type SecurityOsqueryAPIPackDescriptionOrUndefined = SecurityOsqueryAPIPackDescription + +// SecurityOsqueryAPIPackId The ID of the pack you want to run, retrieve, update, or delete. +type SecurityOsqueryAPIPackId = string + +// SecurityOsqueryAPIPackIdOrUndefined The ID of the pack you want to run, retrieve, update, or delete. +type SecurityOsqueryAPIPackIdOrUndefined = SecurityOsqueryAPIPackId + +// SecurityOsqueryAPIPackName The pack name. +type SecurityOsqueryAPIPackName = string + +// SecurityOsqueryAPIPageOrUndefined The page number to return. The default is 1. 
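The osquery find/create responses above are generated as bare map[string]interface{}, so callers assert the parts they need. A minimal sketch, assuming the same package placement; the "data" key used here is an assumption for illustration, not a documented contract:

package kbapi // assumed package name of the generated client; adjust to the real one

import "fmt"

// packName is a hypothetical helper pulling a name out of a weakly typed response.
func packName(resp SecurityOsqueryAPIFindPackResponse) (string, error) {
	data, ok := resp["data"].(map[string]interface{})
	if !ok {
		return "", fmt.Errorf("unexpected response shape: %v", resp)
	}
	name, _ := data["name"].(string)
	return name, nil
}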
+type SecurityOsqueryAPIPageOrUndefined = int
+
+// SecurityOsqueryAPIPageSizeOrUndefined The number of results to return per page. The default is 20.
+type SecurityOsqueryAPIPageSizeOrUndefined = int
+
+// SecurityOsqueryAPIPlatform Restricts the query to a specified platform. The default is all platforms. To specify multiple platforms, use commas. For example, `linux,darwin`.
+type SecurityOsqueryAPIPlatform = string
+
+// SecurityOsqueryAPIPlatformOrUndefined Restricts the query to a specified platform. The default is all platforms. To specify multiple platforms, use commas. For example, `linux,darwin`.
+type SecurityOsqueryAPIPlatformOrUndefined = SecurityOsqueryAPIPlatform
+
+// SecurityOsqueryAPIPolicyIds A list of agent policy IDs.
+type SecurityOsqueryAPIPolicyIds = []string
+
+// SecurityOsqueryAPIPolicyIdsOrUndefined A list of agent policy IDs.
+type SecurityOsqueryAPIPolicyIdsOrUndefined = SecurityOsqueryAPIPolicyIds
+
+// SecurityOsqueryAPIQuery The SQL query you want to run.
+type SecurityOsqueryAPIQuery = string
+
+// SecurityOsqueryAPIQueryId The ID of the query.
+type SecurityOsqueryAPIQueryId = string
+
+// SecurityOsqueryAPIQueryOrUndefined The SQL query you want to run.
+type SecurityOsqueryAPIQueryOrUndefined = SecurityOsqueryAPIQuery
+
+// SecurityOsqueryAPIRemoved Indicates whether the query is removed.
+type SecurityOsqueryAPIRemoved = bool
+
+// SecurityOsqueryAPIRemovedOrUndefined Indicates whether the query is removed.
+type SecurityOsqueryAPIRemovedOrUndefined = SecurityOsqueryAPIRemoved
+
+// SecurityOsqueryAPISavedQueryDescription The saved query description.
+type SecurityOsqueryAPISavedQueryDescription = string
+
+// SecurityOsqueryAPISavedQueryDescriptionOrUndefined The saved query description.
+type SecurityOsqueryAPISavedQueryDescriptionOrUndefined = SecurityOsqueryAPISavedQueryDescription
+
+// SecurityOsqueryAPISavedQueryId The ID of a saved query.
+type SecurityOsqueryAPISavedQueryId = string
+
+// SecurityOsqueryAPISavedQueryIdOrUndefined The ID of a saved query.
+type SecurityOsqueryAPISavedQueryIdOrUndefined = SecurityOsqueryAPISavedQueryId
+
+// SecurityOsqueryAPIShards An object with shard configuration for policies included in the pack. For each policy, set the shard configuration to a percentage (1–100) of target hosts.
+type SecurityOsqueryAPIShards map[string]float32
+
+// SecurityOsqueryAPISnapshot Indicates whether the query is a snapshot.
+type SecurityOsqueryAPISnapshot = bool
+
+// SecurityOsqueryAPISnapshotOrUndefined Indicates whether the query is a snapshot.
+type SecurityOsqueryAPISnapshotOrUndefined = SecurityOsqueryAPISnapshot
+
+// SecurityOsqueryAPISortOrUndefined The field that is used to sort the results.
+type SecurityOsqueryAPISortOrUndefined = string
+
+// SecurityOsqueryAPISortOrderOrUndefined Specifies the sort order.
+type SecurityOsqueryAPISortOrderOrUndefined string
+
+// SecurityOsqueryAPIUpdatePacksRequestBody defines model for Security_Osquery_API_UpdatePacksRequestBody.
+type SecurityOsqueryAPIUpdatePacksRequestBody struct {
+	// Description The pack description.
+	Description *SecurityOsqueryAPIPackDescriptionOrUndefined `json:"description,omitempty"`
+
+	// Enabled Enables the pack.
+	Enabled *SecurityOsqueryAPIEnabledOrUndefined `json:"enabled,omitempty"`
+
+	// Name The pack name.
+	Name *SecurityOsqueryAPIPackName `json:"name,omitempty"`
+
+	// PolicyIds A list of agent policy IDs.
+	PolicyIds *SecurityOsqueryAPIPolicyIdsOrUndefined `json:"policy_ids,omitempty"`
+
+	// Queries An object of queries. 
+ Queries *SecurityOsqueryAPIObjectQueries `json:"queries,omitempty"` + + // Shards An object with shard configuration for policies included in the pack. For each policy, set the shard configuration to a percentage (1–100) of target hosts. + Shards *SecurityOsqueryAPIShards `json:"shards,omitempty"` +} + +// SecurityOsqueryAPIUpdatePacksResponse defines model for Security_Osquery_API_UpdatePacksResponse. +type SecurityOsqueryAPIUpdatePacksResponse = map[string]interface{} + +// SecurityOsqueryAPIUpdateSavedQueryRequestBody defines model for Security_Osquery_API_UpdateSavedQueryRequestBody. +type SecurityOsqueryAPIUpdateSavedQueryRequestBody struct { + // Description The saved query description. + Description *SecurityOsqueryAPISavedQueryDescriptionOrUndefined `json:"description,omitempty"` + + // EcsMapping Map osquery results columns or static values to Elastic Common Schema (ECS) fields + EcsMapping *SecurityOsqueryAPIECSMappingOrUndefined `json:"ecs_mapping,omitempty"` + + // Id The ID of a saved query. + Id *SecurityOsqueryAPISavedQueryId `json:"id,omitempty"` + + // Interval An interval, in seconds, on which to run the query. + Interval *SecurityOsqueryAPIIntervalOrUndefined `json:"interval,omitempty"` + + // Platform Restricts the query to a specified platform. The default is all platforms. To specify multiple platforms, use commas. For example, `linux,darwin`. + Platform *SecurityOsqueryAPIPlatformOrUndefined `json:"platform,omitempty"` + + // Query The SQL query you want to run. + Query *SecurityOsqueryAPIQueryOrUndefined `json:"query,omitempty"` + + // Removed Indicates whether the query is removed. + Removed *SecurityOsqueryAPIRemovedOrUndefined `json:"removed,omitempty"` + + // Snapshot Indicates whether the query is a snapshot. + Snapshot *SecurityOsqueryAPISnapshotOrUndefined `json:"snapshot,omitempty"` + + // Version Uses the Osquery versions greater than or equal to the specified version string. + Version *SecurityOsqueryAPIVersionOrUndefined `json:"version,omitempty"` +} + +// SecurityOsqueryAPIUpdateSavedQueryResponse defines model for Security_Osquery_API_UpdateSavedQueryResponse. +type SecurityOsqueryAPIUpdateSavedQueryResponse = map[string]interface{} + +// SecurityOsqueryAPIVersion Uses the Osquery versions greater than or equal to the specified version string. +type SecurityOsqueryAPIVersion = string + +// SecurityOsqueryAPIVersionOrUndefined Uses the Osquery versions greater than or equal to the specified version string. +type SecurityOsqueryAPIVersionOrUndefined = SecurityOsqueryAPIVersion + +// SecurityTimelineAPIAssociatedFilterType Filter notes based on their association with a document or saved object. +type SecurityTimelineAPIAssociatedFilterType string + +// SecurityTimelineAPIBareNote defines model for Security_Timeline_API_BareNote. +type SecurityTimelineAPIBareNote struct { + // Created The time the note was created, using a 13-digit Epoch timestamp. + Created *float32 `json:"created,omitempty"` + + // CreatedBy The user who created the note. + CreatedBy *string `json:"createdBy,omitempty"` + + // EventId The `_id` of the associated event for this note. 
+ EventId *string `json:"eventId,omitempty"` + + // Note The text of the note + Note *string `json:"note,omitempty"` + + // TimelineId The `savedObjectId` of the Timeline that this note is associated with + TimelineId string `json:"timelineId"` + + // Updated The last time the note was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the note + UpdatedBy *string `json:"updatedBy,omitempty"` +} + +// SecurityTimelineAPIBarePinnedEvent defines model for Security_Timeline_API_BarePinnedEvent. +type SecurityTimelineAPIBarePinnedEvent struct { + // Created The time the pinned event was created, using a 13-digit Epoch timestamp. + Created *float32 `json:"created,omitempty"` + + // CreatedBy The user who created the pinned event. + CreatedBy *string `json:"createdBy,omitempty"` + + // EventId The `_id` of the associated event for this pinned event. + EventId string `json:"eventId"` + + // TimelineId The `savedObjectId` of the timeline that this pinned event is associated with + TimelineId string `json:"timelineId"` + + // Updated The last time the pinned event was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the pinned event + UpdatedBy *string `json:"updatedBy,omitempty"` +} + +// SecurityTimelineAPIColumnHeaderResult defines model for Security_Timeline_API_ColumnHeaderResult. +type SecurityTimelineAPIColumnHeaderResult struct { + Aggregatable *bool `json:"aggregatable,omitempty"` + Category *string `json:"category,omitempty"` + ColumnHeaderType *string `json:"columnHeaderType,omitempty"` + Description *string `json:"description,omitempty"` + Id *string `json:"id,omitempty"` + Indexes *[]string `json:"indexes,omitempty"` + Name *string `json:"name,omitempty"` + Placeholder *string `json:"placeholder,omitempty"` + Searchable *bool `json:"searchable,omitempty"` + Type *string `json:"type,omitempty"` +} + +// SecurityTimelineAPIDataProviderQueryMatch defines model for Security_Timeline_API_DataProviderQueryMatch. +type SecurityTimelineAPIDataProviderQueryMatch struct { + Enabled *bool `json:"enabled,omitempty"` + Excluded *bool `json:"excluded,omitempty"` + Id *string `json:"id,omitempty"` + KqlQuery *string `json:"kqlQuery,omitempty"` + Name *string `json:"name,omitempty"` + QueryMatch *SecurityTimelineAPIQueryMatchResult `json:"queryMatch,omitempty"` + + // Type The type of data provider. + Type *SecurityTimelineAPIDataProviderType `json:"type,omitempty"` +} + +// SecurityTimelineAPIDataProviderResult defines model for Security_Timeline_API_DataProviderResult. +type SecurityTimelineAPIDataProviderResult struct { + And *[]SecurityTimelineAPIDataProviderQueryMatch `json:"and,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + Excluded *bool `json:"excluded,omitempty"` + Id *string `json:"id,omitempty"` + KqlQuery *string `json:"kqlQuery,omitempty"` + Name *string `json:"name,omitempty"` + QueryMatch *SecurityTimelineAPIQueryMatchResult `json:"queryMatch,omitempty"` + + // Type The type of data provider. + Type *SecurityTimelineAPIDataProviderType `json:"type,omitempty"` +} + +// SecurityTimelineAPIDataProviderType The type of data provider. +type SecurityTimelineAPIDataProviderType string + +// SecurityTimelineAPIDocumentIds defines model for Security_Timeline_API_DocumentIds. +type SecurityTimelineAPIDocumentIds struct { + union json.RawMessage +} + +// SecurityTimelineAPIDocumentIds0 defines model for . 
+type SecurityTimelineAPIDocumentIds0 = []string + +// SecurityTimelineAPIDocumentIds1 defines model for . +type SecurityTimelineAPIDocumentIds1 = string + +// SecurityTimelineAPIFavoriteTimelineResponse defines model for Security_Timeline_API_FavoriteTimelineResponse. +type SecurityTimelineAPIFavoriteTimelineResponse struct { + Favorite *[]SecurityTimelineAPIFavoriteTimelineResult `json:"favorite,omitempty"` + SavedObjectId string `json:"savedObjectId"` + TemplateTimelineId *string `json:"templateTimelineId,omitempty"` + TemplateTimelineVersion *float32 `json:"templateTimelineVersion,omitempty"` + + // TimelineType The type of Timeline. + TimelineType *SecurityTimelineAPITimelineType `json:"timelineType,omitempty"` + Version string `json:"version"` +} + +// SecurityTimelineAPIFavoriteTimelineResult Indicates when and who marked a Timeline as a favorite. +type SecurityTimelineAPIFavoriteTimelineResult struct { + FavoriteDate *float32 `json:"favoriteDate,omitempty"` + FullName *string `json:"fullName,omitempty"` + UserName *string `json:"userName,omitempty"` +} + +// SecurityTimelineAPIFilterTimelineResult defines model for Security_Timeline_API_FilterTimelineResult. +type SecurityTimelineAPIFilterTimelineResult struct { + Exists *string `json:"exists,omitempty"` + MatchAll *string `json:"match_all,omitempty"` + Meta *struct { + Alias *string `json:"alias,omitempty"` + ControlledBy *string `json:"controlledBy,omitempty"` + Disabled *bool `json:"disabled,omitempty"` + Field *string `json:"field,omitempty"` + FormattedValue *string `json:"formattedValue,omitempty"` + Index *string `json:"index,omitempty"` + Key *string `json:"key,omitempty"` + Negate *bool `json:"negate,omitempty"` + Params *string `json:"params,omitempty"` + Type *string `json:"type,omitempty"` + Value *string `json:"value,omitempty"` + } `json:"meta,omitempty"` + Missing *string `json:"missing,omitempty"` + Query *string `json:"query,omitempty"` + Range *string `json:"range,omitempty"` + Script *string `json:"script,omitempty"` +} + +// SecurityTimelineAPIGetNotesResult defines model for Security_Timeline_API_GetNotesResult. +type SecurityTimelineAPIGetNotesResult struct { + Notes []SecurityTimelineAPINote `json:"notes"` + TotalCount float32 `json:"totalCount"` +} + +// SecurityTimelineAPIImportTimelineResult defines model for Security_Timeline_API_ImportTimelineResult. 
+type SecurityTimelineAPIImportTimelineResult struct {
+	// Errors The list of failed Timeline imports
+	Errors *[]struct {
+		// Error The error containing the reason why the timeline could not be imported
+		Error *struct {
+			// Message The reason why the timeline could not be imported
+			Message *string `json:"message,omitempty"`
+
+			// StatusCode The HTTP status code of the error
+			StatusCode *float32 `json:"status_code,omitempty"`
+		} `json:"error,omitempty"`
+
+		// Id The ID of the timeline that failed to import
+		Id *string `json:"id,omitempty"`
+	} `json:"errors,omitempty"`
+
+	// Success Indicates whether any of the Timelines were successfully imported
+	Success *bool `json:"success,omitempty"`
+
+	// SuccessCount The amount of successfully imported/updated Timelines
+	SuccessCount *float32 `json:"success_count,omitempty"`
+
+	// TimelinesInstalled The amount of successfully installed Timelines
+	TimelinesInstalled *float32 `json:"timelines_installed,omitempty"`
+
+	// TimelinesUpdated The amount of successfully updated Timelines
+	TimelinesUpdated *float32 `json:"timelines_updated,omitempty"`
+}
+
+// SecurityTimelineAPIImportTimelines defines model for Security_Timeline_API_ImportTimelines.
+type SecurityTimelineAPIImportTimelines struct {
+	// Columns The Timeline's columns
+	Columns *[]SecurityTimelineAPIColumnHeaderResult `json:"columns,omitempty"`
+
+	// Created The time the Timeline was created, using a 13-digit Epoch timestamp.
+	Created *float32 `json:"created,omitempty"`
+
+	// CreatedBy The user who created the Timeline.
+	CreatedBy *string `json:"createdBy,omitempty"`
+
+	// DataProviders Object containing query clauses
+	DataProviders *[]SecurityTimelineAPIDataProviderResult `json:"dataProviders,omitempty"`
+
+	// DataViewId ID of the Timeline's Data View
+	DataViewId *string `json:"dataViewId,omitempty"`
+
+	// DateRange The Timeline's search period.
+	DateRange *struct {
+		End *SecurityTimelineAPIImportTimelines_DateRange_End `json:"end,omitempty"`
+		Start *SecurityTimelineAPIImportTimelines_DateRange_Start `json:"start,omitempty"`
+	} `json:"dateRange,omitempty"`
+
+	// Description The Timeline's description
+	Description *string `json:"description,omitempty"`
+
+	// EqlOptions EQL query that is used in the correlation tab
+	EqlOptions *struct {
+		EventCategoryField *string `json:"eventCategoryField,omitempty"`
+		Query *string `json:"query,omitempty"`
+		Size *SecurityTimelineAPIImportTimelines_EqlOptions_Size `json:"size,omitempty"`
+		TiebreakerField *string `json:"tiebreakerField,omitempty"`
+		TimestampField *string `json:"timestampField,omitempty"`
+	} `json:"eqlOptions,omitempty"`
+	EventNotes *[]SecurityTimelineAPIBareNote `json:"eventNotes,omitempty"`
+
+	// EventType Event types displayed in the Timeline
+	// Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set
+	EventType *string `json:"eventType,omitempty"`
+
+	// ExcludedRowRendererIds A list of row renderers that should not be used when in `Event renderers` mode
+	ExcludedRowRendererIds *[]SecurityTimelineAPIRowRendererId `json:"excludedRowRendererIds,omitempty"`
+	Favorite *[]SecurityTimelineAPIFavoriteTimelineResult `json:"favorite,omitempty"`
+
+	// Filters A list of filters that should be applied to the query
+	Filters *[]SecurityTimelineAPIFilterTimelineResult `json:"filters,omitempty"`
+	GlobalNotes *[]SecurityTimelineAPIBareNote `json:"globalNotes,omitempty"`
+
+	// IndexNames A list of index names to use in the query (e.g. 
when the default data view has been modified) + IndexNames *[]string `json:"indexNames,omitempty"` + + // KqlMode Indicates whether the KQL bar filters the query results or searches for additional results, where: + // * `filter`: filters query results + // * `search`: displays additional search results + KqlMode *string `json:"kqlMode,omitempty"` + + // KqlQuery KQL bar query. + KqlQuery *SecurityTimelineAPISerializedFilterQueryResult `json:"kqlQuery,omitempty"` + PinnedEventIds *[]string `json:"pinnedEventIds,omitempty"` + SavedObjectId *string `json:"savedObjectId,omitempty"` + + // SavedQueryId The ID of the saved query that might be used in the Query tab + SavedQueryId *string `json:"savedQueryId,omitempty"` + + // SavedSearchId The ID of the saved search that is used in the ES|QL tab + SavedSearchId *string `json:"savedSearchId,omitempty"` + Sort *SecurityTimelineAPISort `json:"sort,omitempty"` + + // Status The status of the Timeline. + Status *SecurityTimelineAPITimelineStatus `json:"status,omitempty"` + + // TemplateTimelineId A unique ID (UUID) for Timeline templates. For Timelines, the value is `null`. + TemplateTimelineId *string `json:"templateTimelineId,omitempty"` + + // TemplateTimelineVersion Timeline template version number. For Timelines, the value is `null`. + TemplateTimelineVersion *float32 `json:"templateTimelineVersion,omitempty"` + + // TimelineType The type of Timeline. + TimelineType *SecurityTimelineAPITimelineType `json:"timelineType,omitempty"` + + // Title The Timeline's title. + Title *string `json:"title,omitempty"` + + // Updated The last time the Timeline was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the Timeline + UpdatedBy *string `json:"updatedBy,omitempty"` + Version *string `json:"version,omitempty"` +} + +// SecurityTimelineAPIImportTimelinesDateRangeEnd0 defines model for . +type SecurityTimelineAPIImportTimelinesDateRangeEnd0 = string + +// SecurityTimelineAPIImportTimelinesDateRangeEnd1 defines model for . +type SecurityTimelineAPIImportTimelinesDateRangeEnd1 = float32 + +// SecurityTimelineAPIImportTimelines_DateRange_End defines model for SecurityTimelineAPIImportTimelines.DateRange.End. +type SecurityTimelineAPIImportTimelines_DateRange_End struct { + union json.RawMessage +} + +// SecurityTimelineAPIImportTimelinesDateRangeStart0 defines model for . +type SecurityTimelineAPIImportTimelinesDateRangeStart0 = string + +// SecurityTimelineAPIImportTimelinesDateRangeStart1 defines model for . +type SecurityTimelineAPIImportTimelinesDateRangeStart1 = float32 + +// SecurityTimelineAPIImportTimelines_DateRange_Start defines model for SecurityTimelineAPIImportTimelines.DateRange.Start. +type SecurityTimelineAPIImportTimelines_DateRange_Start struct { + union json.RawMessage +} + +// SecurityTimelineAPIImportTimelinesEqlOptionsSize0 defines model for . +type SecurityTimelineAPIImportTimelinesEqlOptionsSize0 = string + +// SecurityTimelineAPIImportTimelinesEqlOptionsSize1 defines model for . +type SecurityTimelineAPIImportTimelinesEqlOptionsSize1 = float32 + +// SecurityTimelineAPIImportTimelines_EqlOptions_Size defines model for SecurityTimelineAPIImportTimelines.EqlOptions.Size. +type SecurityTimelineAPIImportTimelines_EqlOptions_Size struct { + union json.RawMessage +} + +// SecurityTimelineAPINote defines model for Security_Timeline_API_Note. +type SecurityTimelineAPINote struct { + // Created The time the note was created, using a 13-digit Epoch timestamp. 
+ Created *float32 `json:"created,omitempty"` + + // CreatedBy The user who created the note. + CreatedBy *string `json:"createdBy,omitempty"` + + // EventId The `_id` of the associated event for this note. + EventId *string `json:"eventId,omitempty"` + + // Note The text of the note + Note *string `json:"note,omitempty"` + + // NoteId The `savedObjectId` of the note + NoteId string `json:"noteId"` + + // TimelineId The `savedObjectId` of the Timeline that this note is associated with + TimelineId string `json:"timelineId"` + + // Updated The last time the note was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the note + UpdatedBy *string `json:"updatedBy,omitempty"` + + // Version The version of the note + Version string `json:"version"` +} + +// SecurityTimelineAPINoteCreatedAndUpdatedMetadata defines model for Security_Timeline_API_NoteCreatedAndUpdatedMetadata. +type SecurityTimelineAPINoteCreatedAndUpdatedMetadata struct { + // Created The time the note was created, using a 13-digit Epoch timestamp. + Created *float32 `json:"created,omitempty"` + + // CreatedBy The user who created the note. + CreatedBy *string `json:"createdBy,omitempty"` + + // Updated The last time the note was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the note + UpdatedBy *string `json:"updatedBy,omitempty"` +} + +// SecurityTimelineAPIPersistPinnedEventResponse defines model for Security_Timeline_API_PersistPinnedEventResponse. +type SecurityTimelineAPIPersistPinnedEventResponse struct { + union json.RawMessage +} + +// SecurityTimelineAPIPersistPinnedEventResponse1 defines model for . +type SecurityTimelineAPIPersistPinnedEventResponse1 struct { + // Unpinned Indicates whether the event was successfully unpinned + Unpinned bool `json:"unpinned"` +} + +// SecurityTimelineAPIPersistTimelineResponse defines model for Security_Timeline_API_PersistTimelineResponse. +type SecurityTimelineAPIPersistTimelineResponse = SecurityTimelineAPITimelineResponse + +// SecurityTimelineAPIPinnedEvent defines model for Security_Timeline_API_PinnedEvent. +type SecurityTimelineAPIPinnedEvent struct { + // Created The time the pinned event was created, using a 13-digit Epoch timestamp. + Created *float32 `json:"created,omitempty"` + + // CreatedBy The user who created the pinned event. + CreatedBy *string `json:"createdBy,omitempty"` + + // EventId The `_id` of the associated event for this pinned event. + EventId string `json:"eventId"` + + // PinnedEventId The `savedObjectId` of this pinned event + PinnedEventId string `json:"pinnedEventId"` + + // TimelineId The `savedObjectId` of the timeline that this pinned event is associated with + TimelineId string `json:"timelineId"` + + // Updated The last time the pinned event was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the pinned event + UpdatedBy *string `json:"updatedBy,omitempty"` + + // Version The version of this pinned event + Version string `json:"version"` +} + +// SecurityTimelineAPIPinnedEventCreatedAndUpdatedMetadata defines model for Security_Timeline_API_PinnedEventCreatedAndUpdatedMetadata. +type SecurityTimelineAPIPinnedEventCreatedAndUpdatedMetadata struct { + // Created The time the pinned event was created, using a 13-digit Epoch timestamp. 
+ Created *float32 `json:"created,omitempty"` + + // CreatedBy The user who created the pinned event. + CreatedBy *string `json:"createdBy,omitempty"` + + // Updated The last time the pinned event was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the pinned event + UpdatedBy *string `json:"updatedBy,omitempty"` +} + +// SecurityTimelineAPIQueryMatchResult defines model for Security_Timeline_API_QueryMatchResult. +type SecurityTimelineAPIQueryMatchResult struct { + DisplayField *string `json:"displayField,omitempty"` + DisplayValue *string `json:"displayValue,omitempty"` + Field *string `json:"field,omitempty"` + Operator *string `json:"operator,omitempty"` + Value *SecurityTimelineAPIQueryMatchResult_Value `json:"value,omitempty"` +} + +// SecurityTimelineAPIQueryMatchResultValue0 defines model for . +type SecurityTimelineAPIQueryMatchResultValue0 = string + +// SecurityTimelineAPIQueryMatchResultValue1 defines model for . +type SecurityTimelineAPIQueryMatchResultValue1 = []string + +// SecurityTimelineAPIQueryMatchResult_Value defines model for SecurityTimelineAPIQueryMatchResult.Value. +type SecurityTimelineAPIQueryMatchResult_Value struct { + union json.RawMessage +} + +// SecurityTimelineAPIResolvedTimeline defines model for Security_Timeline_API_ResolvedTimeline. +type SecurityTimelineAPIResolvedTimeline struct { + AliasPurpose *SecurityTimelineAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *string `json:"alias_target_id,omitempty"` + Outcome SecurityTimelineAPISavedObjectResolveOutcome `json:"outcome"` + Timeline SecurityTimelineAPITimelineSavedToReturnObject `json:"timeline"` +} + +// SecurityTimelineAPIResponseNote defines model for Security_Timeline_API_ResponseNote. +type SecurityTimelineAPIResponseNote struct { + Note SecurityTimelineAPINote `json:"note"` +} + +// SecurityTimelineAPIRowRendererId Identifies the available row renderers +type SecurityTimelineAPIRowRendererId string + +// SecurityTimelineAPISavedObjectIds defines model for Security_Timeline_API_SavedObjectIds. +type SecurityTimelineAPISavedObjectIds struct { + union json.RawMessage +} + +// SecurityTimelineAPISavedObjectIds0 defines model for . +type SecurityTimelineAPISavedObjectIds0 = []string + +// SecurityTimelineAPISavedObjectIds1 defines model for . +type SecurityTimelineAPISavedObjectIds1 = string + +// SecurityTimelineAPISavedObjectResolveAliasPurpose defines model for Security_Timeline_API_SavedObjectResolveAliasPurpose. +type SecurityTimelineAPISavedObjectResolveAliasPurpose string + +// SecurityTimelineAPISavedObjectResolveOutcome defines model for Security_Timeline_API_SavedObjectResolveOutcome. +type SecurityTimelineAPISavedObjectResolveOutcome string + +// SecurityTimelineAPISavedTimeline defines model for Security_Timeline_API_SavedTimeline. +type SecurityTimelineAPISavedTimeline struct { + // Columns The Timeline's columns + Columns *[]SecurityTimelineAPIColumnHeaderResult `json:"columns,omitempty"` + + // Created The time the Timeline was created, using a 13-digit Epoch timestamp. + Created *float32 `json:"created,omitempty"` + + // CreatedBy The user who created the Timeline. 
+ CreatedBy *string `json:"createdBy,omitempty"` + + // DataProviders Object containing query clauses + DataProviders *[]SecurityTimelineAPIDataProviderResult `json:"dataProviders,omitempty"` + + // DataViewId ID of the Timeline's Data View + DataViewId *string `json:"dataViewId,omitempty"` + + // DateRange The Timeline's search period. + DateRange *struct { + End *SecurityTimelineAPISavedTimeline_DateRange_End `json:"end,omitempty"` + Start *SecurityTimelineAPISavedTimeline_DateRange_Start `json:"start,omitempty"` + } `json:"dateRange,omitempty"` + + // Description The Timeline's description + Description *string `json:"description,omitempty"` + + // EqlOptions EQL query that is used in the correlation tab + EqlOptions *struct { + EventCategoryField *string `json:"eventCategoryField,omitempty"` + Query *string `json:"query,omitempty"` + Size *SecurityTimelineAPISavedTimeline_EqlOptions_Size `json:"size,omitempty"` + TiebreakerField *string `json:"tiebreakerField,omitempty"` + TimestampField *string `json:"timestampField,omitempty"` + } `json:"eqlOptions,omitempty"` + + // EventType Event types displayed in the Timeline + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + EventType *string `json:"eventType,omitempty"` + + // ExcludedRowRendererIds A list of row renderers that should not be used when in `Event renderers` mode + ExcludedRowRendererIds *[]SecurityTimelineAPIRowRendererId `json:"excludedRowRendererIds,omitempty"` + Favorite *[]SecurityTimelineAPIFavoriteTimelineResult `json:"favorite,omitempty"` + + // Filters A list of filters that should be applied to the query + Filters *[]SecurityTimelineAPIFilterTimelineResult `json:"filters,omitempty"` + + // IndexNames A list of index names to use in the query (e.g. when the default data view has been modified) + IndexNames *[]string `json:"indexNames,omitempty"` + + // KqlMode Indicates whether the KQL bar filters the query results or searches for additional results, where: + // * `filter`: filters query results + // * `search`: displays additional search results + KqlMode *string `json:"kqlMode,omitempty"` + + // KqlQuery KQL bar query. + KqlQuery *SecurityTimelineAPISerializedFilterQueryResult `json:"kqlQuery,omitempty"` + + // SavedQueryId The ID of the saved query that might be used in the Query tab + SavedQueryId *string `json:"savedQueryId,omitempty"` + + // SavedSearchId The ID of the saved search that is used in the ES|QL tab + SavedSearchId *string `json:"savedSearchId,omitempty"` + Sort *SecurityTimelineAPISort `json:"sort,omitempty"` + + // Status The status of the Timeline. + Status *SecurityTimelineAPITimelineStatus `json:"status,omitempty"` + + // TemplateTimelineId A unique ID (UUID) for Timeline templates. For Timelines, the value is `null`. + TemplateTimelineId *string `json:"templateTimelineId,omitempty"` + + // TemplateTimelineVersion Timeline template version number. For Timelines, the value is `null`. + TemplateTimelineVersion *float32 `json:"templateTimelineVersion,omitempty"` + + // TimelineType The type of Timeline. + TimelineType *SecurityTimelineAPITimelineType `json:"timelineType,omitempty"` + + // Title The Timeline's title. 
+ Title *string `json:"title,omitempty"` + + // Updated The last time the Timeline was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the Timeline + UpdatedBy *string `json:"updatedBy,omitempty"` +} + +// SecurityTimelineAPISavedTimelineDateRangeEnd0 defines model for . +type SecurityTimelineAPISavedTimelineDateRangeEnd0 = string + +// SecurityTimelineAPISavedTimelineDateRangeEnd1 defines model for . +type SecurityTimelineAPISavedTimelineDateRangeEnd1 = float32 + +// SecurityTimelineAPISavedTimeline_DateRange_End defines model for SecurityTimelineAPISavedTimeline.DateRange.End. +type SecurityTimelineAPISavedTimeline_DateRange_End struct { + union json.RawMessage +} + +// SecurityTimelineAPISavedTimelineDateRangeStart0 defines model for . +type SecurityTimelineAPISavedTimelineDateRangeStart0 = string + +// SecurityTimelineAPISavedTimelineDateRangeStart1 defines model for . +type SecurityTimelineAPISavedTimelineDateRangeStart1 = float32 + +// SecurityTimelineAPISavedTimeline_DateRange_Start defines model for SecurityTimelineAPISavedTimeline.DateRange.Start. +type SecurityTimelineAPISavedTimeline_DateRange_Start struct { + union json.RawMessage +} + +// SecurityTimelineAPISavedTimelineEqlOptionsSize0 defines model for . +type SecurityTimelineAPISavedTimelineEqlOptionsSize0 = string + +// SecurityTimelineAPISavedTimelineEqlOptionsSize1 defines model for . +type SecurityTimelineAPISavedTimelineEqlOptionsSize1 = float32 + +// SecurityTimelineAPISavedTimeline_EqlOptions_Size defines model for SecurityTimelineAPISavedTimeline.EqlOptions.Size. +type SecurityTimelineAPISavedTimeline_EqlOptions_Size struct { + union json.RawMessage +} + +// SecurityTimelineAPISavedTimelineWithSavedObjectId defines model for Security_Timeline_API_SavedTimelineWithSavedObjectId. +type SecurityTimelineAPISavedTimelineWithSavedObjectId struct { + // Columns The Timeline's columns + Columns *[]SecurityTimelineAPIColumnHeaderResult `json:"columns,omitempty"` + + // Created The time the Timeline was created, using a 13-digit Epoch timestamp. + Created *float32 `json:"created,omitempty"` + + // CreatedBy The user who created the Timeline. + CreatedBy *string `json:"createdBy,omitempty"` + + // DataProviders Object containing query clauses + DataProviders *[]SecurityTimelineAPIDataProviderResult `json:"dataProviders,omitempty"` + + // DataViewId ID of the Timeline's Data View + DataViewId *string `json:"dataViewId,omitempty"` + + // DateRange The Timeline's search period. 
+ DateRange *struct { + End *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End `json:"end,omitempty"` + Start *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start `json:"start,omitempty"` + } `json:"dateRange,omitempty"` + + // Description The Timeline's description + Description *string `json:"description,omitempty"` + + // EqlOptions EQL query that is used in the correlation tab + EqlOptions *struct { + EventCategoryField *string `json:"eventCategoryField,omitempty"` + Query *string `json:"query,omitempty"` + Size *SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size `json:"size,omitempty"` + TiebreakerField *string `json:"tiebreakerField,omitempty"` + TimestampField *string `json:"timestampField,omitempty"` + } `json:"eqlOptions,omitempty"` + + // EventType Event types displayed in the Timeline + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + EventType *string `json:"eventType,omitempty"` + + // ExcludedRowRendererIds A list of row renderers that should not be used when in `Event renderers` mode + ExcludedRowRendererIds *[]SecurityTimelineAPIRowRendererId `json:"excludedRowRendererIds,omitempty"` + Favorite *[]SecurityTimelineAPIFavoriteTimelineResult `json:"favorite,omitempty"` + + // Filters A list of filters that should be applied to the query + Filters *[]SecurityTimelineAPIFilterTimelineResult `json:"filters,omitempty"` + + // IndexNames A list of index names to use in the query (e.g. when the default data view has been modified) + IndexNames *[]string `json:"indexNames,omitempty"` + + // KqlMode Indicates whether the KQL bar filters the query results or searches for additional results, where: + // * `filter`: filters query results + // * `search`: displays additional search results + KqlMode *string `json:"kqlMode,omitempty"` + + // KqlQuery KQL bar query. + KqlQuery *SecurityTimelineAPISerializedFilterQueryResult `json:"kqlQuery,omitempty"` + + // SavedObjectId The `savedObjectId` of the Timeline or Timeline template + SavedObjectId string `json:"savedObjectId"` + + // SavedQueryId The ID of the saved query that might be used in the Query tab + SavedQueryId *string `json:"savedQueryId,omitempty"` + + // SavedSearchId The ID of the saved search that is used in the ES|QL tab + SavedSearchId *string `json:"savedSearchId,omitempty"` + Sort *SecurityTimelineAPISort `json:"sort,omitempty"` + + // Status The status of the Timeline. + Status *SecurityTimelineAPITimelineStatus `json:"status,omitempty"` + + // TemplateTimelineId A unique ID (UUID) for Timeline templates. For Timelines, the value is `null`. + TemplateTimelineId *string `json:"templateTimelineId,omitempty"` + + // TemplateTimelineVersion Timeline template version number. For Timelines, the value is `null`. + TemplateTimelineVersion *float32 `json:"templateTimelineVersion,omitempty"` + + // TimelineType The type of Timeline. + TimelineType *SecurityTimelineAPITimelineType `json:"timelineType,omitempty"` + + // Title The Timeline's title. + Title *string `json:"title,omitempty"` + + // Updated The last time the Timeline was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the Timeline + UpdatedBy *string `json:"updatedBy,omitempty"` + + // Version The version of the Timeline or Timeline template + Version string `json:"version"` +} + +// SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0 defines model for . 
+type SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0 = string + +// SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1 defines model for . +type SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1 = float32 + +// SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End defines model for SecurityTimelineAPISavedTimelineWithSavedObjectId.DateRange.End. +type SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End struct { + union json.RawMessage +} + +// SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0 defines model for . +type SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0 = string + +// SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1 defines model for . +type SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1 = float32 + +// SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start defines model for SecurityTimelineAPISavedTimelineWithSavedObjectId.DateRange.Start. +type SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start struct { + union json.RawMessage +} + +// SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0 defines model for . +type SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0 = string + +// SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1 defines model for . +type SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1 = float32 + +// SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size defines model for SecurityTimelineAPISavedTimelineWithSavedObjectId.EqlOptions.Size. +type SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size struct { + union json.RawMessage +} + +// SecurityTimelineAPISerializedFilterQueryResult KQL bar query. +type SecurityTimelineAPISerializedFilterQueryResult struct { + FilterQuery *struct { + Kuery *struct { + Expression *string `json:"expression,omitempty"` + Kind *string `json:"kind,omitempty"` + } `json:"kuery,omitempty"` + SerializedQuery *string `json:"serializedQuery,omitempty"` + } `json:"filterQuery,omitempty"` +} + +// SecurityTimelineAPISort defines model for Security_Timeline_API_Sort. +type SecurityTimelineAPISort struct { + union json.RawMessage +} + +// SecurityTimelineAPISort1 defines model for . +type SecurityTimelineAPISort1 = []SecurityTimelineAPISortObject + +// SecurityTimelineAPISortFieldTimeline The field to sort the timelines by. +type SecurityTimelineAPISortFieldTimeline string + +// SecurityTimelineAPISortObject Object indicating how rows are sorted in the Timeline's grid +type SecurityTimelineAPISortObject struct { + ColumnId *string `json:"columnId,omitempty"` + ColumnType *string `json:"columnType,omitempty"` + SortDirection *string `json:"sortDirection,omitempty"` +} + +// SecurityTimelineAPITimelineResponse defines model for Security_Timeline_API_TimelineResponse. +type SecurityTimelineAPITimelineResponse struct { + // Columns The Timeline's columns + Columns *[]SecurityTimelineAPIColumnHeaderResult `json:"columns,omitempty"` + + // Created The time the Timeline was created, using a 13-digit Epoch timestamp. + Created *float32 `json:"created,omitempty"` + + // CreatedBy The user who created the Timeline. 
+ CreatedBy *string `json:"createdBy,omitempty"` + + // DataProviders Object containing query clauses + DataProviders *[]SecurityTimelineAPIDataProviderResult `json:"dataProviders,omitempty"` + + // DataViewId ID of the Timeline's Data View + DataViewId *string `json:"dataViewId,omitempty"` + + // DateRange The Timeline's search period. + DateRange *struct { + End *SecurityTimelineAPITimelineResponse_DateRange_End `json:"end,omitempty"` + Start *SecurityTimelineAPITimelineResponse_DateRange_Start `json:"start,omitempty"` + } `json:"dateRange,omitempty"` + + // Description The Timeline's description + Description *string `json:"description,omitempty"` + + // EqlOptions EQL query that is used in the correlation tab + EqlOptions *struct { + EventCategoryField *string `json:"eventCategoryField,omitempty"` + Query *string `json:"query,omitempty"` + Size *SecurityTimelineAPITimelineResponse_EqlOptions_Size `json:"size,omitempty"` + TiebreakerField *string `json:"tiebreakerField,omitempty"` + TimestampField *string `json:"timestampField,omitempty"` + } `json:"eqlOptions,omitempty"` + + // EventIdToNoteIds A list of all the notes that are associated to this Timeline. + EventIdToNoteIds *[]SecurityTimelineAPINote `json:"eventIdToNoteIds,omitempty"` + + // EventType Event types displayed in the Timeline + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + EventType *string `json:"eventType,omitempty"` + + // ExcludedRowRendererIds A list of row renderers that should not be used when in `Event renderers` mode + ExcludedRowRendererIds *[]SecurityTimelineAPIRowRendererId `json:"excludedRowRendererIds,omitempty"` + Favorite *[]SecurityTimelineAPIFavoriteTimelineResult `json:"favorite,omitempty"` + + // Filters A list of filters that should be applied to the query + Filters *[]SecurityTimelineAPIFilterTimelineResult `json:"filters,omitempty"` + + // IndexNames A list of index names to use in the query (e.g. when the default data view has been modified) + IndexNames *[]string `json:"indexNames,omitempty"` + + // KqlMode Indicates whether the KQL bar filters the query results or searches for additional results, where: + // * `filter`: filters query results + // * `search`: displays additional search results + KqlMode *string `json:"kqlMode,omitempty"` + + // KqlQuery KQL bar query. + KqlQuery *SecurityTimelineAPISerializedFilterQueryResult `json:"kqlQuery,omitempty"` + + // NoteIds A list of all the ids of notes that are associated to this Timeline. + NoteIds *[]string `json:"noteIds,omitempty"` + + // Notes A list of all the notes that are associated to this Timeline. + Notes *[]SecurityTimelineAPINote `json:"notes,omitempty"` + + // PinnedEventIds A list of all the ids of pinned events that are associated to this Timeline. + PinnedEventIds *[]string `json:"pinnedEventIds,omitempty"` + + // PinnedEventsSaveObject A list of all the pinned events that are associated to this Timeline. 
+ PinnedEventsSaveObject *[]SecurityTimelineAPIPinnedEvent `json:"pinnedEventsSaveObject,omitempty"` + + // SavedObjectId The `savedObjectId` of the Timeline or Timeline template + SavedObjectId string `json:"savedObjectId"` + + // SavedQueryId The ID of the saved query that might be used in the Query tab + SavedQueryId *string `json:"savedQueryId,omitempty"` + + // SavedSearchId The ID of the saved search that is used in the ES|QL tab + SavedSearchId *string `json:"savedSearchId,omitempty"` + Sort *SecurityTimelineAPISort `json:"sort,omitempty"` + + // Status The status of the Timeline. + Status *SecurityTimelineAPITimelineStatus `json:"status,omitempty"` + + // TemplateTimelineId A unique ID (UUID) for Timeline templates. For Timelines, the value is `null`. + TemplateTimelineId *string `json:"templateTimelineId,omitempty"` + + // TemplateTimelineVersion Timeline template version number. For Timelines, the value is `null`. + TemplateTimelineVersion *float32 `json:"templateTimelineVersion,omitempty"` + + // TimelineType The type of Timeline. + TimelineType *SecurityTimelineAPITimelineType `json:"timelineType,omitempty"` + + // Title The Timeline's title. + Title *string `json:"title,omitempty"` + + // Updated The last time the Timeline was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the Timeline + UpdatedBy *string `json:"updatedBy,omitempty"` + + // Version The version of the Timeline or Timeline template + Version string `json:"version"` +} + +// SecurityTimelineAPITimelineResponseDateRangeEnd0 defines model for . +type SecurityTimelineAPITimelineResponseDateRangeEnd0 = string + +// SecurityTimelineAPITimelineResponseDateRangeEnd1 defines model for . +type SecurityTimelineAPITimelineResponseDateRangeEnd1 = float32 + +// SecurityTimelineAPITimelineResponse_DateRange_End defines model for SecurityTimelineAPITimelineResponse.DateRange.End. +type SecurityTimelineAPITimelineResponse_DateRange_End struct { + union json.RawMessage +} + +// SecurityTimelineAPITimelineResponseDateRangeStart0 defines model for . +type SecurityTimelineAPITimelineResponseDateRangeStart0 = string + +// SecurityTimelineAPITimelineResponseDateRangeStart1 defines model for . +type SecurityTimelineAPITimelineResponseDateRangeStart1 = float32 + +// SecurityTimelineAPITimelineResponse_DateRange_Start defines model for SecurityTimelineAPITimelineResponse.DateRange.Start. +type SecurityTimelineAPITimelineResponse_DateRange_Start struct { + union json.RawMessage +} + +// SecurityTimelineAPITimelineResponseEqlOptionsSize0 defines model for . +type SecurityTimelineAPITimelineResponseEqlOptionsSize0 = string + +// SecurityTimelineAPITimelineResponseEqlOptionsSize1 defines model for . +type SecurityTimelineAPITimelineResponseEqlOptionsSize1 = float32 + +// SecurityTimelineAPITimelineResponse_EqlOptions_Size defines model for SecurityTimelineAPITimelineResponse.EqlOptions.Size. +type SecurityTimelineAPITimelineResponse_EqlOptions_Size struct { + union json.RawMessage +} + +// SecurityTimelineAPITimelineSavedToReturnObject defines model for Security_Timeline_API_TimelineSavedToReturnObject. +type SecurityTimelineAPITimelineSavedToReturnObject struct { + // Columns The Timeline's columns + Columns *[]SecurityTimelineAPIColumnHeaderResult `json:"columns,omitempty"` + + // Created The time the Timeline was created, using a 13-digit Epoch timestamp. + Created *float32 `json:"created,omitempty"` + + // CreatedBy The user who created the Timeline. 
+ CreatedBy *string `json:"createdBy,omitempty"` + + // DataProviders Object containing query clauses + DataProviders *[]SecurityTimelineAPIDataProviderResult `json:"dataProviders,omitempty"` + + // DataViewId ID of the Timeline's Data View + DataViewId *string `json:"dataViewId,omitempty"` + + // DateRange The Timeline's search period. + DateRange *struct { + End *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End `json:"end,omitempty"` + Start *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start `json:"start,omitempty"` + } `json:"dateRange,omitempty"` + + // Description The Timeline's description + Description *string `json:"description,omitempty"` + + // EqlOptions EQL query that is used in the correlation tab + EqlOptions *struct { + EventCategoryField *string `json:"eventCategoryField,omitempty"` + Query *string `json:"query,omitempty"` + Size *SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size `json:"size,omitempty"` + TiebreakerField *string `json:"tiebreakerField,omitempty"` + TimestampField *string `json:"timestampField,omitempty"` + } `json:"eqlOptions,omitempty"` + EventIdToNoteIds *[]SecurityTimelineAPINote `json:"eventIdToNoteIds,omitempty"` + + // EventType Event types displayed in the Timeline + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + EventType *string `json:"eventType,omitempty"` + + // ExcludedRowRendererIds A list of row renderers that should not be used when in `Event renderers` mode + ExcludedRowRendererIds *[]SecurityTimelineAPIRowRendererId `json:"excludedRowRendererIds,omitempty"` + Favorite *[]SecurityTimelineAPIFavoriteTimelineResult `json:"favorite,omitempty"` + + // Filters A list of filters that should be applied to the query + Filters *[]SecurityTimelineAPIFilterTimelineResult `json:"filters,omitempty"` + + // IndexNames A list of index names to use in the query (e.g. when the default data view has been modified) + IndexNames *[]string `json:"indexNames,omitempty"` + + // KqlMode Indicates whether the KQL bar filters the query results or searches for additional results, where: + // * `filter`: filters query results + // * `search`: displays additional search results + KqlMode *string `json:"kqlMode,omitempty"` + + // KqlQuery KQL bar query. + KqlQuery *SecurityTimelineAPISerializedFilterQueryResult `json:"kqlQuery,omitempty"` + NoteIds *[]string `json:"noteIds,omitempty"` + Notes *[]SecurityTimelineAPINote `json:"notes,omitempty"` + PinnedEventIds *[]string `json:"pinnedEventIds,omitempty"` + PinnedEventsSaveObject *[]SecurityTimelineAPIPinnedEvent `json:"pinnedEventsSaveObject,omitempty"` + SavedObjectId string `json:"savedObjectId"` + + // SavedQueryId The ID of the saved query that might be used in the Query tab + SavedQueryId *string `json:"savedQueryId,omitempty"` + + // SavedSearchId The ID of the saved search that is used in the ES|QL tab + SavedSearchId *string `json:"savedSearchId,omitempty"` + Sort *SecurityTimelineAPISort `json:"sort,omitempty"` + + // Status The status of the Timeline. + Status *SecurityTimelineAPITimelineStatus `json:"status,omitempty"` + + // TemplateTimelineId A unique ID (UUID) for Timeline templates. For Timelines, the value is `null`. + TemplateTimelineId *string `json:"templateTimelineId,omitempty"` + + // TemplateTimelineVersion Timeline template version number. For Timelines, the value is `null`. + TemplateTimelineVersion *float32 `json:"templateTimelineVersion,omitempty"` + + // TimelineType The type of Timeline. 
+ TimelineType *SecurityTimelineAPITimelineType `json:"timelineType,omitempty"` + + // Title The Timeline's title. + Title *string `json:"title,omitempty"` + + // Updated The last time the Timeline was updated, using a 13-digit Epoch timestamp + Updated *float32 `json:"updated,omitempty"` + + // UpdatedBy The user who last updated the Timeline + UpdatedBy *string `json:"updatedBy,omitempty"` + Version string `json:"version"` +} + +// SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0 defines model for . +type SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0 = string + +// SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1 defines model for . +type SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1 = float32 + +// SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End defines model for SecurityTimelineAPITimelineSavedToReturnObject.DateRange.End. +type SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End struct { + union json.RawMessage +} + +// SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0 defines model for . +type SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0 = string + +// SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1 defines model for . +type SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1 = float32 + +// SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start defines model for SecurityTimelineAPITimelineSavedToReturnObject.DateRange.Start. +type SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start struct { + union json.RawMessage +} + +// SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0 defines model for . +type SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0 = string + +// SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1 defines model for . +type SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1 = float32 + +// SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size defines model for SecurityTimelineAPITimelineSavedToReturnObject.EqlOptions.Size. +type SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size struct { + union json.RawMessage +} + +// SecurityTimelineAPITimelineStatus The status of the Timeline. +type SecurityTimelineAPITimelineStatus string + +// SecurityTimelineAPITimelineType The type of Timeline. +type SecurityTimelineAPITimelineType string + +// ShortURLAPIsUrlResponse defines model for Short_URL_APIs_urlResponse. +type ShortURLAPIsUrlResponse struct { + AccessCount *int `json:"accessCount,omitempty"` + AccessDate *string `json:"accessDate,omitempty"` + CreateDate *string `json:"createDate,omitempty"` + + // Id The identifier for the short URL. + Id *string `json:"id,omitempty"` + Locator *struct { + // Id The identifier for the locator. + Id *string `json:"id,omitempty"` + + // State The locator parameters. + State *map[string]interface{} `json:"state,omitempty"` + + // Version The version of Kibana when the short URL was created. + Version *string `json:"version,omitempty"` + } `json:"locator,omitempty"` + + // Slug A random human-readable slug is automatically generated if the `humanReadableSlug` parameter is set to `true`. If it is set to `false`, a random short string is generated. + Slug *string `json:"slug,omitempty"` +} + +// SyntheticsBrowserMonitorFields defines model for Synthetics_browserMonitorFields. +type SyntheticsBrowserMonitorFields struct { + // Alert The alert configuration. The default is `{ status: { enabled: true }, tls: { enabled: true } }`. 
+	Alert *map[string]interface{} `json:"alert,omitempty"`
+
+	// Enabled Specify whether the monitor is enabled.
+	Enabled *bool `json:"enabled,omitempty"`
+
+	// IgnoreHttpsErrors Ignore HTTPS errors.
+	IgnoreHttpsErrors *bool `json:"ignore_https_errors,omitempty"`
+
+	// InlineScript The inline script.
+	InlineScript string `json:"inline_script"`
+
+	// Labels Key-value pairs of labels to associate with the monitor. Labels can be used for filtering and grouping monitors.
+	Labels *map[string]string `json:"labels,omitempty"`
+
+	// Locations The location to deploy the monitor.
+	// Monitors can be deployed in multiple locations so that you can detect differences in availability and response times across those locations.
+	// To list available locations you can:
+	//
+	// - Run the `elastic-synthetics locations` command with the deployment's Kibana URL.
+	// - Go to *Synthetics > Management* and click *Create monitor*. Locations will be listed in *Locations*.
+	Locations *[]string `json:"locations,omitempty"`
+
+	// Name The monitor name.
+	Name string `json:"name"`
+
+	// Namespace The namespace field should be lowercase and not contain spaces. The namespace must not include any of the following characters: `*`, `\`, `/`, `?`, `"`, `<`, `>`, `|`, whitespace, `,`, `#`, `:`, or `-`.
+	Namespace *string `json:"namespace,omitempty"`
+
+	// Params The monitor parameters.
+	Params *string `json:"params,omitempty"`
+
+	// PlaywrightOptions Playwright options.
+	PlaywrightOptions *map[string]interface{} `json:"playwright_options,omitempty"`
+
+	// PrivateLocations The private locations to which the monitors will be deployed.
+	// These private locations refer to locations hosted and managed by you, whereas `locations` are hosted by Elastic.
+	// You can specify a private location using the location's name.
+	// To list available private locations you can:
+	//
+	// - Run the `elastic-synthetics locations` command with the deployment's Kibana URL.
+	// - Go to *Synthetics > Settings* and click *Private locations*. Private locations will be listed in the table.
+	//
+	// > info
+	// > You can provide `locations` or `private_locations` or both. At least one is required.
+	PrivateLocations *[]string `json:"private_locations,omitempty"`
+
+	// RetestOnFailure Turn retesting on or off for when a monitor fails. By default, monitors are automatically retested if the monitor goes from "up" to "down". If the result of the retest is also "down", an error will be created and if configured, an alert sent. The monitor will then resume running according to the defined schedule. Using `retest_on_failure` can reduce noise related to transient problems.
+	RetestOnFailure *bool `json:"retest_on_failure,omitempty"`
+
+	// Schedule The monitor's schedule in minutes. Supported values are `1`, `3`, `5`, `10`, `15`, `30`, `60`, `120`, and `240`. The default value is `3` minutes for HTTP, TCP, and ICMP monitors. The default value is `10` minutes for Browser monitors.
+	Schedule *float32 `json:"schedule,omitempty"`
+
+	// Screenshots The screenshot option.
+	Screenshots *SyntheticsBrowserMonitorFieldsScreenshots `json:"screenshots,omitempty"`
+
+	// ServiceName The APM service name.
+	ServiceName *string `json:"service.name,omitempty"`
+
+	// SyntheticsArgs Synthetics agent CLI arguments.
+	SyntheticsArgs *[]interface{} `json:"synthetics_args,omitempty"`
+
+	// Tags An array of tags.
+	Tags *[]string `json:"tags,omitempty"`
+
+	// Timeout The monitor timeout in seconds. 
The monitor will fail if it doesn't complete within this time.
+	Timeout *float32 `json:"timeout,omitempty"`
+
+	// Type The monitor type.
+	Type SyntheticsBrowserMonitorFieldsType `json:"type"`
+	AdditionalProperties map[string]interface{} `json:"-"`
+}
+
+// SyntheticsBrowserMonitorFieldsScreenshots The screenshot option.
+type SyntheticsBrowserMonitorFieldsScreenshots string
+
+// SyntheticsBrowserMonitorFieldsType The monitor type.
+type SyntheticsBrowserMonitorFieldsType string
+
+// SyntheticsCommonMonitorFields defines model for Synthetics_commonMonitorFields.
+type SyntheticsCommonMonitorFields struct {
+	// Alert The alert configuration. The default is `{ status: { enabled: true }, tls: { enabled: true } }`.
+	Alert *map[string]interface{} `json:"alert,omitempty"`
+
+	// Enabled Specify whether the monitor is enabled.
+	Enabled *bool `json:"enabled,omitempty"`
+
+	// Labels Key-value pairs of labels to associate with the monitor. Labels can be used for filtering and grouping monitors.
+	Labels *map[string]string `json:"labels,omitempty"`
+
+	// Locations The location to deploy the monitor.
+	// Monitors can be deployed in multiple locations so that you can detect differences in availability and response times across those locations.
+	// To list available locations you can:
+	//
+	// - Run the `elastic-synthetics locations` command with the deployment's Kibana URL.
+	// - Go to *Synthetics > Management* and click *Create monitor*. Locations will be listed in *Locations*.
+	Locations *[]string `json:"locations,omitempty"`
+
+	// Name The monitor name.
+	Name string `json:"name"`
+
+	// Namespace The namespace field should be lowercase and not contain spaces. The namespace must not include any of the following characters: `*`, `\`, `/`, `?`, `"`, `<`, `>`, `|`, whitespace, `,`, `#`, `:`, or `-`.
+	Namespace *string `json:"namespace,omitempty"`
+
+	// Params The monitor parameters.
+	Params *string `json:"params,omitempty"`
+
+	// PrivateLocations The private locations to which the monitors will be deployed.
+	// These private locations refer to locations hosted and managed by you, whereas `locations` are hosted by Elastic.
+	// You can specify a private location using the location's name.
+	// To list available private locations you can:
+	//
+	// - Run the `elastic-synthetics locations` command with the deployment's Kibana URL.
+	// - Go to *Synthetics > Settings* and click *Private locations*. Private locations will be listed in the table.
+	//
+	// > info
+	// > You can provide `locations` or `private_locations` or both. At least one is required.
+	PrivateLocations *[]string `json:"private_locations,omitempty"`
+
+	// RetestOnFailure Turn retesting on or off for when a monitor fails. By default, monitors are automatically retested if the monitor goes from "up" to "down". If the result of the retest is also "down", an error will be created and if configured, an alert sent. The monitor will then resume running according to the defined schedule. Using `retest_on_failure` can reduce noise related to transient problems.
+	RetestOnFailure *bool `json:"retest_on_failure,omitempty"`
+
+	// Schedule The monitor's schedule in minutes. Supported values are `1`, `3`, `5`, `10`, `15`, `30`, `60`, `120`, and `240`. The default value is `3` minutes for HTTP, TCP, and ICMP monitors. The default value is `10` minutes for Browser monitors.
+	Schedule *float32 `json:"schedule,omitempty"`
+
+	// ServiceName The APM service name. 
+ ServiceName *string `json:"service.name,omitempty"` + + // Tags An array of tags. + Tags *[]string `json:"tags,omitempty"` + + // Timeout The monitor timeout in seconds. The monitor will fail if it doesn't complete within this time. + Timeout *float32 `json:"timeout,omitempty"` +} + +// SyntheticsGetParameterResponse defines model for Synthetics_getParameterResponse. +type SyntheticsGetParameterResponse struct { + // Description The description of the parameter. It is included in the response if the user has read-only permissions to the Synthetics app. + Description *string `json:"description,omitempty"` + + // Id The unique identifier of the parameter. + Id *string `json:"id,omitempty"` + + // Key The key of the parameter. + Key *string `json:"key,omitempty"` + + // Namespaces The namespaces associated with the parameter. It is included in the response if the user has read-only permissions to the Synthetics app. + Namespaces *[]string `json:"namespaces,omitempty"` + + // Tags An array of tags associated with the parameter. It is included in the response if the user has read-only permissions to the Synthetics app. + Tags *[]string `json:"tags,omitempty"` + + // Value The value associated with the parameter. It will be included in the response if the user has write permissions. + Value *string `json:"value,omitempty"` +} + +// SyntheticsGetPrivateLocation defines model for Synthetics_getPrivateLocation. +type SyntheticsGetPrivateLocation struct { + // AgentPolicyId The ID of the agent policy associated with the private location. + AgentPolicyId *string `json:"agentPolicyId,omitempty"` + + // Geo Geographic coordinates (WGS84) for the location. + Geo *struct { + // Lat The latitude of the location. + Lat float32 `json:"lat"` + + // Lon The longitude of the location. + Lon float32 `json:"lon"` + } `json:"geo,omitempty"` + + // Id The unique identifier of the private location. + Id *string `json:"id,omitempty"` + + // IsInvalid Indicates whether the location is invalid. If `true`, the location is invalid, which means the agent policy associated with the location is deleted. + IsInvalid *bool `json:"isInvalid,omitempty"` + + // Label A label for the private location. + Label *string `json:"label,omitempty"` + + // Namespace The namespace of the location, which is the same as the namespace of the agent policy associated with the location. + Namespace *string `json:"namespace,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// SyntheticsHttpMonitorFields defines model for Synthetics_httpMonitorFields. +type SyntheticsHttpMonitorFields struct { + // Alert The alert configuration. The default is `{ status: { enabled: true }, tls: { enabled: true } }`. + Alert *map[string]interface{} `json:"alert,omitempty"` + + // Check The check request settings. + Check *struct { + // Request An optional request to send to the remote host. + Request *struct { + // Body Optional request body content. + Body *string `json:"body,omitempty"` + + // Headers A dictionary of additional HTTP headers to send. By default, Synthetics will set the User-Agent header to identify itself. + Headers *map[string]interface{} `json:"headers,omitempty"` + + // Method The HTTP method to use. + Method *SyntheticsHttpMonitorFieldsCheckRequestMethod `json:"method,omitempty"` + } `json:"request,omitempty"` + + // Response The expected response. + Response *SyntheticsHttpMonitorFields_Check_Response `json:"response,omitempty"` + } `json:"check,omitempty"` + + // Enabled Specify whether the monitor is enabled. 
+ Enabled *bool `json:"enabled,omitempty"`
+
+ // Ipv4 If `true`, ping using the ipv4 protocol.
+ Ipv4 *bool `json:"ipv4,omitempty"`
+
+ // Ipv6 If `true`, ping using the ipv6 protocol.
+ Ipv6 *bool `json:"ipv6,omitempty"`
+
+ // Labels Key-value pairs of labels to associate with the monitor. Labels can be used for filtering and grouping monitors.
+ Labels *map[string]string `json:"labels,omitempty"`
+
+ // Locations The location to deploy the monitor.
+ // Monitors can be deployed in multiple locations so that you can detect differences in availability and response times across those locations.
+ // To list available locations you can:
+ //
+ // - Run the `elastic-synthetics locations` command with the deployment's Kibana URL.
+ // - Go to *Synthetics > Management* and click *Create monitor*. Locations will be listed in *Locations*.
+ Locations *[]string `json:"locations,omitempty"`
+
+ // MaxRedirects The maximum number of redirects to follow.
+ MaxRedirects *float32 `json:"max_redirects,omitempty"`
+
+ // Mode The mode of the monitor. If it is `all`, the monitor pings all resolvable IPs for a hostname. If it is `any`, the monitor pings only one IP address for a hostname. If you're using a DNS load balancer and want to ping every IP address for the specified hostname, you should use `all`.
+ Mode *SyntheticsHttpMonitorFieldsMode `json:"mode,omitempty"`
+
+ // Name The monitor name.
+ Name string `json:"name"`
+
+ // Namespace The namespace field should be lowercase and not contain spaces. The namespace must not include any of the following characters: `*`, `\`, `/`, `?`, `"`, `<`, `>`, `|`, whitespace, `,`, `#`, `:`, or `-`.
+ Namespace *string `json:"namespace,omitempty"`
+
+ // Params The monitor parameters.
+ Params *string `json:"params,omitempty"`
+
+ // Password The password for authenticating with the server. The credentials are passed with the request.
+ Password *string `json:"password,omitempty"`
+
+ // PrivateLocations The private locations to which the monitors will be deployed.
+ // These private locations refer to locations hosted and managed by you, whereas `locations` are hosted by Elastic.
+ // You can specify a private location using the location's name.
+ // To list available private locations you can:
+ //
+ // - Run the `elastic-synthetics locations` command with the deployment's Kibana URL.
+ // - Go to *Synthetics > Settings* and click *Private locations*. Private locations will be listed in the table.
+ //
+ // > info
+ // > You can provide `locations` or `private_locations` or both. At least one is required.
+ PrivateLocations *[]string `json:"private_locations,omitempty"`
+
+ // ProxyHeaders Additional headers to send to proxies during CONNECT requests.
+ ProxyHeaders *map[string]interface{} `json:"proxy_headers,omitempty"`
+
+ // ProxyUrl The URL of the proxy to use for this monitor.
+ ProxyUrl *string `json:"proxy_url,omitempty"`
+
+ // Response Controls the indexing of the HTTP response body contents to the `http.response.body.contents` field.
+ Response *map[string]interface{} `json:"response,omitempty"`
+
+ // RetestOnFailure Turn retesting on or off for when a monitor fails. By default, monitors are automatically retested if the monitor goes from "up" to "down". If the result of the retest is also "down", an error will be created and, if configured, an alert is sent. The monitor will then resume running according to the defined schedule. Using `retest_on_failure` can reduce noise related to transient problems.
+ RetestOnFailure *bool `json:"retest_on_failure,omitempty"` + + // Schedule The monitor's schedule in minutes. Supported values are `1`, `3`, `5`, `10`, `15`, `30`, `60`, `120`, and `240`. The default value is `3` minutes for HTTP, TCP, and ICMP monitors. The default value is `10` minutes for Browser monitors. + Schedule *float32 `json:"schedule,omitempty"` + + // ServiceName The APM service name. + ServiceName *string `json:"service.name,omitempty"` + + // Ssl The TLS/SSL connection settings for use with the HTTPS endpoint. If you don't specify settings, the system defaults are used. + Ssl *map[string]interface{} `json:"ssl,omitempty"` + + // Tags An array of tags. + Tags *[]string `json:"tags,omitempty"` + + // Timeout The monitor timeout in seconds. The monitor will fail if it doesn't complete within this time. + Timeout *float32 `json:"timeout,omitempty"` + + // Type The monitor type. + Type SyntheticsHttpMonitorFieldsType `json:"type"` + + // Url The URL to monitor. + Url string `json:"url"` + + // Username The username for authenticating with the server. The credentials are passed with the request. + Username *string `json:"username,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// SyntheticsHttpMonitorFieldsCheckRequestMethod The HTTP method to use. +type SyntheticsHttpMonitorFieldsCheckRequestMethod string + +// SyntheticsHttpMonitorFields_Check_Response The expected response. +type SyntheticsHttpMonitorFields_Check_Response struct { + Body *map[string]interface{} `json:"body,omitempty"` + + // Headers A dictionary of expected HTTP headers. If the header is not found, the check fails. + Headers *map[string]interface{} `json:"headers,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// SyntheticsHttpMonitorFieldsMode The mode of the monitor. If it is `all`, the monitor pings all resolvable IPs for a hostname. If it is `any`, the monitor pings only one IP address for a hostname. If you're using a DNS-load balancer and want to ping every IP address for the specified hostname, you should use `all`. +type SyntheticsHttpMonitorFieldsMode string + +// SyntheticsHttpMonitorFieldsType The monitor type. +type SyntheticsHttpMonitorFieldsType string + +// SyntheticsIcmpMonitorFields defines model for Synthetics_icmpMonitorFields. +type SyntheticsIcmpMonitorFields struct { + // Alert The alert configuration. The default is `{ status: { enabled: true }, tls: { enabled: true } }`. + Alert *map[string]interface{} `json:"alert,omitempty"` + + // Enabled Specify whether the monitor is enabled. + Enabled *bool `json:"enabled,omitempty"` + + // Host The host to ping. + Host string `json:"host"` + + // Labels Key-value pairs of labels to associate with the monitor. Labels can be used for filtering and grouping monitors. + Labels *map[string]string `json:"labels,omitempty"` + + // Locations The location to deploy the monitor. + // Monitors can be deployed in multiple locations so that you can detect differences in availability and response times across those locations. + // To list available locations you can: + // + // - Run the `elastic-synthetics locations` command with the deployment's Kibana URL. + // - Go to *Synthetics > Management* and click *Create monitor*. Locations will be listed in *Locations*. + Locations *[]string `json:"locations,omitempty"` + + // Name The monitor name. + Name string `json:"name"` + + // Namespace The namespace field should be lowercase and not contain spaces. 
The namespace must not include any of the following characters: `*`, `\`, `/`, `?`, `"`, `<`, `>`, `|`, whitespace, `,`, `#`, `:`, or `-`. + Namespace *string `json:"namespace,omitempty"` + + // Params The monitor parameters. + Params *string `json:"params,omitempty"` + + // PrivateLocations The private locations to which the monitors will be deployed. + // These private locations refer to locations hosted and managed by you, whereas `locations` are hosted by Elastic. + // You can specify a private location using the location's name. + // To list available private locations you can: + // + // - Run the `elastic-synthetics locations` command with the deployment's Kibana URL. + // - Go to *Synthetics > Settings* and click *Private locationsr*. Private locations will be listed in the table. + // + // > info + // > You can provide `locations` or `private_locations` or both. At least one is required. + PrivateLocations *[]string `json:"private_locations,omitempty"` + + // RetestOnFailure Turn retesting for when a monitor fails on or off. By default, monitors are automatically retested if the monitor goes from "up" to "down". If the result of the retest is also "down", an error will be created and if configured, an alert sent. The monitor will then resume running according to the defined schedule. Using `retest_on_failure` can reduce noise related to transient problems. + RetestOnFailure *bool `json:"retest_on_failure,omitempty"` + + // Schedule The monitor's schedule in minutes. Supported values are `1`, `3`, `5`, `10`, `15`, `30`, `60`, `120`, and `240`. The default value is `3` minutes for HTTP, TCP, and ICMP monitors. The default value is `10` minutes for Browser monitors. + Schedule *float32 `json:"schedule,omitempty"` + + // ServiceName The APM service name. + ServiceName *string `json:"service.name,omitempty"` + + // Tags An array of tags. + Tags *[]string `json:"tags,omitempty"` + + // Timeout The monitor timeout in seconds. The monitor will fail if it doesn't complete within this time. + Timeout *float32 `json:"timeout,omitempty"` + + // Type The monitor type. + Type SyntheticsIcmpMonitorFieldsType `json:"type"` + + // Wait The wait time in seconds. + Wait *float32 `json:"wait,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// SyntheticsIcmpMonitorFieldsType The monitor type. +type SyntheticsIcmpMonitorFieldsType string + +// SyntheticsParameterRequest defines model for Synthetics_parameterRequest. +type SyntheticsParameterRequest struct { + // Description A description of the parameter. + Description *string `json:"description,omitempty"` + + // Key The key of the parameter. + Key string `json:"key"` + + // ShareAcrossSpaces Specify whether the parameter should be shared across spaces. + ShareAcrossSpaces *bool `json:"share_across_spaces,omitempty"` + + // Tags An array of tags to categorize the parameter. + Tags *[]string `json:"tags,omitempty"` + + // Value The value associated with the parameter. + Value string `json:"value"` +} + +// SyntheticsPostParameterResponse defines model for Synthetics_postParameterResponse. +type SyntheticsPostParameterResponse struct { + // Description A description of the parameter. + Description *string `json:"description,omitempty"` + + // Id The unique identifier for the parameter. + Id *string `json:"id,omitempty"` + + // Key The parameter key. + Key *string `json:"key,omitempty"` + + // ShareAcrossSpaces Indicates whether the parameter is shared across spaces. 
+ ShareAcrossSpaces *bool `json:"share_across_spaces,omitempty"` + + // Tags An array of tags associated with the parameter. + Tags *[]string `json:"tags,omitempty"` + + // Value The value associated with the parameter. + Value *string `json:"value,omitempty"` +} + +// SyntheticsTcpMonitorFields defines model for Synthetics_tcpMonitorFields. +type SyntheticsTcpMonitorFields struct { + // Alert The alert configuration. The default is `{ status: { enabled: true }, tls: { enabled: true } }`. + Alert *map[string]interface{} `json:"alert,omitempty"` + + // Enabled Specify whether the monitor is enabled. + Enabled *bool `json:"enabled,omitempty"` + + // Host The host to monitor; it can be an IP address or a hostname. The host can include the port using a colon, for example "example.com:9200". + Host string `json:"host"` + + // Labels Key-value pairs of labels to associate with the monitor. Labels can be used for filtering and grouping monitors. + Labels *map[string]string `json:"labels,omitempty"` + + // Locations The location to deploy the monitor. + // Monitors can be deployed in multiple locations so that you can detect differences in availability and response times across those locations. + // To list available locations you can: + // + // - Run the `elastic-synthetics locations` command with the deployment's Kibana URL. + // - Go to *Synthetics > Management* and click *Create monitor*. Locations will be listed in *Locations*. + Locations *[]string `json:"locations,omitempty"` + + // Name The monitor name. + Name string `json:"name"` + + // Namespace The namespace field should be lowercase and not contain spaces. The namespace must not include any of the following characters: `*`, `\`, `/`, `?`, `"`, `<`, `>`, `|`, whitespace, `,`, `#`, `:`, or `-`. + Namespace *string `json:"namespace,omitempty"` + + // Params The monitor parameters. + Params *string `json:"params,omitempty"` + + // PrivateLocations The private locations to which the monitors will be deployed. + // These private locations refer to locations hosted and managed by you, whereas `locations` are hosted by Elastic. + // You can specify a private location using the location's name. + // To list available private locations you can: + // + // - Run the `elastic-synthetics locations` command with the deployment's Kibana URL. + // - Go to *Synthetics > Settings* and click *Private locationsr*. Private locations will be listed in the table. + // + // > info + // > You can provide `locations` or `private_locations` or both. At least one is required. + PrivateLocations *[]string `json:"private_locations,omitempty"` + + // ProxyUrl The URL of the SOCKS5 proxy to use when connecting to the server. The value must be a URL with a scheme of `socks5://`. If the SOCKS5 proxy server requires client authentication, then a username and password can be embedded in the URL. When using a proxy, hostnames are resolved on the proxy server instead of on the client. You can change this behavior by setting the `proxy_use_local_resolver` option. + ProxyUrl *string `json:"proxy_url,omitempty"` + + // ProxyUseLocalResolver Specify that hostnames are resolved locally instead of being resolved on the proxy server. If `false`, name resolution occurs on the proxy server. + ProxyUseLocalResolver *bool `json:"proxy_use_local_resolver,omitempty"` + + // RetestOnFailure Turn retesting for when a monitor fails on or off. By default, monitors are automatically retested if the monitor goes from "up" to "down". 
If the result of the retest is also "down", an error will be created and, if configured, an alert is sent. The monitor will then resume running according to the defined schedule. Using `retest_on_failure` can reduce noise related to transient problems.
+ RetestOnFailure *bool `json:"retest_on_failure,omitempty"`
+
+ // Schedule The monitor's schedule in minutes. Supported values are `1`, `3`, `5`, `10`, `15`, `30`, `60`, `120`, and `240`. The default value is `3` minutes for HTTP, TCP, and ICMP monitors. The default value is `10` minutes for Browser monitors.
+ Schedule *float32 `json:"schedule,omitempty"`
+
+ // ServiceName The APM service name.
+ ServiceName *string `json:"service.name,omitempty"`
+
+ // Ssl The TLS/SSL connection settings for use with the HTTPS endpoint. If you don't specify settings, the system defaults are used.
+ Ssl *map[string]interface{} `json:"ssl,omitempty"`
+
+ // Tags An array of tags.
+ Tags *[]string `json:"tags,omitempty"`
+
+ // Timeout The monitor timeout in seconds. The monitor will fail if it doesn't complete within this time.
+ Timeout *float32 `json:"timeout,omitempty"`
+
+ // Type The monitor type.
+ Type SyntheticsTcpMonitorFieldsType `json:"type"`
+ AdditionalProperties map[string]interface{} `json:"-"`
+}
+
+// SyntheticsTcpMonitorFieldsType The monitor type.
+type SyntheticsTcpMonitorFieldsType string
+
+// TaskManagerHealthAPIsConfiguration This object summarizes the current configuration of Task Manager. This includes dynamic configurations that change over time, such as `poll_interval` and `max_workers`, which can adjust in reaction to changing load on the system.
+type TaskManagerHealthAPIsConfiguration = map[string]interface{}
+
+// TaskManagerHealthAPIsHealthResponse defines model for Task_manager_health_APIs_health_response.
+type TaskManagerHealthAPIsHealthResponse struct {
+ Id *string `json:"id,omitempty"`
+ LastUpdate *string `json:"last_update,omitempty"`
+ Stats *struct {
+ // CapacityEstimation This object provides a rough estimate about the sufficiency of its capacity. These are estimates based on historical data and should not be used as predictions.
+ CapacityEstimation *map[string]interface{} `json:"capacity_estimation,omitempty"`
+
+ // Configuration This object summarizes the current configuration of Task Manager. This includes dynamic configurations that change over time, such as `poll_interval` and `max_workers`, which can adjust in reaction to changing load on the system.
+ Configuration *TaskManagerHealthAPIsConfiguration `json:"configuration,omitempty"`
+
+ // Runtime This object tracks runtime performance of Task Manager, tracking task drift, worker load, and stats broken down by type, including duration and run results.
+ Runtime *map[string]interface{} `json:"runtime,omitempty"`
+
+ // Workload This object summarizes the workload across the cluster, including the tasks in the system, their types, and current status.
+ Workload *TaskManagerHealthAPIsWorkload `json:"workload,omitempty"`
+ } `json:"stats,omitempty"`
+ Status *string `json:"status,omitempty"`
+ Timestamp *string `json:"timestamp,omitempty"`
+}
+
+// TaskManagerHealthAPIsWorkload This object summarizes the workload across the cluster, including the tasks in the system, their types, and current status.
+type TaskManagerHealthAPIsWorkload = map[string]interface{}
+
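The generated Synthetics monitor models above all follow the same pattern: required fields are plain values, optional fields are pointers tagged with `omitempty`, and unrecognized keys are collected in `AdditionalProperties`. The following self-contained sketch shows how the optional-pointer part of that pattern serializes; `monitorFields` and `ptr` are illustrative stand-ins for this sketch only, not the generated types.

package main

import (
	"encoding/json"
	"fmt"
)

// monitorFields mirrors the shape of the generated Synthetics*MonitorFields
// structs: required fields are values, optional fields are pointers.
type monitorFields struct {
	Name     string    `json:"name"`
	Type     string    `json:"type"`
	Schedule *float32  `json:"schedule,omitempty"`
	Tags     *[]string `json:"tags,omitempty"`
}

// ptr takes the address of a literal so optional fields can be set inline.
func ptr[T any](v T) *T { return &v }

func main() {
	m := monitorFields{
		Name:     "home-page",
		Type:     "http",
		Schedule: ptr(float32(10)),
		// Tags stays nil, so the key is omitted from the JSON output.
	}
	b, _ := json.Marshal(m)
	fmt.Println(string(b)) // {"name":"home-page","type":"http","schedule":10}
}

+// AgentPolicy defines model for agent_policy.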
+type AgentPolicy struct { + AdvancedSettings *struct { + AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory,omitempty"` + AgentDownloadTimeout interface{} `json:"agent_download_timeout,omitempty"` + AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs,omitempty"` + AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval,omitempty"` + AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles,omitempty"` + AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes,omitempty"` + AgentLoggingLevel interface{} `json:"agent_logging_level,omitempty"` + AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period,omitempty"` + AgentLoggingToFiles interface{} `json:"agent_logging_to_files,omitempty"` + AgentMonitoringRuntimeExperimental interface{} `json:"agent_monitoring_runtime_experimental,omitempty"` + } `json:"advanced_settings,omitempty"` + AgentFeatures *[]struct { + Enabled bool `json:"enabled"` + Name string `json:"name"` + } `json:"agent_features,omitempty"` + Agentless *struct { + CloudConnectors *struct { + Enabled bool `json:"enabled"` + TargetCsp *string `json:"target_csp,omitempty"` + } `json:"cloud_connectors,omitempty"` + Resources *struct { + Requests *struct { + Cpu *string `json:"cpu,omitempty"` + Memory *string `json:"memory,omitempty"` + } `json:"requests,omitempty"` + } `json:"resources,omitempty"` + } `json:"agentless,omitempty"` + Agents *float32 `json:"agents,omitempty"` + DataOutputId *string `json:"data_output_id,omitempty"` + Description *string `json:"description,omitempty"` + DownloadSourceId *string `json:"download_source_id,omitempty"` + FleetServerHostId *string `json:"fleet_server_host_id,omitempty"` + + // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. + GlobalDataTags *[]AgentPolicyGlobalDataTagsItem `json:"global_data_tags,omitempty"` + HasFleetServer *bool `json:"has_fleet_server,omitempty"` + Id string `json:"id"` + InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` + IsManaged bool `json:"is_managed"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + + // IsProtected Indicates whether the agent policy has tamper protection enabled. Default false. 
+ IsProtected bool `json:"is_protected"` + + // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled + KeepMonitoringAlive *bool `json:"keep_monitoring_alive,omitempty"` + MonitoringDiagnostics *struct { + Limit *struct { + Burst *float32 `json:"burst,omitempty"` + Interval *string `json:"interval,omitempty"` + } `json:"limit,omitempty"` + Uploader *struct { + InitDur *string `json:"init_dur,omitempty"` + MaxDur *string `json:"max_dur,omitempty"` + MaxRetries *float32 `json:"max_retries,omitempty"` + } `json:"uploader,omitempty"` + } `json:"monitoring_diagnostics,omitempty"` + MonitoringEnabled *[]AgentPolicyMonitoringEnabled `json:"monitoring_enabled,omitempty"` + MonitoringHttp *struct { + Buffer *struct { + Enabled *bool `json:"enabled,omitempty"` + } `json:"buffer,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + Host *string `json:"host,omitempty"` + Port *float32 `json:"port,omitempty"` + } `json:"monitoring_http,omitempty"` + MonitoringOutputId *string `json:"monitoring_output_id,omitempty"` + MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` + Name string `json:"name"` + Namespace string `json:"namespace"` + + // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *map[string]interface{} `json:"overrides,omitempty"` + PackagePolicies *AgentPolicy_PackagePolicies `json:"package_policies,omitempty"` + RequiredVersions *[]struct { + // Percentage Target percentage of agents to auto upgrade + Percentage float32 `json:"percentage"` + + // Version Target version for automatic agent upgrade + Version string `json:"version"` + } `json:"required_versions,omitempty"` + Revision float32 `json:"revision"` + SchemaVersion *string `json:"schema_version,omitempty"` + SpaceIds *[]string `json:"space_ids,omitempty"` + Status AgentPolicyStatus `json:"status"` + + // SupportsAgentless Indicates whether the agent policy supports agentless integrations. + SupportsAgentless *bool `json:"supports_agentless,omitempty"` + UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` + UnprivilegedAgents *float32 `json:"unprivileged_agents,omitempty"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Version *string `json:"version,omitempty"` +} + +// AgentPolicyMonitoringEnabled defines model for AgentPolicy.MonitoringEnabled. +type AgentPolicyMonitoringEnabled string + +// AgentPolicyPackagePolicies0 defines model for . +type AgentPolicyPackagePolicies0 = []string + +// AgentPolicyPackagePolicies1 This field is present only when retrieving a single agent policy, or when retrieving a list of agent policies with the ?full=true parameter +type AgentPolicyPackagePolicies1 = []struct { + // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. 
+ AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions,omitempty"` + Agents *float32 `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + CreatedBy string `json:"created_by"` + + // Description Package policy description + Description *string `json:"description,omitempty"` + Elasticsearch *AgentPolicy_PackagePolicies_1_Elasticsearch `json:"elasticsearch,omitempty"` + Enabled bool `json:"enabled"` + Id string `json:"id"` + Inputs AgentPolicy_PackagePolicies_1_Inputs `json:"inputs"` + IsManaged *bool `json:"is_managed,omitempty"` + + // Name Package policy name (should be unique) + Name string `json:"name"` + + // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id,omitempty"` + + // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *struct { + Inputs *map[string]interface{} `json:"inputs,omitempty"` + } `json:"overrides,omitempty"` + Package *struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package,omitempty"` + + // PolicyId Agent policy ID where that package policy will be added + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id,omitempty"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Revision float32 `json:"revision"` + SecretReferences *[]struct { + Id string `json:"id"` + } `json:"secret_references,omitempty"` + SpaceIds *[]string `json:"spaceIds,omitempty"` + + // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. + SupportsAgentless *bool `json:"supports_agentless,omitempty"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Vars *AgentPolicy_PackagePolicies_1_Vars `json:"vars,omitempty"` + Version *string `json:"version,omitempty"` +} + +// AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges defines model for AgentPolicy.PackagePolicies.1.Elasticsearch.Privileges. +type AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges struct { + Cluster *[]string `json:"cluster,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// AgentPolicy_PackagePolicies_1_Elasticsearch defines model for AgentPolicy.PackagePolicies.1.Elasticsearch. +type AgentPolicy_PackagePolicies_1_Elasticsearch struct { + Privileges *AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges `json:"privileges,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// AgentPolicyPackagePolicies1Inputs0 defines model for . 
+type AgentPolicyPackagePolicies1Inputs0 = []struct { + CompiledInput interface{} `json:"compiled_input"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + PolicyTemplate *string `json:"policy_template,omitempty"` + Streams []struct { + CompiledStream interface{} `json:"compiled_stream"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + DataStream struct { + Dataset string `json:"dataset"` + Elasticsearch *struct { + DynamicDataset *bool `json:"dynamic_dataset,omitempty"` + DynamicNamespace *bool `json:"dynamic_namespace,omitempty"` + Privileges *struct { + Indices *[]string `json:"indices,omitempty"` + } `json:"privileges,omitempty"` + } `json:"elasticsearch,omitempty"` + Type string `json:"type"` + } `json:"data_stream"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + Release *AgentPolicyPackagePolicies1Inputs0StreamsRelease `json:"release,omitempty"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + } `json:"streams"` + Type string `json:"type"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` +} + +// AgentPolicyPackagePolicies1Inputs0StreamsRelease defines model for AgentPolicy.PackagePolicies.1.Inputs.0.Streams.Release. +type AgentPolicyPackagePolicies1Inputs0StreamsRelease string + +// AgentPolicyPackagePolicies1Inputs1 Package policy inputs (see integration documentation to know what inputs are available) +type AgentPolicyPackagePolicies1Inputs1 map[string]struct { + // Enabled enable or disable that input, (default to true) + Enabled *bool `json:"enabled,omitempty"` + + // Streams Input streams (see integration documentation to know what streams are available) + Streams *map[string]struct { + // Enabled enable or disable that stream, (default to true) + Enabled *bool `json:"enabled,omitempty"` + + // Vars Input/stream level variable (see integration documentation for more information) + Vars *map[string]*AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties `json:"vars,omitempty"` + } `json:"streams,omitempty"` + + // Vars Input/stream level variable (see integration documentation for more information) + Vars *map[string]*AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties `json:"vars,omitempty"` +} + +// AgentPolicyPackagePolicies1Inputs1StreamsVars0 defines model for . +type AgentPolicyPackagePolicies1Inputs1StreamsVars0 = bool + +// AgentPolicyPackagePolicies1Inputs1StreamsVars1 defines model for . +type AgentPolicyPackagePolicies1Inputs1StreamsVars1 = string + +// AgentPolicyPackagePolicies1Inputs1StreamsVars2 defines model for . 
+type AgentPolicyPackagePolicies1Inputs1StreamsVars2 = float32 + +// AgentPolicyPackagePolicies1Inputs1StreamsVars3 defines model for . +type AgentPolicyPackagePolicies1Inputs1StreamsVars3 = []string + +// AgentPolicyPackagePolicies1Inputs1StreamsVars4 defines model for . +type AgentPolicyPackagePolicies1Inputs1StreamsVars4 = []float32 + +// AgentPolicyPackagePolicies1Inputs1StreamsVars5 defines model for . +type AgentPolicyPackagePolicies1Inputs1StreamsVars5 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} + +// AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties defines model for AgentPolicy.PackagePolicies.1.Inputs.1.Streams.Vars.AdditionalProperties. +type AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties struct { + union json.RawMessage +} + +// AgentPolicyPackagePolicies1Inputs1Vars0 defines model for . +type AgentPolicyPackagePolicies1Inputs1Vars0 = bool + +// AgentPolicyPackagePolicies1Inputs1Vars1 defines model for . +type AgentPolicyPackagePolicies1Inputs1Vars1 = string + +// AgentPolicyPackagePolicies1Inputs1Vars2 defines model for . +type AgentPolicyPackagePolicies1Inputs1Vars2 = float32 + +// AgentPolicyPackagePolicies1Inputs1Vars3 defines model for . +type AgentPolicyPackagePolicies1Inputs1Vars3 = []string + +// AgentPolicyPackagePolicies1Inputs1Vars4 defines model for . +type AgentPolicyPackagePolicies1Inputs1Vars4 = []float32 + +// AgentPolicyPackagePolicies1Inputs1Vars5 defines model for . +type AgentPolicyPackagePolicies1Inputs1Vars5 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} + +// AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties defines model for AgentPolicy.PackagePolicies.1.Inputs.1.Vars.AdditionalProperties. +type AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties struct { + union json.RawMessage +} + +// AgentPolicy_PackagePolicies_1_Inputs defines model for AgentPolicy.PackagePolicies.1.Inputs. +type AgentPolicy_PackagePolicies_1_Inputs struct { + union json.RawMessage +} + +// AgentPolicyPackagePolicies1Vars0 Package variable (see integration documentation for more information) +type AgentPolicyPackagePolicies1Vars0 map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` +} + +// AgentPolicyPackagePolicies1Vars1 Input/stream level variable (see integration documentation for more information) +type AgentPolicyPackagePolicies1Vars1 map[string]*AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties + +// AgentPolicyPackagePolicies1Vars10 defines model for . +type AgentPolicyPackagePolicies1Vars10 = bool + +// AgentPolicyPackagePolicies1Vars11 defines model for . +type AgentPolicyPackagePolicies1Vars11 = string + +// AgentPolicyPackagePolicies1Vars12 defines model for . +type AgentPolicyPackagePolicies1Vars12 = float32 + +// AgentPolicyPackagePolicies1Vars13 defines model for . +type AgentPolicyPackagePolicies1Vars13 = []string + +// AgentPolicyPackagePolicies1Vars14 defines model for . +type AgentPolicyPackagePolicies1Vars14 = []float32 + +// AgentPolicyPackagePolicies1Vars15 defines model for . +type AgentPolicyPackagePolicies1Vars15 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} + +// AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties defines model for AgentPolicy.PackagePolicies.1.Vars.1.AdditionalProperties. 
+type AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties struct { + union json.RawMessage +} + +// AgentPolicy_PackagePolicies_1_Vars defines model for AgentPolicy.PackagePolicies.1.Vars. +type AgentPolicy_PackagePolicies_1_Vars struct { + union json.RawMessage +} + +// AgentPolicy_PackagePolicies defines model for AgentPolicy.PackagePolicies. +type AgentPolicy_PackagePolicies struct { + union json.RawMessage +} + +// AgentPolicyStatus defines model for AgentPolicy.Status. +type AgentPolicyStatus string + +// AgentPolicyGlobalDataTagsItem defines model for agent_policy_global_data_tags_item. +type AgentPolicyGlobalDataTagsItem struct { + Name string `json:"name"` + Value AgentPolicyGlobalDataTagsItem_Value `json:"value"` +} + +// AgentPolicyGlobalDataTagsItemValue0 defines model for . +type AgentPolicyGlobalDataTagsItemValue0 = string + +// AgentPolicyGlobalDataTagsItemValue1 defines model for . +type AgentPolicyGlobalDataTagsItemValue1 = float32 + +// AgentPolicyGlobalDataTagsItem_Value defines model for AgentPolicyGlobalDataTagsItem.Value. +type AgentPolicyGlobalDataTagsItem_Value struct { + union json.RawMessage +} + +// Aggfield The name of the numeric field that is used in the aggregation. This property is required when `aggType` is `avg`, `max`, `min` or `sum`. +type Aggfield = string + +// Aggtype The type of aggregation to perform. +type Aggtype string + +// AuthType The type of authentication to use: basic, SSL, or none. +type AuthType string + +// BedrockConfig Defines properties for connectors when type is `.bedrock`. +type BedrockConfig struct { + // ApiUrl The Amazon Bedrock request URL. + ApiUrl string `json:"apiUrl"` + + // DefaultModel The generative artificial intelligence model for Amazon Bedrock to use. Current support is for the Anthropic Claude models. + DefaultModel *string `json:"defaultModel,omitempty"` +} + +// BedrockSecrets Defines secrets for connectors when type is `.bedrock`. +type BedrockSecrets struct { + // AccessKey The AWS access key for authentication. + AccessKey string `json:"accessKey"` + + // Secret The AWS secret for authentication. + Secret string `json:"secret"` +} + +// Ca A base64 encoded version of the certificate authority file that the connector can trust to sign and validate certificates. This option is available for all authentication types. +type Ca = string + +// CasesWebhookConfig Defines properties for connectors when type is `.cases-webhook`. +type CasesWebhookConfig struct { + // AuthType The type of authentication to use: basic, SSL, or none. + AuthType *AuthType `json:"authType,omitempty"` + + // Ca A base64 encoded version of the certificate authority file that the connector can trust to sign and validate certificates. This option is available for all authentication types. + Ca *Ca `json:"ca,omitempty"` + + // CertType If the `authType` is `webhook-authentication-ssl`, specifies whether the certificate authentication data is in a CRT and key file format or a PFX file format. + CertType *CertType `json:"certType,omitempty"` + + // CreateCommentJson A JSON payload sent to the create comment URL to create a case comment. You can use variables to add Kibana Cases data to the payload. The required variable is `case.comment`. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated once the Mustache variables have been placed when the REST method runs. 
Manually ensure that the JSON is valid, disregarding the Mustache variables, so the later validation will pass.
+ CreateCommentJson *string `json:"createCommentJson,omitempty"`
+
+ // CreateCommentMethod The REST API HTTP request method to create a case comment in the third-party system. Valid values are `patch`, `post`, and `put`.
+ CreateCommentMethod *CasesWebhookConfigCreateCommentMethod `json:"createCommentMethod,omitempty"`
+
+ // CreateCommentUrl The REST API URL to create a case comment by ID in the third-party system. You can use a variable to add the external system ID to the URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts.
+ CreateCommentUrl *string `json:"createCommentUrl,omitempty"`
+
+ // CreateIncidentJson A JSON payload sent to the create case URL to create a case. You can use variables to add case data to the payload. Required variables are `case.title` and `case.description`. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when the REST method runs. Manually ensure that the JSON is valid to avoid future validation errors; disregard Mustache variables during your review.
+ CreateIncidentJson string `json:"createIncidentJson"`
+
+ // CreateIncidentMethod The REST API HTTP request method to create a case in the third-party system. Valid values are `patch`, `post`, and `put`.
+ CreateIncidentMethod *CasesWebhookConfigCreateIncidentMethod `json:"createIncidentMethod,omitempty"`
+
+ // CreateIncidentResponseKey The JSON key in the create external case response that contains the case ID.
+ CreateIncidentResponseKey string `json:"createIncidentResponseKey"`
+
+ // CreateIncidentUrl The REST API URL to create a case in the third-party system. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts.
+ CreateIncidentUrl string `json:"createIncidentUrl"`
+
+ // GetIncidentResponseExternalTitleKey The JSON key in the get external case response that contains the case title.
+ GetIncidentResponseExternalTitleKey string `json:"getIncidentResponseExternalTitleKey"`
+
+ // GetIncidentUrl The REST API URL to get the case by ID from the third-party system. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. You can use a variable to add the external system ID to the URL. Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when the REST method runs. Manually ensure that the JSON is valid, disregarding the Mustache variables, so the later validation will pass.
+ GetIncidentUrl string `json:"getIncidentUrl"`
+
+ // HasAuth If true, a username and password for login type authentication must be provided.
+ HasAuth *HasAuth `json:"hasAuth,omitempty"`
+
+ // Headers A set of key-value pairs sent as headers with the request URLs for the create case, update case, get case, and create comment methods.
+ Headers *string `json:"headers,omitempty"`
+
+ // UpdateIncidentJson The JSON payload sent to the update case URL to update the case. You can use variables to add Kibana Cases data to the payload. Required variables are `case.title` and `case.description`.
Due to Mustache template variables (the text enclosed in triple braces, for example, `{{{case.title}}}`), the JSON is not validated when you create the connector. The JSON is validated after the Mustache variables have been placed when the REST method runs. Manually ensure that the JSON is valid to avoid future validation errors; disregard Mustache variables during your review.
+ UpdateIncidentJson string `json:"updateIncidentJson"`
+
+ // UpdateIncidentMethod The REST API HTTP request method to update the case in the third-party system. Valid values are `patch`, `post`, and `put`.
+ UpdateIncidentMethod *CasesWebhookConfigUpdateIncidentMethod `json:"updateIncidentMethod,omitempty"`
+
+ // UpdateIncidentUrl The REST API URL to update the case by ID in the third-party system. You can use a variable to add the external system ID to the URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts.
+ UpdateIncidentUrl string `json:"updateIncidentUrl"`
+
+ // VerificationMode Controls the verification of certificates. Use `full` to validate that the certificate has an issue date within the `not_before` and `not_after` dates, chains to a trusted certificate authority (CA), and has a hostname or IP address that matches the names within the certificate. Use `certificate` to validate the certificate and verify that it is signed by a trusted authority; this option does not check the certificate hostname. Use `none` to skip certificate validation.
+ VerificationMode *VerificationMode `json:"verificationMode,omitempty"`
+
+ // ViewIncidentUrl The URL to view the case in the external system. You can use variables to add the external system ID or external system title to the URL.
+ ViewIncidentUrl string `json:"viewIncidentUrl"`
+}
+
+// CasesWebhookConfigCreateCommentMethod The REST API HTTP request method to create a case comment in the third-party system. Valid values are `patch`, `post`, and `put`.
+type CasesWebhookConfigCreateCommentMethod string
+
+// CasesWebhookConfigCreateIncidentMethod The REST API HTTP request method to create a case in the third-party system. Valid values are `patch`, `post`, and `put`.
+type CasesWebhookConfigCreateIncidentMethod string
+
+// CasesWebhookConfigUpdateIncidentMethod The REST API HTTP request method to update the case in the third-party system. Valid values are `patch`, `post`, and `put`.
+type CasesWebhookConfigUpdateIncidentMethod string
+
+// CasesWebhookSecrets defines model for cases_webhook_secrets.
+type CasesWebhookSecrets struct {
+ // Crt If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the CRT or CERT file.
+ Crt *Crt `json:"crt,omitempty"`
+
+ // Key If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the KEY file.
+ Key *Key `json:"key,omitempty"`
+
+ // Password The password for HTTP basic authentication. If `hasAuth` is set to `true` and `authType` is `webhook-authentication-basic`, this property is required.
+ Password *string `json:"password,omitempty"`
+
+ // Pfx If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-pfx`, it is a base64 encoded version of the PFX or P12 file.
+ Pfx *Pfx `json:"pfx,omitempty"`
+
+ // User The username for HTTP basic authentication. If `hasAuth` is set to `true` and `authType` is `webhook-authentication-basic`, this property is required.
+ User *string `json:"user,omitempty"` +} + +// CertType If the `authType` is `webhook-authentication-ssl`, specifies whether the certificate authentication data is in a CRT and key file format or a PFX file format. +type CertType string + +// ConnectorResponse defines model for connector_response. +type ConnectorResponse struct { + Config *map[string]interface{} `json:"config,omitempty"` + + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` + + // Id The identifier for the connector. + Id string `json:"id"` + + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` + + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. + IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. + Name string `json:"name"` +} + +// CreateConnectorConfig The connector configuration details. +type CreateConnectorConfig struct { + AdditionalProperties map[string]interface{} `json:"-"` + union json.RawMessage +} + +// CreateConnectorSecrets defines model for create_connector_secrets. +type CreateConnectorSecrets struct { + AdditionalProperties map[string]interface{} `json:"-"` + union json.RawMessage +} + +// CreateParamResponse defines model for create_param_response. +type CreateParamResponse struct { + union json.RawMessage +} + +// CreateParamResponse0 defines model for . +type CreateParamResponse0 = []SyntheticsPostParameterResponse + +// CrowdstrikeConfig Defines config properties for connectors when type is `.crowdstrike`. +type CrowdstrikeConfig struct { + // Url The CrowdStrike tenant URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + Url string `json:"url"` +} + +// CrowdstrikeSecrets Defines secrets for connectors when type is `.crowdstrike`. +type CrowdstrikeSecrets struct { + // ClientId The CrowdStrike API client identifier. + ClientId string `json:"clientId"` + + // ClientSecret The CrowdStrike API client secret to authenticate the `clientId`. + ClientSecret string `json:"clientSecret"` +} + +// Crt If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the CRT or CERT file. +type Crt = string + +// D3securityConfig Defines properties for connectors when type is `.d3security`. +type D3securityConfig struct { + // Url The D3 Security API request URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + Url string `json:"url"` +} + +// D3securitySecrets Defines secrets for connectors when type is `.d3security`. +type D3securitySecrets struct { + // Token The D3 Security token. + Token string `json:"token"` +} + +// DefenderConfig Defines properties for connectors when type is `.microsoft_defender_endpoint`. +type DefenderConfig struct { + // ApiUrl The URL of the Microsoft Defender for Endpoint API. If you are using the `xpack.actions.allowedHosts` setting, make sure the hostname is added to the allowed hosts. + ApiUrl string `json:"apiUrl"` + + // ClientId The application (client) identifier for your app in the Azure portal. 
+ ClientId *string `json:"clientId,omitempty"`
+
+ // OAuthScope The OAuth scopes or permission sets for the Microsoft Defender for Endpoint API.
+ OAuthScope *string `json:"oAuthScope,omitempty"`
+
+ // OAuthServerUrl The OAuth server URL where authentication is sent and received for the Microsoft Defender for Endpoint API.
+ OAuthServerUrl *string `json:"oAuthServerUrl,omitempty"`
+
+ // TenantId The tenant identifier for your app in the Azure portal.
+ TenantId *string `json:"tenantId,omitempty"`
+}
+
+// DefenderSecrets Defines secrets for connectors when type is `.microsoft_defender_endpoint`.
+type DefenderSecrets struct {
+ // ClientSecret The client secret for your app in the Azure portal.
+ ClientSecret string `json:"clientSecret"`
+}
+
+// EmailConfig Defines properties for connectors when type is `.email`.
+type EmailConfig struct {
+ // ClientId The client identifier, which is a part of OAuth 2.0 client credentials authentication, in GUID format. If `service` is `exchange_server`, this property is required.
+ ClientId *string `json:"clientId,omitempty"`
+
+ // From The from address for all emails sent by the connector. It must be specified in `user@host-name` format.
+ From string `json:"from"`
+
+ // HasAuth Specifies whether a user and password are required inside the secrets configuration.
+ HasAuth *bool `json:"hasAuth,omitempty"`
+
+ // Host The host name of the service provider. If the `service` is `elastic_cloud` (for Elastic Cloud notifications) or one of Nodemailer's well-known email service providers, this property is ignored. If `service` is `other`, this property must be defined.
+ Host *string `json:"host,omitempty"`
+ OauthTokenUrl *string `json:"oauthTokenUrl,omitempty"`
+
+ // Port The port to connect to on the service provider. If the `service` is `elastic_cloud` (for Elastic Cloud notifications) or one of Nodemailer's well-known email service providers, this property is ignored. If `service` is `other`, this property must be defined.
+ Port *int `json:"port,omitempty"`
+
+ // Secure Specifies whether the connection to the service provider will use TLS. If the `service` is `elastic_cloud` (for Elastic Cloud notifications) or one of Nodemailer's well-known email service providers, this property is ignored.
+ Secure *bool `json:"secure,omitempty"`
+
+ // Service The name of the email service.
+ Service *EmailConfigService `json:"service,omitempty"`
+
+ // TenantId The tenant identifier, which is part of OAuth 2.0 client credentials authentication, in GUID format. If `service` is `exchange_server`, this property is required.
+ TenantId *string `json:"tenantId,omitempty"`
+}
+
+// EmailConfigService The name of the email service.
+type EmailConfigService string
+
+// EmailSecrets Defines secrets for connectors when type is `.email`.
+type EmailSecrets struct {
+ // ClientSecret The Microsoft Exchange client secret for OAuth 2.0 client credentials authentication. It must be URL-encoded. If `service` is `exchange_server`, this property is required.
+ ClientSecret *string `json:"clientSecret,omitempty"`
+
+ // Password The password for HTTP basic authentication. If `hasAuth` is set to `true`, this property is required.
+ Password *string `json:"password,omitempty"`
+
+ // User The username for HTTP basic authentication. If `hasAuth` is set to `true`, this property is required.
+ User *string `json:"user,omitempty"`
+}
+
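Several of the generated models above, such as `CreateConnectorConfig`, `CreateConnectorSecrets`, and `CreateParamResponse`, wrap a `union json.RawMessage` field to represent oneOf/anyOf schemas. The self-contained sketch below shows one way such a wrapper can round-trip JSON; it assumes the usual approach of storing the raw bytes and decoding them on demand, and the real generated helper methods may be named and shaped differently.

package main

import (
	"encoding/json"
	"fmt"
)

// unionWrapper is an illustrative stand-in for the generated union types.
type unionWrapper struct {
	union json.RawMessage
}

// UnmarshalJSON keeps the raw bytes so a caller can decode them later into
// whichever variant applies.
func (u *unionWrapper) UnmarshalJSON(b []byte) error {
	u.union = append(u.union[:0], b...)
	return nil
}

// MarshalJSON writes the stored raw bytes back out unchanged.
func (u unionWrapper) MarshalJSON() ([]byte, error) {
	if u.union == nil {
		return []byte("null"), nil
	}
	return u.union, nil
}

func main() {
	var u unionWrapper
	_ = json.Unmarshal([]byte(`{"apiUrl":"https://example.invalid"}`), &u)

	// Decode the stored variant once its concrete shape is known.
	var cfg struct {
		ApiUrl string `json:"apiUrl"`
	}
	_ = json.Unmarshal(u.union, &cfg)
	fmt.Println(cfg.ApiUrl)

	out, _ := json.Marshal(u)
	fmt.Println(string(out)) // {"apiUrl":"https://example.invalid"}
}

+// EnrollmentApiKey defines model for enrollment_api_key.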
+type EnrollmentApiKey struct { + // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. + Active bool `json:"active"` + + // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. + ApiKey string `json:"api_key"` + + // ApiKeyId The ID of the API key in the Security API. + ApiKeyId string `json:"api_key_id"` + CreatedAt string `json:"created_at"` + Hidden *bool `json:"hidden,omitempty"` + Id string `json:"id"` + + // Name The name of the enrollment API key. + Name *string `json:"name,omitempty"` + + // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. + PolicyId *string `json:"policy_id,omitempty"` +} + +// Excludehitsfrompreviousrun Indicates whether to exclude matches from previous runs. If `true`, you can avoid alert duplication by excluding documents that have already been detected by the previous rule run. This option is not available when a grouping field is specified. +type Excludehitsfrompreviousrun = bool + +// Filter A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package. +type Filter struct { + State *map[string]interface{} `json:"$state,omitempty"` + Meta *struct { + Alias *string `json:"alias,omitempty"` + ControlledBy *string `json:"controlledBy,omitempty"` + Disabled *bool `json:"disabled,omitempty"` + Field *string `json:"field,omitempty"` + Group *string `json:"group,omitempty"` + Index *string `json:"index,omitempty"` + IsMultiIndex *bool `json:"isMultiIndex,omitempty"` + Key *string `json:"key,omitempty"` + Negate *bool `json:"negate,omitempty"` + Params *map[string]interface{} `json:"params,omitempty"` + Type *string `json:"type,omitempty"` + Value *string `json:"value,omitempty"` + } `json:"meta,omitempty"` + Query *map[string]interface{} `json:"query,omitempty"` +} + +// GeminiConfig Defines properties for connectors when type is `.gemini`. +type GeminiConfig struct { + // ApiUrl The Google Gemini request URL. + ApiUrl string `json:"apiUrl"` + + // DefaultModel The generative artificial intelligence model for Google Gemini to use. + DefaultModel *string `json:"defaultModel,omitempty"` + + // GcpProjectID The Google ProjectID that has Vertex AI endpoint enabled. + GcpProjectID string `json:"gcpProjectID"` + + // GcpRegion The GCP region where the Vertex AI endpoint enabled. + GcpRegion string `json:"gcpRegion"` +} + +// GeminiSecrets Defines secrets for connectors when type is `.gemini`. +type GeminiSecrets struct { + // CredentialsJson The service account credentials JSON file. The service account should have Vertex AI user IAM role assigned to it. + CredentialsJson string `json:"credentialsJson"` +} + +// GenaiAzureConfig Defines properties for connectors when type is `.gen-ai` and the API provider is `Azure OpenAI`. +type GenaiAzureConfig struct { + // ApiProvider The OpenAI API provider. + ApiProvider GenaiAzureConfigApiProvider `json:"apiProvider"` + + // ApiUrl The OpenAI API endpoint. + ApiUrl string `json:"apiUrl"` +} + +// GenaiAzureConfigApiProvider The OpenAI API provider. +type GenaiAzureConfigApiProvider string + +// GenaiOpenaiConfig Defines properties for connectors when type is `.gen-ai` and the API provider is `OpenAI`. +type GenaiOpenaiConfig struct { + // ApiProvider The OpenAI API provider. + ApiProvider GenaiOpenaiConfigApiProvider `json:"apiProvider"` + + // ApiUrl The OpenAI API endpoint. + ApiUrl string `json:"apiUrl"` + + // DefaultModel The default model to use for requests. 
+ DefaultModel *string `json:"defaultModel,omitempty"` +} + +// GenaiOpenaiConfigApiProvider The OpenAI API provider. +type GenaiOpenaiConfigApiProvider string + +// GenaiOpenaiOtherConfig Defines properties for connectors when type is `.gen-ai` and the API provider is `Other` (OpenAI-compatible service), including optional PKI authentication. +type GenaiOpenaiOtherConfig struct { + // ApiProvider The OpenAI API provider. + ApiProvider GenaiOpenaiOtherConfigApiProvider `json:"apiProvider"` + + // ApiUrl The OpenAI-compatible API endpoint. + ApiUrl string `json:"apiUrl"` + + // CaData PEM-encoded CA certificate content. + CaData *string `json:"caData,omitempty"` + + // CertificateData PEM-encoded certificate content. + CertificateData *string `json:"certificateData,omitempty"` + + // DefaultModel The default model to use for requests. + DefaultModel string `json:"defaultModel"` + + // Headers Custom headers to include in requests. + Headers *map[string]string `json:"headers,omitempty"` + + // PrivateKeyData PEM-encoded private key content. + PrivateKeyData *string `json:"privateKeyData,omitempty"` + + // VerificationMode SSL verification mode for PKI authentication. + VerificationMode *GenaiOpenaiOtherConfigVerificationMode `json:"verificationMode,omitempty"` +} + +// GenaiOpenaiOtherConfigApiProvider The OpenAI API provider. +type GenaiOpenaiOtherConfigApiProvider string + +// GenaiOpenaiOtherConfigVerificationMode SSL verification mode for PKI authentication. +type GenaiOpenaiOtherConfigVerificationMode string + +// GenaiSecrets Defines secrets for connectors when type is `.gen-ai`. Supports both API key authentication (OpenAI, Azure OpenAI, and `Other`) and PKI authentication (`Other` provider only). PKI fields must be base64-encoded PEM content. +type GenaiSecrets struct { + // ApiKey The API key for authentication. For OpenAI and Azure OpenAI providers, it is required. For the `Other` provider, it is required if you do not use PKI authentication. With PKI, you can also optionally include an API key if the OpenAI-compatible service supports or requires one. + ApiKey *string `json:"apiKey,omitempty"` + + // CaData Base64-encoded PEM CA certificate content for PKI authentication (Other provider only). Optional. + CaData *string `json:"caData,omitempty"` + + // CertificateData Base64-encoded PEM certificate content for PKI authentication (Other provider only). Required for PKI. + CertificateData *string `json:"certificateData,omitempty"` + + // PrivateKeyData Base64-encoded PEM private key content for PKI authentication (Other provider only). Required for PKI. + PrivateKeyData *string `json:"privateKeyData,omitempty"` +} + +// GetDataViewsResponseItem defines model for get_data_views_response_item. +type GetDataViewsResponseItem struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + Title *string `json:"title,omitempty"` + TypeMeta *map[string]interface{} `json:"typeMeta,omitempty"` +} + +// Groupby Indicates whether the aggregation is applied over all documents (`all`) or split into groups (`top`) using a grouping field (`termField`). If grouping is used, an alert will be created for each group when it exceeds the threshold; only the top groups (up to `termSize` number of groups) are checked. +type Groupby string + +// HasAuth If true, a username and password for login type authentication must be provided. +type HasAuth = bool + +// IndexConfig Defines properties for connectors when type is `.index`. 
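+//
+// As a sketch, a minimal config for an index connector writing to a hypothetical
+// "alerts-example" index could be built like this (the index name and refresh
+// flag are placeholders, not defaults):
+//
+//	refresh := true
+//	cfg := IndexConfig{
+//		Index:   "alerts-example",
+//		Refresh: &refresh,
+//	}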
+type IndexConfig struct { + // ExecutionTimeField A field that indicates when the document was indexed. + ExecutionTimeField *string `json:"executionTimeField,omitempty"` + + // Index The Elasticsearch index to be written to. + Index string `json:"index"` + + // Refresh The refresh policy for the write request, which affects when changes are made visible to search. Refer to the refresh setting for Elasticsearch document APIs. + Refresh *bool `json:"refresh,omitempty"` +} + +// JiraConfig Defines properties for connectors when type is `.jira`. +type JiraConfig struct { + // ApiUrl The Jira instance URL. + ApiUrl string `json:"apiUrl"` + + // ProjectKey The Jira project key. + ProjectKey string `json:"projectKey"` +} + +// JiraSecrets Defines secrets for connectors when type is `.jira`. +type JiraSecrets struct { + // ApiToken The Jira API authentication token for HTTP basic authentication. + ApiToken string `json:"apiToken"` + + // Email The account email for HTTP Basic authentication. + Email string `json:"email"` +} + +// Key If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the KEY file. +type Key = string + +// NewOutputElasticsearch defines model for new_output_elasticsearch. +type NewOutputElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + Preset *NewOutputElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + Key *NewOutputElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Shipper *NewOutputShipper `json:"shipper,omitempty"` + Ssl *NewOutputSsl `json:"ssl,omitempty"` + Type NewOutputElasticsearchType `json:"type"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` +} + +// NewOutputElasticsearchPreset defines model for NewOutputElasticsearch.Preset. +type NewOutputElasticsearchPreset string + +// NewOutputElasticsearchSecretsSslKey0 defines model for . +type NewOutputElasticsearchSecretsSslKey0 struct { + Id string `json:"id"` +} + +// NewOutputElasticsearchSecretsSslKey1 defines model for . +type NewOutputElasticsearchSecretsSslKey1 = string + +// NewOutputElasticsearch_Secrets_Ssl_Key defines model for NewOutputElasticsearch.Secrets.Ssl.Key. +type NewOutputElasticsearch_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// NewOutputElasticsearchType defines model for NewOutputElasticsearch.Type. +type NewOutputElasticsearchType string + +// NewOutputKafka defines model for new_output_kafka. 
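+//
+// As a sketch, a minimal Kafka output request might be assembled as follows (the
+// host, topic, and the "kafka" and "none" string values are placeholders and must
+// match what the Fleet API actually accepts):
+//
+//	topic := "logs-example"
+//	out := NewOutputKafka{
+//		Name:     "example-kafka",
+//		Type:     NewOutputKafkaType("kafka"),
+//		AuthType: NewOutputKafkaAuthType("none"),
+//		Hosts:    []string{"broker.example.com:9092"},
+//		Topic:    &topic,
+//	}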
+type NewOutputKafka struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + AuthType NewOutputKafkaAuthType `json:"auth_type"` + BrokerTimeout *float32 `json:"broker_timeout,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ClientId *string `json:"client_id,omitempty"` + Compression *NewOutputKafkaCompression `json:"compression,omitempty"` + CompressionLevel interface{} `json:"compression_level"` + ConfigYaml *string `json:"config_yaml,omitempty"` + ConnectionType interface{} `json:"connection_type"` + Hash *struct { + Hash *string `json:"hash,omitempty"` + Random *bool `json:"random,omitempty"` + } `json:"hash,omitempty"` + Headers *[]struct { + Key string `json:"key"` + Value string `json:"value"` + } `json:"headers,omitempty"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Key *string `json:"key,omitempty"` + Name string `json:"name"` + Partition *NewOutputKafkaPartition `json:"partition,omitempty"` + Password interface{} `json:"password"` + ProxyId *string `json:"proxy_id,omitempty"` + Random *struct { + GroupEvents *float32 `json:"group_events,omitempty"` + } `json:"random,omitempty"` + RequiredAcks *NewOutputKafkaRequiredAcks `json:"required_acks,omitempty"` + RoundRobin *struct { + GroupEvents *float32 `json:"group_events,omitempty"` + } `json:"round_robin,omitempty"` + Sasl *struct { + Mechanism *NewOutputKafkaSaslMechanism `json:"mechanism,omitempty"` + } `json:"sasl,omitempty"` + Secrets *struct { + Password *NewOutputKafka_Secrets_Password `json:"password,omitempty"` + Ssl *struct { + Key NewOutputKafka_Secrets_Ssl_Key `json:"key"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Shipper *NewOutputShipper `json:"shipper,omitempty"` + Ssl *NewOutputSsl `json:"ssl,omitempty"` + Timeout *float32 `json:"timeout,omitempty"` + Topic *string `json:"topic,omitempty"` + Type NewOutputKafkaType `json:"type"` + Username interface{} `json:"username"` + Version *string `json:"version,omitempty"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` +} + +// NewOutputKafkaAuthType defines model for NewOutputKafka.AuthType. +type NewOutputKafkaAuthType string + +// NewOutputKafkaCompression defines model for NewOutputKafka.Compression. +type NewOutputKafkaCompression string + +// NewOutputKafkaPartition defines model for NewOutputKafka.Partition. +type NewOutputKafkaPartition string + +// NewOutputKafkaRequiredAcks defines model for NewOutputKafka.RequiredAcks. +type NewOutputKafkaRequiredAcks int + +// NewOutputKafkaSaslMechanism defines model for NewOutputKafka.Sasl.Mechanism. +type NewOutputKafkaSaslMechanism string + +// NewOutputKafkaSecretsPassword0 defines model for . +type NewOutputKafkaSecretsPassword0 struct { + Id string `json:"id"` +} + +// NewOutputKafkaSecretsPassword1 defines model for . +type NewOutputKafkaSecretsPassword1 = string + +// NewOutputKafka_Secrets_Password defines model for NewOutputKafka.Secrets.Password. +type NewOutputKafka_Secrets_Password struct { + union json.RawMessage +} + +// NewOutputKafkaSecretsSslKey0 defines model for . +type NewOutputKafkaSecretsSslKey0 struct { + Id string `json:"id"` +} + +// NewOutputKafkaSecretsSslKey1 defines model for . 
+type NewOutputKafkaSecretsSslKey1 = string + +// NewOutputKafka_Secrets_Ssl_Key defines model for NewOutputKafka.Secrets.Ssl.Key. +type NewOutputKafka_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// NewOutputKafkaType defines model for NewOutputKafka.Type. +type NewOutputKafkaType string + +// NewOutputLogstash defines model for new_output_logstash. +type NewOutputLogstash struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + Key *NewOutputLogstash_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Shipper *NewOutputShipper `json:"shipper,omitempty"` + Ssl *NewOutputSsl `json:"ssl,omitempty"` + Type NewOutputLogstashType `json:"type"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` +} + +// NewOutputLogstashSecretsSslKey0 defines model for . +type NewOutputLogstashSecretsSslKey0 struct { + Id string `json:"id"` +} + +// NewOutputLogstashSecretsSslKey1 defines model for . +type NewOutputLogstashSecretsSslKey1 = string + +// NewOutputLogstash_Secrets_Ssl_Key defines model for NewOutputLogstash.Secrets.Ssl.Key. +type NewOutputLogstash_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// NewOutputLogstashType defines model for NewOutputLogstash.Type. +type NewOutputLogstashType string + +// NewOutputRemoteElasticsearch defines model for new_output_remote_elasticsearch. 
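+//
+// As a sketch, a minimal remote Elasticsearch output might look like the
+// following (the host, token, and the "remote_elasticsearch" string are
+// placeholders and must match what the Fleet API actually accepts):
+//
+//	token := "AAEAAWVsYXN0aWMtcGxhY2Vob2xkZXI="
+//	out := NewOutputRemoteElasticsearch{
+//		Name:         "example-remote-es",
+//		Type:         NewOutputRemoteElasticsearchType("remote_elasticsearch"),
+//		Hosts:        []string{"https://remote.example.com:9200"},
+//		ServiceToken: &token,
+//	}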
+type NewOutputRemoteElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + KibanaApiKey *string `json:"kibana_api_key,omitempty"` + KibanaUrl *string `json:"kibana_url,omitempty"` + Name string `json:"name"` + Preset *NewOutputRemoteElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + ServiceToken *NewOutputRemoteElasticsearch_Secrets_ServiceToken `json:"service_token,omitempty"` + Ssl *struct { + Key *NewOutputRemoteElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + ServiceToken *string `json:"service_token,omitempty"` + Shipper *NewOutputShipper `json:"shipper,omitempty"` + Ssl *NewOutputSsl `json:"ssl,omitempty"` + SyncIntegrations *bool `json:"sync_integrations,omitempty"` + SyncUninstalledIntegrations *bool `json:"sync_uninstalled_integrations,omitempty"` + Type NewOutputRemoteElasticsearchType `json:"type"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` +} + +// NewOutputRemoteElasticsearchPreset defines model for NewOutputRemoteElasticsearch.Preset. +type NewOutputRemoteElasticsearchPreset string + +// NewOutputRemoteElasticsearchSecretsServiceToken0 defines model for . +type NewOutputRemoteElasticsearchSecretsServiceToken0 struct { + Id string `json:"id"` +} + +// NewOutputRemoteElasticsearchSecretsServiceToken1 defines model for . +type NewOutputRemoteElasticsearchSecretsServiceToken1 = string + +// NewOutputRemoteElasticsearch_Secrets_ServiceToken defines model for NewOutputRemoteElasticsearch.Secrets.ServiceToken. +type NewOutputRemoteElasticsearch_Secrets_ServiceToken struct { + union json.RawMessage +} + +// NewOutputRemoteElasticsearchSecretsSslKey0 defines model for . +type NewOutputRemoteElasticsearchSecretsSslKey0 struct { + Id string `json:"id"` +} + +// NewOutputRemoteElasticsearchSecretsSslKey1 defines model for . +type NewOutputRemoteElasticsearchSecretsSslKey1 = string + +// NewOutputRemoteElasticsearch_Secrets_Ssl_Key defines model for NewOutputRemoteElasticsearch.Secrets.Ssl.Key. +type NewOutputRemoteElasticsearch_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// NewOutputRemoteElasticsearchType defines model for NewOutputRemoteElasticsearch.Type. +type NewOutputRemoteElasticsearchType string + +// NewOutputShipper defines model for new_output_shipper. 
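+//
+// All shipper settings are optional pointers; as a sketch, enabling the disk
+// queue with a placeholder size looks like this (4096 is illustrative, not a
+// recommended value):
+//
+//	enabled := true
+//	maxSize := float32(4096)
+//	shipper := NewOutputShipper{
+//		DiskQueueEnabled: &enabled,
+//		DiskQueueMaxSize: &maxSize,
+//	}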
+type NewOutputShipper struct { + CompressionLevel *float32 `json:"compression_level,omitempty"` + DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` + DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` + DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` + DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` + DiskQueuePath *string `json:"disk_queue_path,omitempty"` + Loadbalance *bool `json:"loadbalance,omitempty"` + MaxBatchBytes *float32 `json:"max_batch_bytes,omitempty"` + MemQueueEvents *float32 `json:"mem_queue_events,omitempty"` + QueueFlushTimeout *float32 `json:"queue_flush_timeout,omitempty"` +} + +// NewOutputSsl defines model for new_output_ssl. +type NewOutputSsl struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + VerificationMode *NewOutputSslVerificationMode `json:"verification_mode,omitempty"` +} + +// NewOutputSslVerificationMode defines model for NewOutputSsl.VerificationMode. +type NewOutputSslVerificationMode string + +// NewOutputUnion defines model for new_output_union. +type NewOutputUnion struct { + union json.RawMessage +} + +// OpsgenieConfig Defines properties for connectors when type is `.opsgenie`. +type OpsgenieConfig struct { + // ApiUrl The Opsgenie URL. For example, `https://api.opsgenie.com` or `https://api.eu.opsgenie.com`. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + ApiUrl string `json:"apiUrl"` +} + +// OpsgenieSecrets Defines secrets for connectors when type is `.opsgenie`. +type OpsgenieSecrets struct { + // ApiKey The Opsgenie API authentication key for HTTP Basic authentication. + ApiKey string `json:"apiKey"` +} + +// OutputElasticsearch defines model for output_elasticsearch. +type OutputElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + Preset *OutputElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *OutputElasticsearch_Secrets `json:"secrets,omitempty"` + Shipper *OutputShipper `json:"shipper,omitempty"` + Ssl *OutputSsl `json:"ssl,omitempty"` + Type OutputElasticsearchType `json:"type"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputElasticsearchPreset defines model for OutputElasticsearch.Preset. +type OutputElasticsearchPreset string + +// OutputElasticsearchSecretsSslKey0 defines model for . +type OutputElasticsearchSecretsSslKey0 struct { + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputElasticsearchSecretsSslKey1 defines model for . +type OutputElasticsearchSecretsSslKey1 = string + +// OutputElasticsearch_Secrets_Ssl_Key defines model for OutputElasticsearch.Secrets.Ssl.Key. 
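+//
+// The wrapped raw JSON exists because the API accepts either a secret reference
+// object or a plain string for the key. As a sketch, the two JSON shapes carried
+// by the union are (values are placeholders):
+//
+//	{"id": "secret-reference-id"}     // OutputElasticsearchSecretsSslKey0
+//	"-----BEGIN PRIVATE KEY-----..."  // OutputElasticsearchSecretsSslKey1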
+type OutputElasticsearch_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// OutputElasticsearch_Secrets_Ssl defines model for OutputElasticsearch.Secrets.Ssl. +type OutputElasticsearch_Secrets_Ssl struct { + Key *OutputElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputElasticsearch_Secrets defines model for OutputElasticsearch.Secrets. +type OutputElasticsearch_Secrets struct { + Ssl *OutputElasticsearch_Secrets_Ssl `json:"ssl,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputElasticsearchType defines model for OutputElasticsearch.Type. +type OutputElasticsearchType string + +// OutputKafka defines model for output_kafka. +type OutputKafka struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + AuthType OutputKafkaAuthType `json:"auth_type"` + BrokerTimeout *float32 `json:"broker_timeout,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ClientId *string `json:"client_id,omitempty"` + Compression *OutputKafkaCompression `json:"compression,omitempty"` + CompressionLevel interface{} `json:"compression_level"` + ConfigYaml *string `json:"config_yaml,omitempty"` + ConnectionType interface{} `json:"connection_type"` + Hash *OutputKafka_Hash `json:"hash,omitempty"` + Headers *[]OutputKafka_Headers_Item `json:"headers,omitempty"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Key *string `json:"key,omitempty"` + Name string `json:"name"` + Partition *OutputKafkaPartition `json:"partition,omitempty"` + Password interface{} `json:"password"` + ProxyId *string `json:"proxy_id,omitempty"` + Random *OutputKafka_Random `json:"random,omitempty"` + RequiredAcks *OutputKafkaRequiredAcks `json:"required_acks,omitempty"` + RoundRobin *OutputKafka_RoundRobin `json:"round_robin,omitempty"` + Sasl *OutputKafka_Sasl `json:"sasl,omitempty"` + Secrets *OutputKafka_Secrets `json:"secrets,omitempty"` + Shipper *OutputShipper `json:"shipper,omitempty"` + Ssl *OutputSsl `json:"ssl,omitempty"` + Timeout *float32 `json:"timeout,omitempty"` + Topic *string `json:"topic,omitempty"` + Type OutputKafkaType `json:"type"` + Username interface{} `json:"username"` + Version *string `json:"version,omitempty"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaAuthType defines model for OutputKafka.AuthType. +type OutputKafkaAuthType string + +// OutputKafkaCompression defines model for OutputKafka.Compression. +type OutputKafkaCompression string + +// OutputKafka_Hash defines model for OutputKafka.Hash. +type OutputKafka_Hash struct { + Hash *string `json:"hash,omitempty"` + Random *bool `json:"random,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafka_Headers_Item defines model for output_kafka.headers.Item. +type OutputKafka_Headers_Item struct { + Key string `json:"key"` + Value string `json:"value"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaPartition defines model for OutputKafka.Partition. +type OutputKafkaPartition string + +// OutputKafka_Random defines model for OutputKafka.Random. 
+type OutputKafka_Random struct { + GroupEvents *float32 `json:"group_events,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaRequiredAcks defines model for OutputKafka.RequiredAcks. +type OutputKafkaRequiredAcks int + +// OutputKafka_RoundRobin defines model for OutputKafka.RoundRobin. +type OutputKafka_RoundRobin struct { + GroupEvents *float32 `json:"group_events,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaSaslMechanism defines model for OutputKafka.Sasl.Mechanism. +type OutputKafkaSaslMechanism string + +// OutputKafka_Sasl defines model for OutputKafka.Sasl. +type OutputKafka_Sasl struct { + Mechanism *OutputKafkaSaslMechanism `json:"mechanism,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaSecretsPassword0 defines model for . +type OutputKafkaSecretsPassword0 struct { + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaSecretsPassword1 defines model for . +type OutputKafkaSecretsPassword1 = string + +// OutputKafka_Secrets_Password defines model for OutputKafka.Secrets.Password. +type OutputKafka_Secrets_Password struct { + union json.RawMessage +} + +// OutputKafkaSecretsSslKey0 defines model for . +type OutputKafkaSecretsSslKey0 struct { + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaSecretsSslKey1 defines model for . +type OutputKafkaSecretsSslKey1 = string + +// OutputKafka_Secrets_Ssl_Key defines model for OutputKafka.Secrets.Ssl.Key. +type OutputKafka_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// OutputKafka_Secrets_Ssl defines model for OutputKafka.Secrets.Ssl. +type OutputKafka_Secrets_Ssl struct { + Key OutputKafka_Secrets_Ssl_Key `json:"key"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafka_Secrets defines model for OutputKafka.Secrets. +type OutputKafka_Secrets struct { + Password *OutputKafka_Secrets_Password `json:"password,omitempty"` + Ssl *OutputKafka_Secrets_Ssl `json:"ssl,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputKafkaType defines model for OutputKafka.Type. +type OutputKafkaType string + +// OutputLogstash defines model for output_logstash. +type OutputLogstash struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *OutputLogstash_Secrets `json:"secrets,omitempty"` + Shipper *OutputShipper `json:"shipper,omitempty"` + Ssl *OutputSsl `json:"ssl,omitempty"` + Type OutputLogstashType `json:"type"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputLogstashSecretsSslKey0 defines model for . +type OutputLogstashSecretsSslKey0 struct { + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputLogstashSecretsSslKey1 defines model for . 
+type OutputLogstashSecretsSslKey1 = string + +// OutputLogstash_Secrets_Ssl_Key defines model for OutputLogstash.Secrets.Ssl.Key. +type OutputLogstash_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// OutputLogstash_Secrets_Ssl defines model for OutputLogstash.Secrets.Ssl. +type OutputLogstash_Secrets_Ssl struct { + Key *OutputLogstash_Secrets_Ssl_Key `json:"key,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputLogstash_Secrets defines model for OutputLogstash.Secrets. +type OutputLogstash_Secrets struct { + Ssl *OutputLogstash_Secrets_Ssl `json:"ssl,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputLogstashType defines model for OutputLogstash.Type. +type OutputLogstashType string + +// OutputRemoteElasticsearch defines model for output_remote_elasticsearch. +type OutputRemoteElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` + Hosts []string `json:"hosts"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + KibanaApiKey *string `json:"kibana_api_key,omitempty"` + KibanaUrl *string `json:"kibana_url,omitempty"` + Name string `json:"name"` + Preset *OutputRemoteElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *OutputRemoteElasticsearch_Secrets `json:"secrets,omitempty"` + ServiceToken *string `json:"service_token,omitempty"` + Shipper *OutputShipper `json:"shipper,omitempty"` + Ssl *OutputSsl `json:"ssl,omitempty"` + SyncIntegrations *bool `json:"sync_integrations,omitempty"` + SyncUninstalledIntegrations *bool `json:"sync_uninstalled_integrations,omitempty"` + Type OutputRemoteElasticsearchType `json:"type"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputRemoteElasticsearchPreset defines model for OutputRemoteElasticsearch.Preset. +type OutputRemoteElasticsearchPreset string + +// OutputRemoteElasticsearchSecretsServiceToken0 defines model for . +type OutputRemoteElasticsearchSecretsServiceToken0 struct { + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputRemoteElasticsearchSecretsServiceToken1 defines model for . +type OutputRemoteElasticsearchSecretsServiceToken1 = string + +// OutputRemoteElasticsearch_Secrets_ServiceToken defines model for OutputRemoteElasticsearch.Secrets.ServiceToken. +type OutputRemoteElasticsearch_Secrets_ServiceToken struct { + union json.RawMessage +} + +// OutputRemoteElasticsearchSecretsSslKey0 defines model for . +type OutputRemoteElasticsearchSecretsSslKey0 struct { + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputRemoteElasticsearchSecretsSslKey1 defines model for . +type OutputRemoteElasticsearchSecretsSslKey1 = string + +// OutputRemoteElasticsearch_Secrets_Ssl_Key defines model for OutputRemoteElasticsearch.Secrets.Ssl.Key. +type OutputRemoteElasticsearch_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// OutputRemoteElasticsearch_Secrets_Ssl defines model for OutputRemoteElasticsearch.Secrets.Ssl. 
+type OutputRemoteElasticsearch_Secrets_Ssl struct { + Key *OutputRemoteElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputRemoteElasticsearch_Secrets defines model for OutputRemoteElasticsearch.Secrets. +type OutputRemoteElasticsearch_Secrets struct { + ServiceToken *OutputRemoteElasticsearch_Secrets_ServiceToken `json:"service_token,omitempty"` + Ssl *OutputRemoteElasticsearch_Secrets_Ssl `json:"ssl,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputRemoteElasticsearchType defines model for OutputRemoteElasticsearch.Type. +type OutputRemoteElasticsearchType string + +// OutputShipper defines model for output_shipper. +type OutputShipper struct { + CompressionLevel *float32 `json:"compression_level,omitempty"` + DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` + DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` + DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` + DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` + DiskQueuePath *string `json:"disk_queue_path,omitempty"` + Loadbalance *bool `json:"loadbalance,omitempty"` + MaxBatchBytes *float32 `json:"max_batch_bytes,omitempty"` + MemQueueEvents *float32 `json:"mem_queue_events,omitempty"` + QueueFlushTimeout *float32 `json:"queue_flush_timeout,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputSsl defines model for output_ssl. +type OutputSsl struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + VerificationMode *OutputSslVerificationMode `json:"verification_mode,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// OutputSslVerificationMode defines model for OutputSsl.VerificationMode. +type OutputSslVerificationMode string + +// OutputUnion defines model for output_union. +type OutputUnion struct { + union json.RawMessage +} + +// PackageInfo defines model for package_info. 
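+//
+// Assets, Name, Title, and Version are the only non-optional fields here; as a
+// sketch, a minimal value for a hypothetical package could be (the names and
+// version are placeholders):
+//
+//	pkg := PackageInfo{
+//		Name:    "nginx",
+//		Title:   "Nginx",
+//		Version: "1.0.0",
+//		Assets:  map[string]interface{}{},
+//	}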
+type PackageInfo struct { + Agent *struct { + Privileges *struct { + Root *bool `json:"root,omitempty"` + } `json:"privileges,omitempty"` + } `json:"agent,omitempty"` + AssetTags *[]struct { + AssetIds *[]string `json:"asset_ids,omitempty"` + AssetTypes *[]string `json:"asset_types,omitempty"` + Text string `json:"text"` + } `json:"asset_tags,omitempty"` + Assets map[string]interface{} `json:"assets"` + Categories *[]string `json:"categories,omitempty"` + Conditions *PackageInfo_Conditions `json:"conditions,omitempty"` + DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` + Description *string `json:"description,omitempty"` + Discovery *PackageInfo_Discovery `json:"discovery,omitempty"` + Download *string `json:"download,omitempty"` + Elasticsearch *map[string]interface{} `json:"elasticsearch,omitempty"` + FormatVersion *string `json:"format_version,omitempty"` + Icons *[]PackageInfo_Icons_Item `json:"icons,omitempty"` + InstallationInfo *PackageInfo_InstallationInfo `json:"installationInfo,omitempty"` + Internal *bool `json:"internal,omitempty"` + KeepPoliciesUpToDate *bool `json:"keepPoliciesUpToDate,omitempty"` + LatestVersion *string `json:"latestVersion,omitempty"` + License *string `json:"license,omitempty"` + LicensePath *string `json:"licensePath,omitempty"` + Name string `json:"name"` + Notice *string `json:"notice,omitempty"` + Owner *PackageInfo_Owner `json:"owner,omitempty"` + Path *string `json:"path,omitempty"` + PolicyTemplates *[]map[string]interface{} `json:"policy_templates,omitempty"` + Readme *string `json:"readme,omitempty"` + Release *PackageInfoRelease `json:"release,omitempty"` + Screenshots *[]struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"screenshots,omitempty"` + SignaturePath *string `json:"signature_path,omitempty"` + Source *PackageInfo_Source `json:"source,omitempty"` + Status *string `json:"status,omitempty"` + Title string `json:"title"` + Type *PackageInfo_Type `json:"type,omitempty"` + Vars *[]map[string]interface{} `json:"vars,omitempty"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Conditions_Elastic defines model for PackageInfo.Conditions.Elastic. +type PackageInfo_Conditions_Elastic struct { + Capabilities *[]string `json:"capabilities,omitempty"` + Subscription *string `json:"subscription,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Conditions_Kibana defines model for PackageInfo.Conditions.Kibana. +type PackageInfo_Conditions_Kibana struct { + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Conditions defines model for PackageInfo.Conditions. +type PackageInfo_Conditions struct { + Elastic *PackageInfo_Conditions_Elastic `json:"elastic,omitempty"` + Kibana *PackageInfo_Conditions_Kibana `json:"kibana,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Discovery_Datasets_Item defines model for PackageInfo.Discovery.Datasets.Item. +type PackageInfo_Discovery_Datasets_Item struct { + Name string `json:"name"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Discovery_Fields_Item defines model for PackageInfo.Discovery.Fields.Item. 
+type PackageInfo_Discovery_Fields_Item struct { + Name string `json:"name"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Discovery defines model for PackageInfo.Discovery. +type PackageInfo_Discovery struct { + Datasets *[]PackageInfo_Discovery_Datasets_Item `json:"datasets,omitempty"` + Fields *[]PackageInfo_Discovery_Fields_Item `json:"fields,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_Icons_Item defines model for package_info.icons.Item. +type PackageInfo_Icons_Item struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 defines model for PackageInfo.InstallationInfo.AdditionalSpacesInstalledKibana.Type.0. +type PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 string + +// PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 defines model for . +type PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 = string + +// PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type defines model for PackageInfo.InstallationInfo.AdditionalSpacesInstalledKibana.Type. +type PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type struct { + union json.RawMessage +} + +// PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item defines model for PackageInfo.InstallationInfo.AdditionalSpacesInstalledKibana.Item. +type PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features defines model for PackageInfo.InstallationInfo.ExperimentalDataStreamFeatures.Features. +type PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item defines model for PackageInfo.InstallationInfo.ExperimentalDataStreamFeatures.Item. +type PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item struct { + DataStream string `json:"data_stream"` + Features PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features `json:"features"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoInstallationInfoInstallSource defines model for PackageInfo.InstallationInfo.InstallSource. +type PackageInfoInstallationInfoInstallSource string + +// PackageInfoInstallationInfoInstallStatus defines model for PackageInfo.InstallationInfo.InstallStatus. +type PackageInfoInstallationInfoInstallStatus string + +// PackageInfoInstallationInfoInstalledEsType defines model for PackageInfo.InstallationInfo.InstalledEs.Type. 
+type PackageInfoInstallationInfoInstalledEsType string + +// PackageInfo_InstallationInfo_InstalledEs_Item defines model for PackageInfo.InstallationInfo.InstalledEs.Item. +type PackageInfo_InstallationInfo_InstalledEs_Item struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type PackageInfoInstallationInfoInstalledEsType `json:"type"` + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoInstallationInfoInstalledKibanaType0 defines model for PackageInfo.InstallationInfo.InstalledKibana.Type.0. +type PackageInfoInstallationInfoInstalledKibanaType0 string + +// PackageInfoInstallationInfoInstalledKibanaType1 defines model for . +type PackageInfoInstallationInfoInstalledKibanaType1 = string + +// PackageInfo_InstallationInfo_InstalledKibana_Type defines model for PackageInfo.InstallationInfo.InstalledKibana.Type. +type PackageInfo_InstallationInfo_InstalledKibana_Type struct { + union json.RawMessage +} + +// PackageInfo_InstallationInfo_InstalledKibana_Item defines model for PackageInfo.InstallationInfo.InstalledKibana.Item. +type PackageInfo_InstallationInfo_InstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PackageInfo_InstallationInfo_InstalledKibana_Type `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_InstallationInfo_LatestExecutedState defines model for PackageInfo.InstallationInfo.LatestExecutedState. +type PackageInfo_InstallationInfo_LatestExecutedState struct { + Error *string `json:"error,omitempty"` + Name *string `json:"name,omitempty"` + StartedAt *string `json:"started_at,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error defines model for PackageInfo.InstallationInfo.LatestInstallFailedAttempts.Error. +type PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error struct { + Message string `json:"message"` + Name string `json:"name"` + Stack *string `json:"stack,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item defines model for PackageInfo.InstallationInfo.LatestInstallFailedAttempts.Item. +type PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item struct { + CreatedAt string `json:"created_at"` + Error PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error `json:"error"` + TargetVersion string `json:"target_version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoInstallationInfoVerificationStatus defines model for PackageInfo.InstallationInfo.VerificationStatus. +type PackageInfoInstallationInfoVerificationStatus string + +// PackageInfo_InstallationInfo defines model for PackageInfo.InstallationInfo. 
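+//
+// InstallSource, InstallStatus, and VerificationStatus are open string types; as
+// a sketch, a successfully installed registry package might report values such as
+// the following (illustrative examples, not an exhaustive list):
+//
+//	src := PackageInfoInstallationInfoInstallSource("registry")
+//	status := PackageInfoInstallationInfoInstallStatus("installed")
+//	verified := PackageInfoInstallationInfoVerificationStatus("verified")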
+type PackageInfo_InstallationInfo struct { + AdditionalSpacesInstalledKibana *map[string][]PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item `json:"additional_spaces_installed_kibana,omitempty"` + CreatedAt *string `json:"created_at,omitempty"` + ExperimentalDataStreamFeatures *[]PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item `json:"experimental_data_stream_features,omitempty"` + InstallFormatSchemaVersion *string `json:"install_format_schema_version,omitempty"` + InstallSource PackageInfoInstallationInfoInstallSource `json:"install_source"` + InstallStatus PackageInfoInstallationInfoInstallStatus `json:"install_status"` + InstalledEs []PackageInfo_InstallationInfo_InstalledEs_Item `json:"installed_es"` + InstalledKibana []PackageInfo_InstallationInfo_InstalledKibana_Item `json:"installed_kibana"` + InstalledKibanaSpaceId *string `json:"installed_kibana_space_id,omitempty"` + LatestExecutedState *PackageInfo_InstallationInfo_LatestExecutedState `json:"latest_executed_state,omitempty"` + LatestInstallFailedAttempts *[]PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item `json:"latest_install_failed_attempts,omitempty"` + Name string `json:"name"` + Namespaces *[]string `json:"namespaces,omitempty"` + PreviousVersion *string `json:"previous_version,omitempty"` + Type string `json:"type"` + UpdatedAt *string `json:"updated_at,omitempty"` + VerificationKeyId *string `json:"verification_key_id,omitempty"` + VerificationStatus PackageInfoInstallationInfoVerificationStatus `json:"verification_status"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoOwnerType defines model for PackageInfo.Owner.Type. +type PackageInfoOwnerType string + +// PackageInfo_Owner defines model for PackageInfo.Owner. +type PackageInfo_Owner struct { + Github *string `json:"github,omitempty"` + Type *PackageInfoOwnerType `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoRelease defines model for PackageInfo.Release. +type PackageInfoRelease string + +// PackageInfo_Source defines model for PackageInfo.Source. +type PackageInfo_Source struct { + License string `json:"license"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageInfoType0 defines model for PackageInfo.Type.0. +type PackageInfoType0 string + +// PackageInfoType1 defines model for PackageInfo.Type.1. +type PackageInfoType1 string + +// PackageInfoType2 defines model for PackageInfo.Type.2. +type PackageInfoType2 string + +// PackageInfoType3 defines model for . +type PackageInfoType3 = string + +// PackageInfo_Type defines model for PackageInfo.Type. +type PackageInfo_Type struct { + union json.RawMessage +} + +// PackageListItem defines model for package_list_item. 
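+//
+// Values of this type are typically decoded from a packages list response; a
+// sketch (body stands in for the raw JSON items and is not defined here):
+//
+//	var items []PackageListItem
+//	if err := json.Unmarshal(body, &items); err != nil {
+//		// handle the decode error
+//	}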
+type PackageListItem struct { + Categories *[]string `json:"categories,omitempty"` + Conditions *PackageListItem_Conditions `json:"conditions,omitempty"` + DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` + Description *string `json:"description,omitempty"` + Discovery *PackageListItem_Discovery `json:"discovery,omitempty"` + Download *string `json:"download,omitempty"` + FormatVersion *string `json:"format_version,omitempty"` + Icons *[]PackageListItem_Icons_Item `json:"icons,omitempty"` + Id string `json:"id"` + InstallationInfo *PackageListItem_InstallationInfo `json:"installationInfo,omitempty"` + Integration *string `json:"integration,omitempty"` + Internal *bool `json:"internal,omitempty"` + LatestVersion *string `json:"latestVersion,omitempty"` + Name string `json:"name"` + Owner *PackageListItem_Owner `json:"owner,omitempty"` + Path *string `json:"path,omitempty"` + PolicyTemplates *[]map[string]interface{} `json:"policy_templates,omitempty"` + Readme *string `json:"readme,omitempty"` + Release *PackageListItemRelease `json:"release,omitempty"` + SignaturePath *string `json:"signature_path,omitempty"` + Source *PackageListItem_Source `json:"source,omitempty"` + Status *string `json:"status,omitempty"` + Title string `json:"title"` + Type *PackageListItem_Type `json:"type,omitempty"` + Vars *[]map[string]interface{} `json:"vars,omitempty"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Conditions_Elastic defines model for PackageListItem.Conditions.Elastic. +type PackageListItem_Conditions_Elastic struct { + Capabilities *[]string `json:"capabilities,omitempty"` + Subscription *string `json:"subscription,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Conditions_Kibana defines model for PackageListItem.Conditions.Kibana. +type PackageListItem_Conditions_Kibana struct { + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Conditions defines model for PackageListItem.Conditions. +type PackageListItem_Conditions struct { + Elastic *PackageListItem_Conditions_Elastic `json:"elastic,omitempty"` + Kibana *PackageListItem_Conditions_Kibana `json:"kibana,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Discovery_Datasets_Item defines model for PackageListItem.Discovery.Datasets.Item. +type PackageListItem_Discovery_Datasets_Item struct { + Name string `json:"name"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Discovery_Fields_Item defines model for PackageListItem.Discovery.Fields.Item. +type PackageListItem_Discovery_Fields_Item struct { + Name string `json:"name"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Discovery defines model for PackageListItem.Discovery. +type PackageListItem_Discovery struct { + Datasets *[]PackageListItem_Discovery_Datasets_Item `json:"datasets,omitempty"` + Fields *[]PackageListItem_Discovery_Fields_Item `json:"fields,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_Icons_Item defines model for package_list_item.icons.Item. 
+type PackageListItem_Icons_Item struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 defines model for PackageListItem.InstallationInfo.AdditionalSpacesInstalledKibana.Type.0. +type PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 string + +// PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 defines model for . +type PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 = string + +// PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type defines model for PackageListItem.InstallationInfo.AdditionalSpacesInstalledKibana.Type. +type PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type struct { + union json.RawMessage +} + +// PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item defines model for PackageListItem.InstallationInfo.AdditionalSpacesInstalledKibana.Item. +type PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features defines model for PackageListItem.InstallationInfo.ExperimentalDataStreamFeatures.Features. +type PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item defines model for PackageListItem.InstallationInfo.ExperimentalDataStreamFeatures.Item. +type PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item struct { + DataStream string `json:"data_stream"` + Features PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features `json:"features"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemInstallationInfoInstallSource defines model for PackageListItem.InstallationInfo.InstallSource. +type PackageListItemInstallationInfoInstallSource string + +// PackageListItemInstallationInfoInstallStatus defines model for PackageListItem.InstallationInfo.InstallStatus. +type PackageListItemInstallationInfoInstallStatus string + +// PackageListItemInstallationInfoInstalledEsType defines model for PackageListItem.InstallationInfo.InstalledEs.Type. +type PackageListItemInstallationInfoInstalledEsType string + +// PackageListItem_InstallationInfo_InstalledEs_Item defines model for PackageListItem.InstallationInfo.InstalledEs.Item. 
+type PackageListItem_InstallationInfo_InstalledEs_Item struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type PackageListItemInstallationInfoInstalledEsType `json:"type"` + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemInstallationInfoInstalledKibanaType0 defines model for PackageListItem.InstallationInfo.InstalledKibana.Type.0. +type PackageListItemInstallationInfoInstalledKibanaType0 string + +// PackageListItemInstallationInfoInstalledKibanaType1 defines model for . +type PackageListItemInstallationInfoInstalledKibanaType1 = string + +// PackageListItem_InstallationInfo_InstalledKibana_Type defines model for PackageListItem.InstallationInfo.InstalledKibana.Type. +type PackageListItem_InstallationInfo_InstalledKibana_Type struct { + union json.RawMessage +} + +// PackageListItem_InstallationInfo_InstalledKibana_Item defines model for PackageListItem.InstallationInfo.InstalledKibana.Item. +type PackageListItem_InstallationInfo_InstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PackageListItem_InstallationInfo_InstalledKibana_Type `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_InstallationInfo_LatestExecutedState defines model for PackageListItem.InstallationInfo.LatestExecutedState. +type PackageListItem_InstallationInfo_LatestExecutedState struct { + Error *string `json:"error,omitempty"` + Name *string `json:"name,omitempty"` + StartedAt *string `json:"started_at,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error defines model for PackageListItem.InstallationInfo.LatestInstallFailedAttempts.Error. +type PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error struct { + Message string `json:"message"` + Name string `json:"name"` + Stack *string `json:"stack,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item defines model for PackageListItem.InstallationInfo.LatestInstallFailedAttempts.Item. +type PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item struct { + CreatedAt string `json:"created_at"` + Error PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error `json:"error"` + TargetVersion string `json:"target_version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemInstallationInfoVerificationStatus defines model for PackageListItem.InstallationInfo.VerificationStatus. +type PackageListItemInstallationInfoVerificationStatus string + +// PackageListItem_InstallationInfo defines model for PackageListItem.InstallationInfo. 
+type PackageListItem_InstallationInfo struct { + AdditionalSpacesInstalledKibana *map[string][]PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item `json:"additional_spaces_installed_kibana,omitempty"` + CreatedAt *string `json:"created_at,omitempty"` + ExperimentalDataStreamFeatures *[]PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item `json:"experimental_data_stream_features,omitempty"` + InstallFormatSchemaVersion *string `json:"install_format_schema_version,omitempty"` + InstallSource PackageListItemInstallationInfoInstallSource `json:"install_source"` + InstallStatus PackageListItemInstallationInfoInstallStatus `json:"install_status"` + InstalledEs []PackageListItem_InstallationInfo_InstalledEs_Item `json:"installed_es"` + InstalledKibana []PackageListItem_InstallationInfo_InstalledKibana_Item `json:"installed_kibana"` + InstalledKibanaSpaceId *string `json:"installed_kibana_space_id,omitempty"` + LatestExecutedState *PackageListItem_InstallationInfo_LatestExecutedState `json:"latest_executed_state,omitempty"` + LatestInstallFailedAttempts *[]PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item `json:"latest_install_failed_attempts,omitempty"` + Name string `json:"name"` + Namespaces *[]string `json:"namespaces,omitempty"` + PreviousVersion *string `json:"previous_version,omitempty"` + Type string `json:"type"` + UpdatedAt *string `json:"updated_at,omitempty"` + VerificationKeyId *string `json:"verification_key_id,omitempty"` + VerificationStatus PackageListItemInstallationInfoVerificationStatus `json:"verification_status"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemOwnerType defines model for PackageListItem.Owner.Type. +type PackageListItemOwnerType string + +// PackageListItem_Owner defines model for PackageListItem.Owner. +type PackageListItem_Owner struct { + Github *string `json:"github,omitempty"` + Type *PackageListItemOwnerType `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemRelease defines model for PackageListItem.Release. +type PackageListItemRelease string + +// PackageListItem_Source defines model for PackageListItem.Source. +type PackageListItem_Source struct { + License string `json:"license"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackageListItemType0 defines model for PackageListItem.Type.0. +type PackageListItemType0 string + +// PackageListItemType1 defines model for PackageListItem.Type.1. +type PackageListItemType1 string + +// PackageListItemType2 defines model for PackageListItem.Type.2. +type PackageListItemType2 string + +// PackageListItemType3 defines model for . +type PackageListItemType3 = string + +// PackageListItem_Type defines model for PackageListItem.Type. +type PackageListItem_Type struct { + union json.RawMessage +} + +// PackagePolicy defines model for package_policy. +type PackagePolicy struct { + // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. 
+ AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions,omitempty"` + Agents *float32 `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + CreatedBy string `json:"created_by"` + + // Description Package policy description + Description *string `json:"description,omitempty"` + Elasticsearch *PackagePolicy_Elasticsearch `json:"elasticsearch,omitempty"` + Enabled bool `json:"enabled"` + Id string `json:"id"` + + // Inputs Package policy inputs (see integration documentation to know what inputs are available) + Inputs map[string]PackagePolicyInput `json:"inputs"` + IsManaged *bool `json:"is_managed,omitempty"` + + // Name Package policy name (should be unique) + Name string `json:"name"` + + // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id,omitempty"` + + // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *struct { + Inputs *map[string]interface{} `json:"inputs,omitempty"` + } `json:"overrides,omitempty"` + Package *struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package,omitempty"` + + // PolicyId Agent policy ID where that package policy will be added + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id,omitempty"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Revision float32 `json:"revision"` + SecretReferences *[]PackagePolicySecretRef `json:"secret_references,omitempty"` + SpaceIds *[]string `json:"spaceIds,omitempty"` + + // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. + SupportsAgentless *bool `json:"supports_agentless,omitempty"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Vars *map[string]interface{} `json:"vars,omitempty"` + Version *string `json:"version,omitempty"` +} + +// PackagePolicy_Elasticsearch_Privileges defines model for PackagePolicy.Elasticsearch.Privileges. +type PackagePolicy_Elasticsearch_Privileges struct { + Cluster *[]string `json:"cluster,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackagePolicy_Elasticsearch defines model for PackagePolicy.Elasticsearch. +type PackagePolicy_Elasticsearch struct { + Privileges *PackagePolicy_Elasticsearch_Privileges `json:"privileges,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// PackagePolicyInput defines model for package_policy_input. 
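+//
+// Streams are keyed by stream name (and values of this type are themselves keyed
+// by input name in PackagePolicy.Inputs); as a sketch, enabling one input with a
+// single stream looks like this (the "nginx.access" key is a placeholder, actual
+// names come from the integration documentation):
+//
+//	enabled := true
+//	input := PackagePolicyInput{
+//		Enabled: &enabled,
+//		Streams: &map[string]PackagePolicyInputStream{
+//			"nginx.access": {Enabled: &enabled},
+//		},
+//	}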
+type PackagePolicyInput struct { + // Enabled enable or disable that input, (default to true) + Enabled *bool `json:"enabled,omitempty"` + + // Streams Input streams (see integration documentation to know what streams are available) + Streams *map[string]PackagePolicyInputStream `json:"streams,omitempty"` + Vars *map[string]interface{} `json:"vars,omitempty"` +} + +// PackagePolicyInputStream defines model for package_policy_input_stream. +type PackagePolicyInputStream struct { + // Enabled enable or disable that stream, (default to true) + Enabled *bool `json:"enabled,omitempty"` + Vars *map[string]interface{} `json:"vars,omitempty"` +} + +// PackagePolicyRequest defines model for package_policy_request. +type PackagePolicyRequest struct { + // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. + AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions,omitempty"` + Description *string `json:"description,omitempty"` + Force *bool `json:"force,omitempty"` + Id *string `json:"id,omitempty"` + + // Inputs Package policy inputs (see integration documentation to know what inputs are available) + Inputs *map[string]PackagePolicyRequestInput `json:"inputs,omitempty"` + Name string `json:"name"` + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id,omitempty"` + Package PackagePolicyRequestPackage `json:"package"` + PolicyId *string `json:"policy_id,omitempty"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + + // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. + SupportsAgentless *bool `json:"supports_agentless,omitempty"` + Vars *map[string]interface{} `json:"vars,omitempty"` +} + +// PackagePolicyRequestInput defines model for package_policy_request_input. +type PackagePolicyRequestInput struct { + // Enabled enable or disable that input, (default to true) + Enabled *bool `json:"enabled,omitempty"` + + // Streams Input streams (see integration documentation to know what streams are available) + Streams *map[string]PackagePolicyRequestInputStream `json:"streams,omitempty"` + Vars *map[string]interface{} `json:"vars,omitempty"` +} + +// PackagePolicyRequestInputStream defines model for package_policy_request_input_stream. +type PackagePolicyRequestInputStream struct { + // Enabled enable or disable that stream, (default to true) + Enabled *bool `json:"enabled,omitempty"` + Vars *map[string]interface{} `json:"vars,omitempty"` +} + +// PackagePolicyRequestPackage defines model for package_policy_request_package. +type PackagePolicyRequestPackage struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` +} + +// PackagePolicySecretRef defines model for package_policy_secret_ref. +type PackagePolicySecretRef struct { + Id string `json:"id"` +} + +// PagerdutyConfig Defines properties for connectors when type is `.pagerduty`. 
+type PagerdutyConfig struct { + // ApiUrl The PagerDuty event URL. + ApiUrl *string `json:"apiUrl,omitempty"` +} + +// PagerdutySecrets Defines secrets for connectors when type is `.pagerduty`. +type PagerdutySecrets struct { + // RoutingKey A 32 character PagerDuty Integration Key for an integration on a service. + RoutingKey string `json:"routingKey"` +} + +// ParamsEsQueryDslRule An Elasticsearch query rule can run a query defined in Elasticsearch Query DSL and compare the number of matches to a configured threshold. These parameters are appropriate when `rule_type_id` is `.es-query`. +type ParamsEsQueryDslRule struct { + // AggField The name of the numeric field that is used in the aggregation. This property is required when `aggType` is `avg`, `max`, `min` or `sum`. + AggField *Aggfield `json:"aggField,omitempty"` + + // AggType The type of aggregation to perform. + AggType *Aggtype `json:"aggType,omitempty"` + + // EsQuery The query definition, which uses Elasticsearch Query DSL. + EsQuery string `json:"esQuery"` + + // ExcludeHitsFromPreviousRun Indicates whether to exclude matches from previous runs. If `true`, you can avoid alert duplication by excluding documents that have already been detected by the previous rule run. This option is not available when a grouping field is specified. + ExcludeHitsFromPreviousRun *Excludehitsfrompreviousrun `json:"excludeHitsFromPreviousRun,omitempty"` + + // GroupBy Indicates whether the aggregation is applied over all documents (`all`) or split into groups (`top`) using a grouping field (`termField`). If grouping is used, an alert will be created for each group when it exceeds the threshold; only the top groups (up to `termSize` number of groups) are checked. + GroupBy *Groupby `json:"groupBy,omitempty"` + + // Index The indices to query. + Index ParamsEsQueryDslRule_Index `json:"index"` + + // SearchType The type of query, in this case a query that uses Elasticsearch Query DSL. + SearchType *ParamsEsQueryDslRuleSearchType `json:"searchType,omitempty"` + + // Size The number of documents to pass to the configured actions when the threshold condition is met. + Size *Size `json:"size,omitempty"` + + // TermField The names of up to four fields that are used for grouping the aggregation. This property is required when `groupBy` is `top`. + TermField *Termfield `json:"termField,omitempty"` + + // TermSize This property is required when `groupBy` is `top`. It specifies the number of groups to check against the threshold and therefore limits the number of alerts on high cardinality fields. + TermSize *Termsize `json:"termSize,omitempty"` + + // Threshold The threshold value that is used with the `thresholdComparator`. If the `thresholdComparator` is `between` or `notBetween`, you must specify the boundary values. + Threshold Threshold `json:"threshold"` + + // ThresholdComparator The comparison function for the threshold. For example, "is above", "is above or equals", "is below", "is below or equals", "is between", and "is not between". + ThresholdComparator Thresholdcomparator `json:"thresholdComparator"` + + // TimeField The field that is used to calculate the time window. + TimeField Timefield `json:"timeField"` + + // TimeWindowSize The size of the time window (in `timeWindowUnit` units), which determines how far back to search for documents. Generally it should be a value higher than the rule check interval to avoid gaps in detection. 
+ TimeWindowSize Timewindowsize `json:"timeWindowSize"` + + // TimeWindowUnit The type of units for the time window: seconds, minutes, hours, or days. + TimeWindowUnit Timewindowunit `json:"timeWindowUnit"` +} + +// ParamsEsQueryDslRuleIndex0 defines model for . +type ParamsEsQueryDslRuleIndex0 = []string + +// ParamsEsQueryDslRuleIndex1 defines model for . +type ParamsEsQueryDslRuleIndex1 = string + +// ParamsEsQueryDslRule_Index The indices to query. +type ParamsEsQueryDslRule_Index struct { + union json.RawMessage +} + +// ParamsEsQueryDslRuleSearchType The type of query, in this case a query that uses Elasticsearch Query DSL. +type ParamsEsQueryDslRuleSearchType string + +// ParamsEsQueryEsqlRule An Elasticsearch query rule can run an ES|QL query and compare the number of matches to a configured threshold. These parameters are appropriate when `rule_type_id` is `.es-query`. +type ParamsEsQueryEsqlRule struct { + // AggField The name of the numeric field that is used in the aggregation. This property is required when `aggType` is `avg`, `max`, `min` or `sum`. + AggField *Aggfield `json:"aggField,omitempty"` + + // AggType The type of aggregation to perform. + AggType *Aggtype `json:"aggType,omitempty"` + EsqlQuery struct { + // Esql The query definition, which uses Elasticsearch Query Language. + Esql string `json:"esql"` + } `json:"esqlQuery"` + + // ExcludeHitsFromPreviousRun Indicates whether to exclude matches from previous runs. If `true`, you can avoid alert duplication by excluding documents that have already been detected by the previous rule run. This option is not available when a grouping field is specified. + ExcludeHitsFromPreviousRun *Excludehitsfrompreviousrun `json:"excludeHitsFromPreviousRun,omitempty"` + + // GroupBy Indicates whether the aggregation is applied over all documents (`all`) or split into groups (`top`) using a grouping field (`termField`). If grouping is used, an alert will be created for each group when it exceeds the threshold; only the top groups (up to `termSize` number of groups) are checked. + GroupBy *Groupby `json:"groupBy,omitempty"` + + // SearchType The type of query, in this case a query that uses Elasticsearch Query Language (ES|QL). + SearchType ParamsEsQueryEsqlRuleSearchType `json:"searchType"` + + // Size When `searchType` is `esqlQuery`, this property is required but it does not affect the rule behavior. + Size int `json:"size"` + + // TermSize This property is required when `groupBy` is `top`. It specifies the number of groups to check against the threshold and therefore limits the number of alerts on high cardinality fields. + TermSize *Termsize `json:"termSize,omitempty"` + + // Threshold The threshold value that is used with the `thresholdComparator`. When `searchType` is `esqlQuery`, this property is required and must be set to zero. + Threshold []int `json:"threshold"` + + // ThresholdComparator The comparison function for the threshold. When `searchType` is `esqlQuery`, this property is required and must be set to ">". Since the `threshold` value must be `0`, the result is that an alert occurs whenever the query returns results. + ThresholdComparator ParamsEsQueryEsqlRuleThresholdComparator `json:"thresholdComparator"` + + // TimeField The field that is used to calculate the time window. + TimeField *Timefield `json:"timeField,omitempty"` + + // TimeWindowSize The size of the time window (in `timeWindowUnit` units), which determines how far back to search for documents. 
Generally it should be a value higher than the rule check interval to avoid gaps in detection. + TimeWindowSize Timewindowsize `json:"timeWindowSize"` + + // TimeWindowUnit The type of units for the time window: seconds, minutes, hours, or days. + TimeWindowUnit Timewindowunit `json:"timeWindowUnit"` +} + +// ParamsEsQueryEsqlRuleSearchType The type of query, in this case a query that uses Elasticsearch Query Language (ES|QL). +type ParamsEsQueryEsqlRuleSearchType string + +// ParamsEsQueryEsqlRuleThresholdComparator The comparison function for the threshold. When `searchType` is `esqlQuery`, this property is required and must be set to ">". Since the `threshold` value must be `0`, the result is that an alert occurs whenever the query returns results. +type ParamsEsQueryEsqlRuleThresholdComparator string + +// ParamsEsQueryKqlRule An Elasticsearch query rule can run a query defined in KQL or Lucene and compare the number of matches to a configured threshold. These parameters are appropriate when `rule_type_id` is `.es-query`. +type ParamsEsQueryKqlRule struct { + // AggField The name of the numeric field that is used in the aggregation. This property is required when `aggType` is `avg`, `max`, `min` or `sum`. + AggField *Aggfield `json:"aggField,omitempty"` + + // AggType The type of aggregation to perform. + AggType *Aggtype `json:"aggType,omitempty"` + + // ExcludeHitsFromPreviousRun Indicates whether to exclude matches from previous runs. If `true`, you can avoid alert duplication by excluding documents that have already been detected by the previous rule run. This option is not available when a grouping field is specified. + ExcludeHitsFromPreviousRun *Excludehitsfrompreviousrun `json:"excludeHitsFromPreviousRun,omitempty"` + + // GroupBy Indicates whether the aggregation is applied over all documents (`all`) or split into groups (`top`) using a grouping field (`termField`). If grouping is used, an alert will be created for each group when it exceeds the threshold; only the top groups (up to `termSize` number of groups) are checked. + GroupBy *Groupby `json:"groupBy,omitempty"` + + // SearchConfiguration The query definition, which uses KQL or Lucene to fetch the documents from Elasticsearch. + SearchConfiguration *struct { + Filter *[]Filter `json:"filter,omitempty"` + + // Index The indices to query. + Index *ParamsEsQueryKqlRule_SearchConfiguration_Index `json:"index,omitempty"` + Query *struct { + Language *string `json:"language,omitempty"` + Query *string `json:"query,omitempty"` + } `json:"query,omitempty"` + } `json:"searchConfiguration,omitempty"` + + // SearchType The type of query, in this case a text-based query that uses KQL or Lucene. + SearchType ParamsEsQueryKqlRuleSearchType `json:"searchType"` + + // Size The number of documents to pass to the configured actions when the threshold condition is met. + Size Size `json:"size"` + + // TermField The names of up to four fields that are used for grouping the aggregation. This property is required when `groupBy` is `top`. + TermField *Termfield `json:"termField,omitempty"` + + // TermSize This property is required when `groupBy` is `top`. It specifies the number of groups to check against the threshold and therefore limits the number of alerts on high cardinality fields. + TermSize *Termsize `json:"termSize,omitempty"` + + // Threshold The threshold value that is used with the `thresholdComparator`. If the `thresholdComparator` is `between` or `notBetween`, you must specify the boundary values. 
+ Threshold Threshold `json:"threshold"` + + // ThresholdComparator The comparison function for the threshold. For example, "is above", "is above or equals", "is below", "is below or equals", "is between", and "is not between". + ThresholdComparator Thresholdcomparator `json:"thresholdComparator"` + + // TimeField The field that is used to calculate the time window. + TimeField *Timefield `json:"timeField,omitempty"` + + // TimeWindowSize The size of the time window (in `timeWindowUnit` units), which determines how far back to search for documents. Generally it should be a value higher than the rule check interval to avoid gaps in detection. + TimeWindowSize Timewindowsize `json:"timeWindowSize"` + + // TimeWindowUnit The type of units for the time window: seconds, minutes, hours, or days. + TimeWindowUnit Timewindowunit `json:"timeWindowUnit"` +} + +// ParamsEsQueryKqlRuleSearchConfigurationIndex0 defines model for . +type ParamsEsQueryKqlRuleSearchConfigurationIndex0 = string + +// ParamsEsQueryKqlRuleSearchConfigurationIndex1 defines model for . +type ParamsEsQueryKqlRuleSearchConfigurationIndex1 = []string + +// ParamsEsQueryKqlRule_SearchConfiguration_Index The indices to query. +type ParamsEsQueryKqlRule_SearchConfiguration_Index struct { + union json.RawMessage +} + +// ParamsEsQueryKqlRuleSearchType The type of query, in this case a text-based query that uses KQL or Lucene. +type ParamsEsQueryKqlRuleSearchType string + +// ParamsIndexThresholdRule An index threshold rule runs an Elasticsearch query, aggregates field values from documents, compares them to threshold values, and schedules actions to run when the thresholds are met. These parameters are appropriate when `rule_type_id` is `.index-threshold`. +type ParamsIndexThresholdRule struct { + // AggField The name of the numeric field that is used in the aggregation. This property is required when `aggType` is `avg`, `max`, `min` or `sum`. + AggField *Aggfield `json:"aggField,omitempty"` + + // AggType The type of aggregation to perform. + AggType *Aggtype `json:"aggType,omitempty"` + + // FilterKuery A KQL expression thats limits the scope of alerts. + FilterKuery *string `json:"filterKuery,omitempty"` + + // GroupBy Indicates whether the aggregation is applied over all documents (`all`) or split into groups (`top`) using a grouping field (`termField`). If grouping is used, an alert will be created for each group when it exceeds the threshold; only the top groups (up to `termSize` number of groups) are checked. + GroupBy *Groupby `json:"groupBy,omitempty"` + + // Index The indices to query. + Index []string `json:"index"` + + // TermField The names of up to four fields that are used for grouping the aggregation. This property is required when `groupBy` is `top`. + TermField *Termfield `json:"termField,omitempty"` + + // TermSize This property is required when `groupBy` is `top`. It specifies the number of groups to check against the threshold and therefore limits the number of alerts on high cardinality fields. + TermSize *Termsize `json:"termSize,omitempty"` + + // Threshold The threshold value that is used with the `thresholdComparator`. If the `thresholdComparator` is `between` or `notBetween`, you must specify the boundary values. + Threshold Threshold `json:"threshold"` + + // ThresholdComparator The comparison function for the threshold. For example, "is above", "is above or equals", "is below", "is below or equals", "is between", and "is not between". 
+ ThresholdComparator Thresholdcomparator `json:"thresholdComparator"` + + // TimeField The field that is used to calculate the time window. + TimeField Timefield `json:"timeField"` + + // TimeWindowSize The size of the time window (in `timeWindowUnit` units), which determines how far back to search for documents. Generally it should be a value higher than the rule check interval to avoid gaps in detection. + TimeWindowSize Timewindowsize `json:"timeWindowSize"` + + // TimeWindowUnit The type of units for the time window: seconds, minutes, hours, or days. + TimeWindowUnit Timewindowunit `json:"timeWindowUnit"` +} + +// ParamsPropertyApmAnomaly defines model for params_property_apm_anomaly. +type ParamsPropertyApmAnomaly struct { + // AnomalySeverityType The severity of anomalies that will generate alerts: critical, major, minor, or warning. + AnomalySeverityType ParamsPropertyApmAnomalyAnomalySeverityType `json:"anomalySeverityType"` + + // Environment Filter the rule to apply to a specific environment. + Environment string `json:"environment"` + + // ServiceName Filter the rule to apply to a specific service name. + ServiceName *string `json:"serviceName,omitempty"` + + // TransactionType Filter the rule to apply to a specific transaction type. + TransactionType *string `json:"transactionType,omitempty"` + + // WindowSize The size of the time window (in `windowUnit` units), which determines how far back to search for documents. Generally it should be a value higher than the rule check interval to avoid gaps in detection. + WindowSize float32 `json:"windowSize"` + + // WindowUnit The type of units for the time window. For example: minutes, hours, or days. + WindowUnit ParamsPropertyApmAnomalyWindowUnit `json:"windowUnit"` +} + +// ParamsPropertyApmAnomalyAnomalySeverityType The severity of anomalies that will generate alerts: critical, major, minor, or warning. +type ParamsPropertyApmAnomalyAnomalySeverityType string + +// ParamsPropertyApmAnomalyWindowUnit The type of units for the time window. For example: minutes, hours, or days. +type ParamsPropertyApmAnomalyWindowUnit string + +// ParamsPropertyApmErrorCount defines model for params_property_apm_error_count. +type ParamsPropertyApmErrorCount struct { + // Environment Filter the errors coming from your application to apply the rule to a specific environment. + Environment string `json:"environment"` + + // ErrorGroupingKey Filter the errors coming from your application to apply the rule to a specific error grouping key, which is a hash of the stack trace and other properties. + ErrorGroupingKey *string `json:"errorGroupingKey,omitempty"` + + // GroupBy Perform a composite aggregation against the selected fields. When any of these groups match the selected rule conditions, an alert is triggered per group. + GroupBy *[]ParamsPropertyApmErrorCountGroupBy `json:"groupBy,omitempty"` + + // ServiceName Filter the errors coming from your application to apply the rule to a specific service. + ServiceName *string `json:"serviceName,omitempty"` + + // Threshold The error count threshold. + Threshold float32 `json:"threshold"` + + // WindowSize The time frame in which the errors must occur (in `windowUnit` units). Generally it should be a value higher than the rule check interval to avoid gaps in detection. + WindowSize float32 `json:"windowSize"` + + // WindowUnit The type of units for the time window: minutes, hours, or days. 
+ WindowUnit ParamsPropertyApmErrorCountWindowUnit `json:"windowUnit"` +} + +// ParamsPropertyApmErrorCountGroupBy defines model for ParamsPropertyApmErrorCount.GroupBy. +type ParamsPropertyApmErrorCountGroupBy string + +// ParamsPropertyApmErrorCountWindowUnit The type of units for the time window: minutes, hours, or days. +type ParamsPropertyApmErrorCountWindowUnit string + +// ParamsPropertyApmTransactionDuration defines model for params_property_apm_transaction_duration. +type ParamsPropertyApmTransactionDuration struct { + // AggregationType The type of aggregation to perform. + AggregationType ParamsPropertyApmTransactionDurationAggregationType `json:"aggregationType"` + + // Environment Filter the rule to apply to a specific environment. + Environment string `json:"environment"` + + // GroupBy Perform a composite aggregation against the selected fields. When any of these groups match the selected rule conditions, an alert is triggered per group. + GroupBy *[]ParamsPropertyApmTransactionDurationGroupBy `json:"groupBy,omitempty"` + + // ServiceName Filter the rule to apply to a specific service. + ServiceName *string `json:"serviceName,omitempty"` + + // Threshold The latency threshold value. + Threshold float32 `json:"threshold"` + + // TransactionName Filter the rule to apply to a specific transaction name. + TransactionName *string `json:"transactionName,omitempty"` + + // TransactionType Filter the rule to apply to a specific transaction type. + TransactionType *string `json:"transactionType,omitempty"` + + // WindowSize The size of the time window (in `windowUnit` units), which determines how far back to search for documents. Generally it should be a value higher than the rule check interval to avoid gaps in detection. + WindowSize float32 `json:"windowSize"` + + // WindowUnit The type of units for the time window. For example: minutes, hours, or days. + WindowUnit ParamsPropertyApmTransactionDurationWindowUnit `json:"windowUnit"` +} + +// ParamsPropertyApmTransactionDurationAggregationType The type of aggregation to perform. +type ParamsPropertyApmTransactionDurationAggregationType string + +// ParamsPropertyApmTransactionDurationGroupBy defines model for ParamsPropertyApmTransactionDuration.GroupBy. +type ParamsPropertyApmTransactionDurationGroupBy string + +// ParamsPropertyApmTransactionDurationWindowUnit The type of units for the time window. For example: minutes, hours, or days. +type ParamsPropertyApmTransactionDurationWindowUnit string + +// ParamsPropertyApmTransactionErrorRate defines model for params_property_apm_transaction_error_rate. +type ParamsPropertyApmTransactionErrorRate struct { + // Environment The environment from APM + Environment string `json:"environment"` + GroupBy *[]ParamsPropertyApmTransactionErrorRateGroupBy `json:"groupBy,omitempty"` + + // ServiceName The service name from APM + ServiceName *string `json:"serviceName,omitempty"` + + // Threshold The error rate threshold value + Threshold float32 `json:"threshold"` + + // TransactionName The transaction name from APM + TransactionName *string `json:"transactionName,omitempty"` + + // TransactionType The transaction type from APM + TransactionType *string `json:"transactionType,omitempty"` + + // WindowSize The window size + WindowSize float32 `json:"windowSize"` + + // WindowUnit The window size unit + WindowUnit ParamsPropertyApmTransactionErrorRateWindowUnit `json:"windowUnit"` +} + +// ParamsPropertyApmTransactionErrorRateGroupBy defines model for ParamsPropertyApmTransactionErrorRate.GroupBy. 
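// A minimal sketch of populating the APM error-count rule parameters defined
// above. The service name, threshold, and window values are illustrative, and
// the WindowUnit literal "m" is an assumption about the accepted enum values,
// which are not spelled out in this hunk.
func exampleApmErrorCountParams() ParamsPropertyApmErrorCount {
	service := "checkout-service"
	return ParamsPropertyApmErrorCount{
		Environment: "production",
		ServiceName: &service,
		Threshold:   25,
		WindowSize:  5,
		WindowUnit:  ParamsPropertyApmErrorCountWindowUnit("m"),
	}
}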
+type ParamsPropertyApmTransactionErrorRateGroupBy string + +// ParamsPropertyApmTransactionErrorRateWindowUnit The window size unit +type ParamsPropertyApmTransactionErrorRateWindowUnit string + +// ParamsPropertyInfraInventory defines model for params_property_infra_inventory. +type ParamsPropertyInfraInventory struct { + AlertOnNoData *bool `json:"alertOnNoData,omitempty"` + Criteria *[]struct { + Comparator *ParamsPropertyInfraInventoryCriteriaComparator `json:"comparator,omitempty"` + CustomMetric *struct { + Aggregation *ParamsPropertyInfraInventoryCriteriaCustomMetricAggregation `json:"aggregation,omitempty"` + Field *string `json:"field,omitempty"` + Id *string `json:"id,omitempty"` + Label *string `json:"label,omitempty"` + Type *ParamsPropertyInfraInventoryCriteriaCustomMetricType `json:"type,omitempty"` + } `json:"customMetric,omitempty"` + Metric *ParamsPropertyInfraInventoryCriteriaMetric `json:"metric,omitempty"` + SourceId *string `json:"sourceId,omitempty"` + Threshold *[]float32 `json:"threshold,omitempty"` + TimeSize *float32 `json:"timeSize,omitempty"` + TimeUnit *ParamsPropertyInfraInventoryCriteriaTimeUnit `json:"timeUnit,omitempty"` + WarningComparator *ParamsPropertyInfraInventoryCriteriaWarningComparator `json:"warningComparator,omitempty"` + WarningThreshold *[]float32 `json:"warningThreshold,omitempty"` + } `json:"criteria,omitempty"` + FilterQuery *string `json:"filterQuery,omitempty"` + FilterQueryText *string `json:"filterQueryText,omitempty"` + NodeType *ParamsPropertyInfraInventoryNodeType `json:"nodeType,omitempty"` + SourceId *string `json:"sourceId,omitempty"` +} + +// ParamsPropertyInfraInventoryCriteriaComparator defines model for ParamsPropertyInfraInventory.Criteria.Comparator. +type ParamsPropertyInfraInventoryCriteriaComparator string + +// ParamsPropertyInfraInventoryCriteriaCustomMetricAggregation defines model for ParamsPropertyInfraInventory.Criteria.CustomMetric.Aggregation. +type ParamsPropertyInfraInventoryCriteriaCustomMetricAggregation string + +// ParamsPropertyInfraInventoryCriteriaCustomMetricType defines model for ParamsPropertyInfraInventory.Criteria.CustomMetric.Type. +type ParamsPropertyInfraInventoryCriteriaCustomMetricType string + +// ParamsPropertyInfraInventoryCriteriaMetric defines model for ParamsPropertyInfraInventory.Criteria.Metric. +type ParamsPropertyInfraInventoryCriteriaMetric string + +// ParamsPropertyInfraInventoryCriteriaTimeUnit defines model for ParamsPropertyInfraInventory.Criteria.TimeUnit. +type ParamsPropertyInfraInventoryCriteriaTimeUnit string + +// ParamsPropertyInfraInventoryCriteriaWarningComparator defines model for ParamsPropertyInfraInventory.Criteria.WarningComparator. +type ParamsPropertyInfraInventoryCriteriaWarningComparator string + +// ParamsPropertyInfraInventoryNodeType defines model for ParamsPropertyInfraInventory.NodeType. +type ParamsPropertyInfraInventoryNodeType string + +// ParamsPropertyInfraMetricThreshold defines model for params_property_infra_metric_threshold. +type ParamsPropertyInfraMetricThreshold struct { + // AlertOnGroupDisappear If true, an alert occurs if a group that previously reported metrics does not report them again over the expected time period. This check is not recommended for dynamically scaling infrastructures that might rapidly start and stop nodes automatically. + AlertOnGroupDisappear *bool `json:"alertOnGroupDisappear,omitempty"` + + // AlertOnNoData If true, an alert occurs if the metrics do not report any data over the expected period or if the query fails. 
+ AlertOnNoData *bool `json:"alertOnNoData,omitempty"` + Criteria *[]ParamsPropertyInfraMetricThreshold_Criteria_Item `json:"criteria,omitempty"` + + // FilterQuery A query that limits the scope of the rule. The rule evaluates only metric data that matches the query. + FilterQuery *string `json:"filterQuery,omitempty"` + + // GroupBy Create an alert for every unique value of the specified fields. For example, you can create a rule per host or every mount point of each host. + // IMPORTANT: If you include the same field in both the `filterQuery` and `groupBy`, you might receive fewer results than you expect. For example, if you filter by `cloud.region: us-east`, grouping by `cloud.region` will have no effect because the filter query can match only one region. + GroupBy *ParamsPropertyInfraMetricThreshold_GroupBy `json:"groupBy,omitempty"` + SourceId *string `json:"sourceId,omitempty"` +} + +// ParamsPropertyInfraMetricThresholdCriteria0 defines model for . +type ParamsPropertyInfraMetricThresholdCriteria0 struct { + AggType *ParamsPropertyInfraMetricThresholdCriteria0AggType `json:"aggType,omitempty"` + + // Comparator The comparison function for the threshold. For example, "is above", "is above or equals", "is below", "is below or equals", "is between", and "outside". + Comparator *ParamsPropertyInfraMetricThresholdCriteria0Comparator `json:"comparator,omitempty"` + Metric *string `json:"metric,omitempty"` + + // Threshold The threshold value that is used with the `comparator`. If the `comparator` is `between`, you must specify the boundary values. + Threshold *[]float32 `json:"threshold,omitempty"` + + // TimeSize The size of the time window (in `timeUnit` units), which determines how far back to search for documents. Generally it should be a value higher than the rule check interval to avoid gaps in detection. + TimeSize *float32 `json:"timeSize,omitempty"` + + // TimeUnit The type of units for the time window: seconds, minutes, hours, or days. + TimeUnit *ParamsPropertyInfraMetricThresholdCriteria0TimeUnit `json:"timeUnit,omitempty"` + WarningComparator *ParamsPropertyInfraMetricThresholdCriteria0WarningComparator `json:"warningComparator,omitempty"` + + // WarningThreshold The threshold value that is used with the `warningComparator`. If the `warningComparator` is `between`, you must specify the boundary values. + WarningThreshold *[]float32 `json:"warningThreshold,omitempty"` +} + +// ParamsPropertyInfraMetricThresholdCriteria0AggType defines model for ParamsPropertyInfraMetricThreshold.Criteria.0.AggType. +type ParamsPropertyInfraMetricThresholdCriteria0AggType string + +// ParamsPropertyInfraMetricThresholdCriteria0Comparator The comparison function for the threshold. For example, "is above", "is above or equals", "is below", "is below or equals", "is between", and "outside". +type ParamsPropertyInfraMetricThresholdCriteria0Comparator string + +// ParamsPropertyInfraMetricThresholdCriteria0TimeUnit The type of units for the time window: seconds, minutes, hours, or days. +type ParamsPropertyInfraMetricThresholdCriteria0TimeUnit string + +// ParamsPropertyInfraMetricThresholdCriteria0WarningComparator defines model for ParamsPropertyInfraMetricThreshold.Criteria.0.WarningComparator. +type ParamsPropertyInfraMetricThresholdCriteria0WarningComparator string + +// ParamsPropertyInfraMetricThresholdCriteria1 defines model for . 
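// A minimal sketch of filling in the first (non-custom) metric threshold
// criterion variant defined above. Every property is an optional pointer, so
// local variables are used to take addresses; the metric name, aggregation,
// comparator, and threshold values are illustrative assumptions about what the
// Kibana API accepts, not values taken from this diff.
func exampleInfraMetricThresholdCriterion() ParamsPropertyInfraMetricThresholdCriteria0 {
	aggType := ParamsPropertyInfraMetricThresholdCriteria0AggType("avg")
	comparator := ParamsPropertyInfraMetricThresholdCriteria0Comparator(">")
	metric := "system.cpu.total.norm.pct"
	threshold := []float32{0.9}
	timeSize := float32(5)
	timeUnit := ParamsPropertyInfraMetricThresholdCriteria0TimeUnit("m")

	return ParamsPropertyInfraMetricThresholdCriteria0{
		AggType:    &aggType,
		Comparator: &comparator,
		Metric:     &metric,
		Threshold:  &threshold,
		TimeSize:   &timeSize,
		TimeUnit:   &timeUnit,
	}
}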
+type ParamsPropertyInfraMetricThresholdCriteria1 struct { + AggType *ParamsPropertyInfraMetricThresholdCriteria1AggType `json:"aggType,omitempty"` + Comparator *ParamsPropertyInfraMetricThresholdCriteria1Comparator `json:"comparator,omitempty"` + Threshold *[]float32 `json:"threshold,omitempty"` + + // TimeSize The size of the time window (in `timeUnit` units), which determines how far back to search for documents. Generally it should be a value higher than the rule check interval to avoid gaps in detection. + TimeSize *float32 `json:"timeSize,omitempty"` + + // TimeUnit The type of units for the time window: seconds, minutes, hours, or days. + TimeUnit *ParamsPropertyInfraMetricThresholdCriteria1TimeUnit `json:"timeUnit,omitempty"` + WarningComparator *ParamsPropertyInfraMetricThresholdCriteria1WarningComparator `json:"warningComparator,omitempty"` + WarningThreshold *[]float32 `json:"warningThreshold,omitempty"` +} + +// ParamsPropertyInfraMetricThresholdCriteria1AggType defines model for ParamsPropertyInfraMetricThreshold.Criteria.1.AggType. +type ParamsPropertyInfraMetricThresholdCriteria1AggType string + +// ParamsPropertyInfraMetricThresholdCriteria1Comparator defines model for ParamsPropertyInfraMetricThreshold.Criteria.1.Comparator. +type ParamsPropertyInfraMetricThresholdCriteria1Comparator string + +// ParamsPropertyInfraMetricThresholdCriteria1TimeUnit The type of units for the time window: seconds, minutes, hours, or days. +type ParamsPropertyInfraMetricThresholdCriteria1TimeUnit string + +// ParamsPropertyInfraMetricThresholdCriteria1WarningComparator defines model for ParamsPropertyInfraMetricThreshold.Criteria.1.WarningComparator. +type ParamsPropertyInfraMetricThresholdCriteria1WarningComparator string + +// ParamsPropertyInfraMetricThresholdCriteria2 defines model for . +type ParamsPropertyInfraMetricThresholdCriteria2 struct { + AggType *ParamsPropertyInfraMetricThresholdCriteria2AggType `json:"aggType,omitempty"` + Comparator *ParamsPropertyInfraMetricThresholdCriteria2Comparator `json:"comparator,omitempty"` + CustomMetric *[]ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item `json:"customMetric,omitempty"` + Equation *string `json:"equation,omitempty"` + Label *string `json:"label,omitempty"` + Threshold *[]float32 `json:"threshold,omitempty"` + + // TimeSize The size of the time window (in `timeUnit` units), which determines how far back to search for documents. Generally it should be a value higher than the rule check interval to avoid gaps in detection. + TimeSize *float32 `json:"timeSize,omitempty"` + + // TimeUnit The type of units for the time window: seconds, minutes, hours, or days. + TimeUnit *ParamsPropertyInfraMetricThresholdCriteria2TimeUnit `json:"timeUnit,omitempty"` + WarningComparator *ParamsPropertyInfraMetricThresholdCriteria2WarningComparator `json:"warningComparator,omitempty"` + WarningThreshold *[]float32 `json:"warningThreshold,omitempty"` +} + +// ParamsPropertyInfraMetricThresholdCriteria2AggType defines model for ParamsPropertyInfraMetricThreshold.Criteria.2.AggType. +type ParamsPropertyInfraMetricThresholdCriteria2AggType string + +// ParamsPropertyInfraMetricThresholdCriteria2Comparator defines model for ParamsPropertyInfraMetricThreshold.Criteria.2.Comparator. +type ParamsPropertyInfraMetricThresholdCriteria2Comparator string + +// ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0 defines model for . 
+type ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0 struct { + // AggType An aggregation to gather data for the rule. For example, find the average, highest or lowest value of a numeric field. Or use a cardinality aggregation to find the approximate number of unique values in a field. + AggType *ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0AggType `json:"aggType,omitempty"` + Field *string `json:"field,omitempty"` + Name *string `json:"name,omitempty"` +} + +// ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0AggType An aggregation to gather data for the rule. For example, find the average, highest or lowest value of a numeric field. Or use a cardinality aggregation to find the approximate number of unique values in a field. +type ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0AggType string + +// ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1 defines model for . +type ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1 struct { + AggType *ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1AggType `json:"aggType,omitempty"` + Filter *string `json:"filter,omitempty"` + Name *string `json:"name,omitempty"` +} + +// ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1AggType defines model for ParamsPropertyInfraMetricThreshold.Criteria.2.CustomMetric.1.AggType. +type ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1AggType string + +// ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item defines model for ParamsPropertyInfraMetricThreshold.Criteria.2.customMetric.Item. +type ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item struct { + union json.RawMessage +} + +// ParamsPropertyInfraMetricThresholdCriteria2TimeUnit The type of units for the time window: seconds, minutes, hours, or days. +type ParamsPropertyInfraMetricThresholdCriteria2TimeUnit string + +// ParamsPropertyInfraMetricThresholdCriteria2WarningComparator defines model for ParamsPropertyInfraMetricThreshold.Criteria.2.WarningComparator. +type ParamsPropertyInfraMetricThresholdCriteria2WarningComparator string + +// ParamsPropertyInfraMetricThreshold_Criteria_Item defines model for params_property_infra_metric_threshold.criteria.Item. +type ParamsPropertyInfraMetricThreshold_Criteria_Item struct { + union json.RawMessage +} + +// ParamsPropertyInfraMetricThresholdGroupBy0 defines model for . +type ParamsPropertyInfraMetricThresholdGroupBy0 = string + +// ParamsPropertyInfraMetricThresholdGroupBy1 defines model for . +type ParamsPropertyInfraMetricThresholdGroupBy1 = []string + +// ParamsPropertyInfraMetricThreshold_GroupBy Create an alert for every unique value of the specified fields. For example, you can create a rule per host or every mount point of each host. +// IMPORTANT: If you include the same field in both the `filterQuery` and `groupBy`, you might receive fewer results than you expect. For example, if you filter by `cloud.region: us-east`, grouping by `cloud.region` will have no effect because the filter query can match only one region. +type ParamsPropertyInfraMetricThreshold_GroupBy struct { + union json.RawMessage +} + +// ParamsPropertyLogThreshold defines model for params_property_log_threshold. +type ParamsPropertyLogThreshold struct { + union json.RawMessage +} + +// ParamsPropertyLogThreshold0 defines model for . 
+type ParamsPropertyLogThreshold0 struct { + Count struct { + Comparator *ParamsPropertyLogThreshold0CountComparator `json:"comparator,omitempty"` + Value *float32 `json:"value,omitempty"` + } `json:"count"` + Criteria *[]struct { + Comparator *ParamsPropertyLogThreshold0CriteriaComparator `json:"comparator,omitempty"` + Field *string `json:"field,omitempty"` + Value *ParamsPropertyLogThreshold_0_Criteria_Value `json:"value,omitempty"` + } `json:"criteria,omitempty"` + GroupBy *[]string `json:"groupBy,omitempty"` + LogView struct { + LogViewId *string `json:"logViewId,omitempty"` + Type *ParamsPropertyLogThreshold0LogViewType `json:"type,omitempty"` + } `json:"logView"` + TimeSize float32 `json:"timeSize"` + TimeUnit ParamsPropertyLogThreshold0TimeUnit `json:"timeUnit"` +} + +// ParamsPropertyLogThreshold0CountComparator defines model for ParamsPropertyLogThreshold.0.Count.Comparator. +type ParamsPropertyLogThreshold0CountComparator string + +// ParamsPropertyLogThreshold0CriteriaComparator defines model for ParamsPropertyLogThreshold.0.Criteria.Comparator. +type ParamsPropertyLogThreshold0CriteriaComparator string + +// ParamsPropertyLogThreshold0CriteriaValue0 defines model for . +type ParamsPropertyLogThreshold0CriteriaValue0 = float32 + +// ParamsPropertyLogThreshold0CriteriaValue1 defines model for . +type ParamsPropertyLogThreshold0CriteriaValue1 = string + +// ParamsPropertyLogThreshold_0_Criteria_Value defines model for ParamsPropertyLogThreshold.0.Criteria.Value. +type ParamsPropertyLogThreshold_0_Criteria_Value struct { + union json.RawMessage +} + +// ParamsPropertyLogThreshold0LogViewType defines model for ParamsPropertyLogThreshold.0.LogView.Type. +type ParamsPropertyLogThreshold0LogViewType string + +// ParamsPropertyLogThreshold0TimeUnit defines model for ParamsPropertyLogThreshold.0.TimeUnit. +type ParamsPropertyLogThreshold0TimeUnit string + +// ParamsPropertyLogThreshold1 defines model for . +type ParamsPropertyLogThreshold1 struct { + Count struct { + Comparator *ParamsPropertyLogThreshold1CountComparator `json:"comparator,omitempty"` + Value *float32 `json:"value,omitempty"` + } `json:"count"` + Criteria *[][]struct { + Comparator *ParamsPropertyLogThreshold1CriteriaComparator `json:"comparator,omitempty"` + Field *string `json:"field,omitempty"` + Value *ParamsPropertyLogThreshold_1_Criteria_Value `json:"value,omitempty"` + } `json:"criteria,omitempty"` + GroupBy *[]string `json:"groupBy,omitempty"` + LogView struct { + LogViewId *string `json:"logViewId,omitempty"` + Type *ParamsPropertyLogThreshold1LogViewType `json:"type,omitempty"` + } `json:"logView"` + TimeSize float32 `json:"timeSize"` + TimeUnit ParamsPropertyLogThreshold1TimeUnit `json:"timeUnit"` +} + +// ParamsPropertyLogThreshold1CountComparator defines model for ParamsPropertyLogThreshold.1.Count.Comparator. +type ParamsPropertyLogThreshold1CountComparator string + +// ParamsPropertyLogThreshold1CriteriaComparator defines model for ParamsPropertyLogThreshold.1.Criteria.Comparator. +type ParamsPropertyLogThreshold1CriteriaComparator string + +// ParamsPropertyLogThreshold1CriteriaValue0 defines model for . +type ParamsPropertyLogThreshold1CriteriaValue0 = float32 + +// ParamsPropertyLogThreshold1CriteriaValue1 defines model for . +type ParamsPropertyLogThreshold1CriteriaValue1 = string + +// ParamsPropertyLogThreshold_1_Criteria_Value defines model for ParamsPropertyLogThreshold.1.Criteria.Value. 
+type ParamsPropertyLogThreshold_1_Criteria_Value struct { + union json.RawMessage +} + +// ParamsPropertyLogThreshold1LogViewType defines model for ParamsPropertyLogThreshold.1.LogView.Type. +type ParamsPropertyLogThreshold1LogViewType string + +// ParamsPropertyLogThreshold1TimeUnit defines model for ParamsPropertyLogThreshold.1.TimeUnit. +type ParamsPropertyLogThreshold1TimeUnit string + +// ParamsPropertySloBurnRate defines model for params_property_slo_burn_rate. +type ParamsPropertySloBurnRate struct { + // BurnRateThreshold The burn rate threshold used to trigger the alert + BurnRateThreshold *float32 `json:"burnRateThreshold,omitempty"` + + // LongWindow The duration of the long window used to compute the burn rate + LongWindow *struct { + // Unit The duration unit + Unit *string `json:"unit,omitempty"` + + // Value The duration value + Value *float32 `json:"value,omitempty"` + } `json:"longWindow,omitempty"` + + // MaxBurnRateThreshold The maximum burn rate threshold value defined by the SLO error budget + MaxBurnRateThreshold *float32 `json:"maxBurnRateThreshold,omitempty"` + + // ShortWindow The duration of the short window used to compute the burn rate + ShortWindow *struct { + // Unit The duration unit + Unit *string `json:"unit,omitempty"` + + // Value The duration value + Value *float32 `json:"value,omitempty"` + } `json:"shortWindow,omitempty"` + + // SloId The SLO identifier used by the rule + SloId *string `json:"sloId,omitempty"` +} + +// ParamsPropertySyntheticsMonitorStatus defines model for params_property_synthetics_monitor_status. +type ParamsPropertySyntheticsMonitorStatus struct { + Availability *struct { + Range *float32 `json:"range,omitempty"` + RangeUnit *string `json:"rangeUnit,omitempty"` + Threshold *string `json:"threshold,omitempty"` + } `json:"availability,omitempty"` + Filters *ParamsPropertySyntheticsMonitorStatus_Filters `json:"filters,omitempty"` + IsAutoGenerated *bool `json:"isAutoGenerated,omitempty"` + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Locations *[]string `json:"locations,omitempty"` + NumTimes float32 `json:"numTimes"` + Search *string `json:"search,omitempty"` + ShouldCheckAvailability bool `json:"shouldCheckAvailability"` + ShouldCheckStatus bool `json:"shouldCheckStatus"` + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Timerange *struct { + From *string `json:"from,omitempty"` + To *string `json:"to,omitempty"` + } `json:"timerange,omitempty"` + TimerangeCount *float32 `json:"timerangeCount,omitempty"` + TimerangeUnit *string `json:"timerangeUnit,omitempty"` + Version *float32 `json:"version,omitempty"` +} + +// ParamsPropertySyntheticsMonitorStatusFilters0 defines model for . +type ParamsPropertySyntheticsMonitorStatusFilters0 = string + +// ParamsPropertySyntheticsMonitorStatusFilters1 defines model for . +type ParamsPropertySyntheticsMonitorStatusFilters1 struct { + MonitorType *[]string `json:"monitor.type,omitempty"` + ObserverGeoName *[]string `json:"observer.geo.name,omitempty"` + Tags *[]string `json:"tags,omitempty"` + UrlPort *[]string `json:"url.port,omitempty"` +} + +// ParamsPropertySyntheticsMonitorStatus_Filters defines model for ParamsPropertySyntheticsMonitorStatus.Filters. +type ParamsPropertySyntheticsMonitorStatus_Filters struct { + union json.RawMessage +} + +// ParamsPropertySyntheticsUptimeTls defines model for params_property_synthetics_uptime_tls. 
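// A minimal sketch of the synthetics monitor status rule parameters defined
// above: alert when a monitor is down numTimes within the given time range.
// The concrete values, and the choice of timerangeCount/timerangeUnit over the
// deprecated timerange object, are illustrative assumptions.
func exampleSyntheticsMonitorStatusParams() ParamsPropertySyntheticsMonitorStatus {
	count := float32(15)
	unit := "m"
	return ParamsPropertySyntheticsMonitorStatus{
		NumTimes:                5,
		ShouldCheckStatus:       true,
		ShouldCheckAvailability: false,
		TimerangeCount:          &count,
		TimerangeUnit:           &unit,
	}
}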
+type ParamsPropertySyntheticsUptimeTls struct { + CertAgeThreshold *float32 `json:"certAgeThreshold,omitempty"` + CertExpirationThreshold *float32 `json:"certExpirationThreshold,omitempty"` + Search *string `json:"search,omitempty"` +} + +// Pfx If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-pfx`, it is a base64 encoded version of the PFX or P12 file. +type Pfx = string + +// ResilientConfig Defines properties for connectors when type is `.resilient`. +type ResilientConfig struct { + // ApiUrl The IBM Resilient instance URL. + ApiUrl string `json:"apiUrl"` + + // OrgId The IBM Resilient organization ID. + OrgId string `json:"orgId"` +} + +// ResilientSecrets Defines secrets for connectors when type is `.resilient`. +type ResilientSecrets struct { + // ApiKeyId The authentication key ID for HTTP Basic authentication. + ApiKeyId string `json:"apiKeyId"` + + // ApiKeySecret The authentication key secret for HTTP Basic authentication. + ApiKeySecret string `json:"apiKeySecret"` +} + +// RunAcknowledgeResolvePagerduty Test an action that acknowledges or resolves a PagerDuty alert. +type RunAcknowledgeResolvePagerduty struct { + // DedupKey The deduplication key for the PagerDuty alert. + DedupKey string `json:"dedupKey"` + + // EventAction The type of event. + EventAction RunAcknowledgeResolvePagerdutyEventAction `json:"eventAction"` +} + +// RunAcknowledgeResolvePagerdutyEventAction The type of event. +type RunAcknowledgeResolvePagerdutyEventAction string + +// RunAddevent The `addEvent` subaction for ServiceNow ITOM connectors. +type RunAddevent struct { + // SubAction The action to test. + SubAction RunAddeventSubAction `json:"subAction"` + + // SubActionParams The set of configuration properties for the action. + SubActionParams *struct { + // AdditionalInfo Additional information about the event. + AdditionalInfo *string `json:"additional_info,omitempty"` + + // Description The details about the event. + Description *string `json:"description,omitempty"` + + // EventClass A specific instance of the source. + EventClass *string `json:"event_class,omitempty"` + + // MessageKey All actions sharing this key are associated with the same ServiceNow alert. The default value is `:`. + MessageKey *string `json:"message_key,omitempty"` + + // MetricName The name of the metric. + MetricName *string `json:"metric_name,omitempty"` + + // Node The host that the event was triggered for. + Node *string `json:"node,omitempty"` + + // Resource The name of the resource. + Resource *string `json:"resource,omitempty"` + + // Severity The severity of the event. + Severity *string `json:"severity,omitempty"` + + // Source The name of the event source type. + Source *string `json:"source,omitempty"` + + // TimeOfEvent The time of the event. + TimeOfEvent *string `json:"time_of_event,omitempty"` + + // Type The type of event. + Type *string `json:"type,omitempty"` + } `json:"subActionParams,omitempty"` +} + +// RunAddeventSubAction The action to test. +type RunAddeventSubAction string + +// RunClosealert The `closeAlert` subaction for Opsgenie connectors. +type RunClosealert struct { + // SubAction The action to test. + SubAction RunClosealertSubAction `json:"subAction"` + SubActionParams struct { + // Alias The unique identifier used for alert deduplication in Opsgenie. The alias must match the value used when creating the alert. + Alias string `json:"alias"` + + // Note Additional information for the alert. 
+ Note *string `json:"note,omitempty"` + + // Source The display name for the source of the alert. + Source *string `json:"source,omitempty"` + + // User The display name for the owner. + User *string `json:"user,omitempty"` + } `json:"subActionParams"` +} + +// RunClosealertSubAction The action to test. +type RunClosealertSubAction string + +// RunCloseincident The `closeIncident` subaction for ServiceNow ITSM connectors. +type RunCloseincident struct { + // SubAction The action to test. + SubAction RunCloseincidentSubAction `json:"subAction"` + SubActionParams struct { + Incident RunCloseincident_SubActionParams_Incident `json:"incident"` + } `json:"subActionParams"` +} + +// RunCloseincidentSubAction The action to test. +type RunCloseincidentSubAction string + +// RunCloseincidentSubActionParamsIncident0 defines model for . +type RunCloseincidentSubActionParamsIncident0 = interface{} + +// RunCloseincidentSubActionParamsIncident1 defines model for . +type RunCloseincidentSubActionParamsIncident1 = interface{} + +// RunCloseincident_SubActionParams_Incident defines model for RunCloseincident.SubActionParams.Incident. +type RunCloseincident_SubActionParams_Incident struct { + // CorrelationId An identifier that is assigned to the incident when it is created by the connector. NOTE: If you use the default value and the rule generates multiple alerts that use the same alert IDs, the latest open incident for this correlation ID is closed unless you specify the external ID. + CorrelationId *string `json:"correlation_id,omitempty"` + + // ExternalId The unique identifier (`incidentId`) for the incident in ServiceNow. + ExternalId *string `json:"externalId,omitempty"` + union json.RawMessage +} + +// RunCreatealert The `createAlert` subaction for Opsgenie and TheHive connectors. +type RunCreatealert struct { + // SubAction The action to test. + SubAction RunCreatealertSubAction `json:"subAction"` + SubActionParams struct { + // Actions The custom actions available to the alert in Opsgenie connectors. + Actions *[]string `json:"actions,omitempty"` + + // Alias The unique identifier used for alert deduplication in Opsgenie. + Alias *string `json:"alias,omitempty"` + + // Description A description that provides detailed information about the alert. + Description *string `json:"description,omitempty"` + + // Details The custom properties of the alert in Opsgenie connectors. + Details *map[string]interface{} `json:"details,omitempty"` + + // Entity The domain of the alert in Opsgenie connectors. For example, the application or server name. + Entity *string `json:"entity,omitempty"` + + // Message The alert message in Opsgenie connectors. + Message *string `json:"message,omitempty"` + + // Note Additional information for the alert in Opsgenie connectors. + Note *string `json:"note,omitempty"` + + // Priority The priority level for the alert in Opsgenie connectors. + Priority *RunCreatealertSubActionParamsPriority `json:"priority,omitempty"` + + // Responders The entities to receive notifications about the alert in Opsgenie connectors. If `type` is `user`, either `id` or `username` is required. If `type` is `team`, either `id` or `name` is required. + Responders *[]struct { + // Id The identifier for the entity. + Id *string `json:"id,omitempty"` + + // Name The name of the entity. + Name *string `json:"name,omitempty"` + + // Type The type of responders, in this case `escalation`. 
+ Type *RunCreatealertSubActionParamsRespondersType `json:"type,omitempty"` + + // Username A valid email address for the user. + Username *string `json:"username,omitempty"` + } `json:"responders,omitempty"` + + // Severity The severity of the incident for TheHive connectors. The value ranges from 1 (low) to 4 (critical) with a default value of 2 (medium). + Severity *int `json:"severity,omitempty"` + + // Source The display name for the source of the alert in Opsgenie and TheHive connectors. + Source *string `json:"source,omitempty"` + + // SourceRef A source reference for the alert in TheHive connectors. + SourceRef *string `json:"sourceRef,omitempty"` + + // Tags The tags for the alert in Opsgenie and TheHive connectors. + Tags *[]string `json:"tags,omitempty"` + + // Title A title for the incident for TheHive connectors. It is used for searching the contents of the knowledge base. + Title *string `json:"title,omitempty"` + + // Tlp The traffic light protocol designation for the incident in TheHive connectors. Valid values include: 0 (clear), 1 (green), 2 (amber), 3 (amber and strict), and 4 (red). + Tlp *int `json:"tlp,omitempty"` + + // Type The type of alert in TheHive connectors. + Type *string `json:"type,omitempty"` + + // User The display name for the owner. + User *string `json:"user,omitempty"` + + // VisibleTo The teams and users that the alert will be visible to without sending a notification. Only one of `id`, `name`, or `username` is required. + VisibleTo *[]struct { + // Id The identifier for the entity. + Id *string `json:"id,omitempty"` + + // Name The name of the entity. + Name *string `json:"name,omitempty"` + + // Type Valid values are `team` and `user`. + Type RunCreatealertSubActionParamsVisibleToType `json:"type"` + + // Username The user name. This property is required only when the `type` is `user`. + Username *string `json:"username,omitempty"` + } `json:"visibleTo,omitempty"` + } `json:"subActionParams"` +} + +// RunCreatealertSubAction The action to test. +type RunCreatealertSubAction string + +// RunCreatealertSubActionParamsPriority The priority level for the alert in Opsgenie connectors. +type RunCreatealertSubActionParamsPriority string + +// RunCreatealertSubActionParamsRespondersType The type of responders, in this case `escalation`. +type RunCreatealertSubActionParamsRespondersType string + +// RunCreatealertSubActionParamsVisibleToType Valid values are `team` and `user`. +type RunCreatealertSubActionParamsVisibleToType string + +// RunDocuments Test an action that indexes a document into Elasticsearch. +type RunDocuments struct { + // Documents The documents in JSON format for index connectors. + Documents []map[string]interface{} `json:"documents"` +} + +// RunFieldsbyissuetype The `fieldsByIssueType` subaction for Jira connectors. +type RunFieldsbyissuetype struct { + // SubAction The action to test. + SubAction RunFieldsbyissuetypeSubAction `json:"subAction"` + SubActionParams struct { + // Id The Jira issue type identifier. + Id string `json:"id"` + } `json:"subActionParams"` +} + +// RunFieldsbyissuetypeSubAction The action to test. +type RunFieldsbyissuetypeSubAction string + +// RunGetagentdetails The `getAgentDetails` subaction for CrowdStrike connectors. +type RunGetagentdetails struct { + // SubAction The action to test. + SubAction RunGetagentdetailsSubAction `json:"subAction"` + + // SubActionParams The set of configuration properties for the action. + SubActionParams struct { + // Ids An array of CrowdStrike agent identifiers. 
+ Ids []string `json:"ids"` + } `json:"subActionParams"` +} + +// RunGetagentdetailsSubAction The action to test. +type RunGetagentdetailsSubAction string + +// RunGetagents The `getAgents` subaction for SentinelOne connectors. +type RunGetagents struct { + // SubAction The action to test. + SubAction RunGetagentsSubAction `json:"subAction"` +} + +// RunGetagentsSubAction The action to test. +type RunGetagentsSubAction string + +// RunGetchoices The `getChoices` subaction for ServiceNow ITOM, ServiceNow ITSM, and ServiceNow SecOps connectors. +type RunGetchoices struct { + // SubAction The action to test. + SubAction RunGetchoicesSubAction `json:"subAction"` + + // SubActionParams The set of configuration properties for the action. + SubActionParams struct { + // Fields An array of fields. + Fields []string `json:"fields"` + } `json:"subActionParams"` +} + +// RunGetchoicesSubAction The action to test. +type RunGetchoicesSubAction string + +// RunGetfields The `getFields` subaction for Jira, ServiceNow ITSM, and ServiceNow SecOps connectors. +type RunGetfields struct { + // SubAction The action to test. + SubAction RunGetfieldsSubAction `json:"subAction"` +} + +// RunGetfieldsSubAction The action to test. +type RunGetfieldsSubAction string + +// RunGetincident The `getIncident` subaction for Jira, ServiceNow ITSM, and ServiceNow SecOps connectors. +type RunGetincident struct { + // SubAction The action to test. + SubAction RunGetincidentSubAction `json:"subAction"` + SubActionParams struct { + // ExternalId The Jira, ServiceNow ITSM, or ServiceNow SecOps issue identifier. + ExternalId string `json:"externalId"` + } `json:"subActionParams"` +} + +// RunGetincidentSubAction The action to test. +type RunGetincidentSubAction string + +// RunIssue The `issue` subaction for Jira connectors. +type RunIssue struct { + // SubAction The action to test. + SubAction RunIssueSubAction `json:"subAction"` + SubActionParams *struct { + // Id The Jira issue identifier. + Id string `json:"id"` + } `json:"subActionParams,omitempty"` +} + +// RunIssueSubAction The action to test. +type RunIssueSubAction string + +// RunIssues The `issues` subaction for Jira connectors. +type RunIssues struct { + // SubAction The action to test. + SubAction RunIssuesSubAction `json:"subAction"` + SubActionParams struct { + // Title The title of the Jira issue. + Title string `json:"title"` + } `json:"subActionParams"` +} + +// RunIssuesSubAction The action to test. +type RunIssuesSubAction string + +// RunIssuetypes The `issueTypes` subaction for Jira connectors. +type RunIssuetypes struct { + // SubAction The action to test. + SubAction RunIssuetypesSubAction `json:"subAction"` +} + +// RunIssuetypesSubAction The action to test. +type RunIssuetypesSubAction string + +// RunMessageEmail Test an action that sends an email message. There must be at least one recipient in `to`, `cc`, or `bcc`. +type RunMessageEmail struct { + // Bcc A list of "blind carbon copy" email addresses. Addresses can be specified in `user@host-name` format or in name `` format + Bcc *[]string `json:"bcc,omitempty"` + + // Cc A list of "carbon copy" email addresses. Addresses can be specified in `user@host-name` format or in name `` format + Cc *[]string `json:"cc,omitempty"` + + // Message The email message text. Markdown format is supported. + Message string `json:"message"` + + // Subject The subject line of the email. + Subject string `json:"subject"` + + // To A list of email addresses. 
Addresses can be specified in `user@host-name` format or in name `<user@host-name>` format.
+ To *[]string `json:"to,omitempty"`
+}
+
+// RunMessageServerlog Test an action that writes an entry to the Kibana server log.
+type RunMessageServerlog struct {
+ // Level The log level of the message for server log connectors.
+ Level *RunMessageServerlogLevel `json:"level,omitempty"`
+
+ // Message The message for server log connectors.
+ Message string `json:"message"`
+}
+
+// RunMessageServerlogLevel The log level of the message for server log connectors.
+type RunMessageServerlogLevel string
+
+// RunMessageSlack Test an action that sends a message to Slack. It is applicable only when the connector type is `.slack`.
+type RunMessageSlack struct {
+ // Message The Slack message text, which cannot contain Markdown, images, or other advanced formatting.
+ Message string `json:"message"`
+}
+
+// RunPostmessage Test an action that sends a message to Slack. It is applicable only when the connector type is `.slack_api`.
+type RunPostmessage struct {
+ // SubAction The action to test.
+ SubAction RunPostmessageSubAction `json:"subAction"`
+
+ // SubActionParams The set of configuration properties for the action.
+ SubActionParams struct {
+ // ChannelIds The Slack channel identifier, which must be one of the `allowedChannels` in the connector configuration.
+ ChannelIds *[]string `json:"channelIds,omitempty"`
+
+ // Channels The name of a channel that your Slack app has access to.
+ // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set
+ Channels *[]string `json:"channels,omitempty"`
+
+ // Text The Slack message text. If it is a Slack webhook connector, the text cannot contain Markdown, images, or other advanced formatting. If it is a Slack web API connector, it can contain either plain text or block kit messages.
+ Text *string `json:"text,omitempty"`
+ } `json:"subActionParams"`
+}
+
+// RunPostmessageSubAction The action to test.
+type RunPostmessageSubAction string
+
+// RunPushtoservice The `pushToService` subaction for Jira, ServiceNow ITSM, ServiceNow SecOps, Swimlane, TheHive, and Webhook - Case Management connectors.
+type RunPushtoservice struct {
+ // SubAction The action to test.
+ SubAction RunPushtoserviceSubAction `json:"subAction"`
+
+ // SubActionParams The set of configuration properties for the action.
+ SubActionParams struct {
+ // Comments Additional information that is sent to Jira, ServiceNow ITSM, ServiceNow SecOps, Swimlane, or TheHive.
+ Comments *[]struct {
+ // Comment A comment related to the incident. For example, describe how to troubleshoot the issue.
+ Comment *string `json:"comment,omitempty"`
+
+ // CommentId A unique identifier for the comment.
+ CommentId *int `json:"commentId,omitempty"`
+ } `json:"comments,omitempty"`
+
+ // Incident Information necessary to create or update a Jira, ServiceNow ITSM, ServiceNow SecOps, Swimlane, or TheHive incident.
+ Incident *struct {
+ // AdditionalFields Additional fields for ServiceNow ITSM and ServiceNow SecOps connectors. The fields must exist in the Elastic ServiceNow application and must be specified in JSON format.
+ AdditionalFields *string `json:"additional_fields,omitempty"`
+
+ // AlertId The alert identifier for Swimlane connectors.
+ AlertId *string `json:"alertId,omitempty"`
+
+ // CaseId The case identifier for the incident for Swimlane connectors.
+ CaseId *string `json:"caseId,omitempty"`
+
+ // CaseName The case name for the incident for Swimlane connectors.
+ CaseName *string `json:"caseName,omitempty"`
+
+ // Category The category of the incident for ServiceNow ITSM and ServiceNow SecOps connectors.
+ Category *string `json:"category,omitempty"`
+
+ // CorrelationDisplay A descriptive label of the alert for correlation purposes for ServiceNow ITSM and ServiceNow SecOps connectors.
+ CorrelationDisplay *string `json:"correlation_display,omitempty"`
+
+ // CorrelationId The correlation identifier for the security incident for ServiceNow ITSM and ServiceNow SecOps connectors. Connectors using the same correlation ID are associated with the same ServiceNow incident. This value determines whether a new ServiceNow incident is created or an existing one is updated. Modifying this value is optional; if not modified, the rule ID and alert ID are combined as `{{ruleID}}:{{alert ID}}` to form the correlation ID value in ServiceNow. The maximum character length for this value is 100 characters. NOTE: Using the default configuration of `{{ruleID}}:{{alert ID}}` ensures that ServiceNow creates a separate incident record for every generated alert that uses a unique alert ID. If the rule generates multiple alerts that use the same alert IDs, ServiceNow creates and continually updates a single incident record for the alert.
+ CorrelationId *string `json:"correlation_id,omitempty"`
+
+ // Description The description of the incident for Jira, ServiceNow ITSM, ServiceNow SecOps, Swimlane, TheHive, and Webhook - Case Management connectors.
+ Description *string `json:"description,omitempty"`
+
+ // DestIp A list of destination IP addresses related to the security incident for ServiceNow SecOps connectors. The IPs are added as observables to the security incident.
+ DestIp *RunPushtoservice_SubActionParams_Incident_DestIp `json:"dest_ip,omitempty"`
+
+ // ExternalId The Jira, ServiceNow ITSM, or ServiceNow SecOps issue identifier. If present, the incident is updated. Otherwise, a new incident is created.
+ ExternalId *string `json:"externalId,omitempty"`
+
+ // Id The external case identifier for Webhook - Case Management connectors.
+ Id *string `json:"id,omitempty"`
+
+ // Impact The impact of the incident for ServiceNow ITSM connectors.
+ Impact *string `json:"impact,omitempty"`
+
+ // IssueType The type of incident for Jira connectors. For example, 10006. To obtain the list of valid values, set `subAction` to `issueTypes`.
+ IssueType *int `json:"issueType,omitempty"`
+
+ // Labels The labels for the incident for Jira connectors. NOTE: Labels cannot contain spaces.
+ Labels *[]string `json:"labels,omitempty"`
+
+ // MalwareHash A list of malware hashes related to the security incident for ServiceNow SecOps connectors. The hashes are added as observables to the security incident.
+ MalwareHash *RunPushtoservice_SubActionParams_Incident_MalwareHash `json:"malware_hash,omitempty"`
+
+ // MalwareUrl A list of malware URLs related to the security incident for ServiceNow SecOps connectors. The URLs are added as observables to the security incident.
+ MalwareUrl *string `json:"malware_url,omitempty"`
+
+ // OtherFields Custom field identifiers and their values for Jira connectors.
+ OtherFields *map[string]interface{} `json:"otherFields,omitempty"`
+
+ // Parent The ID or key of the parent issue for Jira connectors. Applies only to `Sub-task` types of issues.
+ Parent *string `json:"parent,omitempty"`
+
+ // Priority The priority of the incident in Jira and ServiceNow SecOps connectors.
+ Priority *string `json:"priority,omitempty"` + + // RuleName The rule name for Swimlane connectors. + RuleName *string `json:"ruleName,omitempty"` + + // Severity The severity of the incident for ServiceNow ITSM, Swimlane, and TheHive connectors. In TheHive connectors, the severity value ranges from 1 (low) to 4 (critical) with a default value of 2 (medium). + Severity *int `json:"severity,omitempty"` + + // ShortDescription A short description of the incident for ServiceNow ITSM and ServiceNow SecOps connectors. It is used for searching the contents of the knowledge base. + ShortDescription *string `json:"short_description,omitempty"` + + // SourceIp A list of source IP addresses related to the security incident for ServiceNow SecOps connectors. The IPs are added as observables to the security incident. + SourceIp *RunPushtoservice_SubActionParams_Incident_SourceIp `json:"source_ip,omitempty"` + + // Status The status of the incident for Webhook - Case Management connectors. + Status *string `json:"status,omitempty"` + + // Subcategory The subcategory of the incident for ServiceNow ITSM and ServiceNow SecOps connectors. + Subcategory *string `json:"subcategory,omitempty"` + + // Summary A summary of the incident for Jira connectors. + Summary *string `json:"summary,omitempty"` + + // Tags A list of tags for TheHive and Webhook - Case Management connectors. + Tags *[]string `json:"tags,omitempty"` + + // Title A title for the incident for Jira, TheHive, and Webhook - Case Management connectors. It is used for searching the contents of the knowledge base. + Title *string `json:"title,omitempty"` + + // Tlp The traffic light protocol designation for the incident in TheHive connectors. Valid values include: 0 (clear), 1 (green), 2 (amber), 3 (amber and strict), and 4 (red). + Tlp *int `json:"tlp,omitempty"` + + // Urgency The urgency of the incident for ServiceNow ITSM connectors. + Urgency *string `json:"urgency,omitempty"` + } `json:"incident,omitempty"` + } `json:"subActionParams"` +} + +// RunPushtoserviceSubAction The action to test. +type RunPushtoserviceSubAction string + +// RunPushtoserviceSubActionParamsIncidentDestIp0 defines model for . +type RunPushtoserviceSubActionParamsIncidentDestIp0 = string + +// RunPushtoserviceSubActionParamsIncidentDestIp1 defines model for . +type RunPushtoserviceSubActionParamsIncidentDestIp1 = []string + +// RunPushtoservice_SubActionParams_Incident_DestIp A list of destination IP addresses related to the security incident for ServiceNow SecOps connectors. The IPs are added as observables to the security incident. +type RunPushtoservice_SubActionParams_Incident_DestIp struct { + union json.RawMessage +} + +// RunPushtoserviceSubActionParamsIncidentMalwareHash0 defines model for . +type RunPushtoserviceSubActionParamsIncidentMalwareHash0 = string + +// RunPushtoserviceSubActionParamsIncidentMalwareHash1 defines model for . +type RunPushtoserviceSubActionParamsIncidentMalwareHash1 = []string + +// RunPushtoservice_SubActionParams_Incident_MalwareHash A list of malware hashes related to the security incident for ServiceNow SecOps connectors. The hashes are added as observables to the security incident. +type RunPushtoservice_SubActionParams_Incident_MalwareHash struct { + union json.RawMessage +} + +// RunPushtoserviceSubActionParamsIncidentSourceIp0 defines model for . +type RunPushtoserviceSubActionParamsIncidentSourceIp0 = string + +// RunPushtoserviceSubActionParamsIncidentSourceIp1 defines model for . 
+type RunPushtoserviceSubActionParamsIncidentSourceIp1 = []string
+
+// RunPushtoservice_SubActionParams_Incident_SourceIp A list of source IP addresses related to the security incident for ServiceNow SecOps connectors. The IPs are added as observables to the security incident.
+type RunPushtoservice_SubActionParams_Incident_SourceIp struct {
+ union json.RawMessage
+}
+
+// RunTriggerPagerduty Test an action that triggers a PagerDuty alert.
+type RunTriggerPagerduty struct {
+ // Class The class or type of the event.
+ Class *string `json:"class,omitempty"`
+
+ // Component The component of the source machine that is responsible for the event.
+ Component *string `json:"component,omitempty"`
+
+ // CustomDetails Additional details to add to the event.
+ CustomDetails *map[string]interface{} `json:"customDetails,omitempty"`
+
+ // DedupKey All actions sharing this key will be associated with the same PagerDuty alert. This value is used to correlate trigger and resolution.
+ DedupKey *string `json:"dedupKey,omitempty"`
+
+ // EventAction The type of event.
+ EventAction RunTriggerPagerdutyEventAction `json:"eventAction"`
+
+ // Group The logical grouping of components of a service.
+ Group *string `json:"group,omitempty"`
+
+ // Links A list of links to add to the event.
+ Links *[]struct {
+ // Href The URL for the link.
+ Href *string `json:"href,omitempty"`
+
+ // Text A plain text description of the purpose of the link.
+ Text *string `json:"text,omitempty"`
+ } `json:"links,omitempty"`
+
+ // Severity The severity of the event on the affected system.
+ Severity *RunTriggerPagerdutySeverity `json:"severity,omitempty"`
+
+ // Source The affected system, such as a hostname or fully qualified domain name. Defaults to the Kibana saved object id of the action.
+ Source *string `json:"source,omitempty"`
+
+ // Summary A summary of the event.
+ Summary *string `json:"summary,omitempty"`
+
+ // Timestamp An ISO-8601 timestamp that indicates when the event was detected or generated.
+ Timestamp *time.Time `json:"timestamp,omitempty"`
+}
+
+// RunTriggerPagerdutyEventAction The type of event.
+type RunTriggerPagerdutyEventAction string
+
+// RunTriggerPagerdutySeverity The severity of the event on the affected system.
+type RunTriggerPagerdutySeverity string
+
+// RunValidchannelid Retrieves information about a valid Slack channel identifier. It is applicable only when the connector type is `.slack_api`.
+type RunValidchannelid struct {
+ // SubAction The action to test.
+ SubAction RunValidchannelidSubAction `json:"subAction"`
+ SubActionParams struct {
+ // ChannelId The Slack channel identifier.
+ ChannelId string `json:"channelId"`
+ } `json:"subActionParams"`
+}
+
+// RunValidchannelidSubAction The action to test.
+type RunValidchannelidSubAction string
+
+// SentineloneConfig Defines properties for connectors when type is `.sentinelone`.
+type SentineloneConfig struct {
+ // Url The SentinelOne tenant URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts.
+ Url string `json:"url"`
+}
+
+// SentineloneSecrets Defines secrets for connectors when type is `.sentinelone`.
+type SentineloneSecrets struct {
+ // Token The SentinelOne API token.
+ Token string `json:"token"`
+}
+
+// ServerHost defines model for server_host.
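+
+// NOTE (editorial sketch, not generated code): the *_DestIp, *_MalwareHash, and
+// *_SourceIp wrapper structs above model a JSON "oneOf": the unexported
+// `union json.RawMessage` field holds either a single string (the `...0` alias)
+// or a list of strings (the `...1` alias), and the generated (un)marshalling
+// helpers pick the variant at runtime. Assuming that behaviour, both of the
+// following hypothetical incident payloads would be accepted:
+//
+//	{"incident": {"dest_ip": "192.0.2.10"}}
+//	{"incident": {"dest_ip": ["192.0.2.10", "192.0.2.11"]}}
+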
+type ServerHost struct { + HostUrls []string `json:"host_urls"` + Id string `json:"id"` + IsDefault *bool `json:"is_default,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + EsKey *ServerHost_Secrets_Ssl_EsKey `json:"es_key,omitempty"` + Key *ServerHost_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + ClientAuth *ServerHostSslClientAuth `json:"client_auth,omitempty"` + EsCertificate *string `json:"es_certificate,omitempty"` + EsCertificateAuthorities *[]string `json:"es_certificate_authorities,omitempty"` + EsKey *string `json:"es_key,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` +} + +// ServerHostSecretsSslEsKey0 defines model for . +type ServerHostSecretsSslEsKey0 struct { + Id string `json:"id"` +} + +// ServerHostSecretsSslEsKey1 defines model for . +type ServerHostSecretsSslEsKey1 = string + +// ServerHost_Secrets_Ssl_EsKey defines model for ServerHost.Secrets.Ssl.EsKey. +type ServerHost_Secrets_Ssl_EsKey struct { + union json.RawMessage +} + +// ServerHostSecretsSslKey0 defines model for . +type ServerHostSecretsSslKey0 struct { + Id string `json:"id"` +} + +// ServerHostSecretsSslKey1 defines model for . +type ServerHostSecretsSslKey1 = string + +// ServerHost_Secrets_Ssl_Key defines model for ServerHost.Secrets.Ssl.Key. +type ServerHost_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// ServerHostSslClientAuth defines model for ServerHost.Ssl.ClientAuth. +type ServerHostSslClientAuth string + +// ServicenowConfig Defines properties for connectors when type is `.servicenow`. +type ServicenowConfig struct { + // ApiUrl The ServiceNow instance URL. + ApiUrl string `json:"apiUrl"` + + // ClientId The client ID assigned to your OAuth application. This property is required when `isOAuth` is `true`. + ClientId *string `json:"clientId,omitempty"` + + // IsOAuth The type of authentication to use. The default value is false, which means basic authentication is used instead of open authorization (OAuth). + IsOAuth *bool `json:"isOAuth,omitempty"` + + // JwtKeyId The key identifier assigned to the JWT verifier map of your OAuth application. This property is required when `isOAuth` is `true`. + JwtKeyId *string `json:"jwtKeyId,omitempty"` + + // UserIdentifierValue The identifier to use for OAuth authentication. This identifier should be the user field you selected when you created an OAuth JWT API endpoint for external clients in your ServiceNow instance. For example, if the selected user field is `Email`, the user identifier should be the user's email address. This property is required when `isOAuth` is `true`. + UserIdentifierValue *string `json:"userIdentifierValue,omitempty"` + + // UsesTableApi Determines whether the connector uses the Table API or the Import Set API. This property is supported only for ServiceNow ITSM and ServiceNow SecOps connectors. NOTE: If this property is set to `false`, the Elastic application should be installed in ServiceNow. + UsesTableApi *bool `json:"usesTableApi,omitempty"` +} + +// ServicenowItomConfig Defines properties for connectors when type is `.servicenow-itom`. +type ServicenowItomConfig struct { + // ApiUrl The ServiceNow instance URL. 
+ ApiUrl string `json:"apiUrl"` + + // ClientId The client ID assigned to your OAuth application. This property is required when `isOAuth` is `true`. + ClientId *string `json:"clientId,omitempty"` + + // IsOAuth The type of authentication to use. The default value is false, which means basic authentication is used instead of open authorization (OAuth). + IsOAuth *bool `json:"isOAuth,omitempty"` + + // JwtKeyId The key identifier assigned to the JWT verifier map of your OAuth application. This property is required when `isOAuth` is `true`. + JwtKeyId *string `json:"jwtKeyId,omitempty"` + + // UserIdentifierValue The identifier to use for OAuth authentication. This identifier should be the user field you selected when you created an OAuth JWT API endpoint for external clients in your ServiceNow instance. For example, if the selected user field is `Email`, the user identifier should be the user's email address. This property is required when `isOAuth` is `true`. + UserIdentifierValue *string `json:"userIdentifierValue,omitempty"` +} + +// ServicenowSecrets Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. +type ServicenowSecrets struct { + // ClientSecret The client secret assigned to your OAuth application. This property is required when `isOAuth` is `true`. + ClientSecret *string `json:"clientSecret,omitempty"` + + // Password The password for HTTP basic authentication. This property is required when `isOAuth` is `false`. + Password *string `json:"password,omitempty"` + + // PrivateKey The RSA private key that you created for use in ServiceNow. This property is required when `isOAuth` is `true`. + PrivateKey *string `json:"privateKey,omitempty"` + + // PrivateKeyPassword The password for the RSA private key. This property is required when `isOAuth` is `true` and you set a password on your private key. + PrivateKeyPassword *string `json:"privateKeyPassword,omitempty"` + + // Username The username for HTTP basic authentication. This property is required when `isOAuth` is `false`. + Username *string `json:"username,omitempty"` +} + +// Size The number of documents to pass to the configured actions when the threshold condition is met. +type Size = int + +// SlackApiConfig Defines properties for connectors when type is `.slack_api`. +type SlackApiConfig struct { + // AllowedChannels A list of valid Slack channels. + AllowedChannels *[]struct { + // Id The Slack channel ID. + Id string `json:"id"` + + // Name The Slack channel name. + Name string `json:"name"` + } `json:"allowedChannels,omitempty"` +} + +// SlackApiSecrets Defines secrets for connectors when type is `.slack`. +type SlackApiSecrets struct { + // Token Slack bot user OAuth token. + Token string `json:"token"` +} + +// SwimlaneConfig Defines properties for connectors when type is `.swimlane`. +type SwimlaneConfig struct { + // ApiUrl The Swimlane instance URL. + ApiUrl string `json:"apiUrl"` + + // AppId The Swimlane application ID. + AppId string `json:"appId"` + + // ConnectorType The type of connector. Valid values are `all`, `alerts`, and `cases`. + ConnectorType SwimlaneConfigConnectorType `json:"connectorType"` + + // Mappings The field mapping. + Mappings *struct { + // AlertIdConfig Mapping for the alert ID. + AlertIdConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. 
+ Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"alertIdConfig,omitempty"` + + // CaseIdConfig Mapping for the case ID. + CaseIdConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"caseIdConfig,omitempty"` + + // CaseNameConfig Mapping for the case name. + CaseNameConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"caseNameConfig,omitempty"` + + // CommentsConfig Mapping for the case comments. + CommentsConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"commentsConfig,omitempty"` + + // DescriptionConfig Mapping for the case description. + DescriptionConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"descriptionConfig,omitempty"` + + // RuleNameConfig Mapping for the name of the alert's rule. + RuleNameConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"ruleNameConfig,omitempty"` + + // SeverityConfig Mapping for the severity. + SeverityConfig *struct { + // FieldType The type of field in Swimlane. + FieldType string `json:"fieldType"` + + // Id The identifier for the field in Swimlane. + Id string `json:"id"` + + // Key The key for the field in Swimlane. + Key string `json:"key"` + + // Name The name of the field in Swimlane. + Name string `json:"name"` + } `json:"severityConfig,omitempty"` + } `json:"mappings,omitempty"` +} + +// SwimlaneConfigConnectorType The type of connector. Valid values are `all`, `alerts`, and `cases`. +type SwimlaneConfigConnectorType string + +// SwimlaneSecrets Defines secrets for connectors when type is `.swimlane`. +type SwimlaneSecrets struct { + // ApiToken Swimlane API authentication token. + ApiToken *string `json:"apiToken,omitempty"` +} + +// TeamsSecrets Defines secrets for connectors when type is `.teams`. +type TeamsSecrets struct { + // WebhookUrl The URL of the incoming webhook. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + WebhookUrl string `json:"webhookUrl"` +} + +// Termfield The names of up to four fields that are used for grouping the aggregation. This property is required when `groupBy` is `top`. 
+type Termfield struct { + union json.RawMessage +} + +// Termfield0 defines model for . +type Termfield0 = string + +// Termfield1 defines model for . +type Termfield1 = []string + +// Termsize This property is required when `groupBy` is `top`. It specifies the number of groups to check against the threshold and therefore limits the number of alerts on high cardinality fields. +type Termsize = int + +// ThehiveConfig Defines configuration properties for connectors when type is `.thehive`. +type ThehiveConfig struct { + // Organisation The organisation in TheHive that will contain the alerts or cases. By default, the connector uses the default organisation of the user account that created the API key. + Organisation *string `json:"organisation,omitempty"` + + // Url The instance URL in TheHive. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + Url string `json:"url"` +} + +// ThehiveSecrets Defines secrets for connectors when type is `.thehive`. +type ThehiveSecrets struct { + // ApiKey The API key for authentication in TheHive. + ApiKey string `json:"apiKey"` +} + +// Threshold The threshold value that is used with the `thresholdComparator`. If the `thresholdComparator` is `between` or `notBetween`, you must specify the boundary values. +type Threshold = []int + +// Thresholdcomparator The comparison function for the threshold. For example, "is above", "is above or equals", "is below", "is below or equals", "is between", and "is not between". +type Thresholdcomparator string + +// Timefield The field that is used to calculate the time window. +type Timefield = string + +// Timewindowsize The size of the time window (in `timeWindowUnit` units), which determines how far back to search for documents. Generally it should be a value higher than the rule check interval to avoid gaps in detection. +type Timewindowsize = int + +// Timewindowunit The type of units for the time window: seconds, minutes, hours, or days. +type Timewindowunit string + +// TinesConfig Defines properties for connectors when type is `.tines`. +type TinesConfig struct { + // Url The Tines tenant URL. If you are using the `xpack.actions.allowedHosts` setting, make sure this hostname is added to the allowed hosts. + Url string `json:"url"` +} + +// TinesSecrets Defines secrets for connectors when type is `.tines`. +type TinesSecrets struct { + // Email The email used to sign in to Tines. + Email string `json:"email"` + + // Token The Tines API token. + Token string `json:"token"` +} + +// TorqConfig Defines properties for connectors when type is `.torq`. +type TorqConfig struct { + // WebhookIntegrationUrl The endpoint URL of the Elastic Security integration in Torq. + WebhookIntegrationUrl string `json:"webhookIntegrationUrl"` +} + +// TorqSecrets Defines secrets for connectors when type is `.torq`. +type TorqSecrets struct { + // Token The secret of the webhook authentication header. + Token string `json:"token"` +} + +// UpdateConnectorConfig The connector configuration details. +type UpdateConnectorConfig struct { + AdditionalProperties map[string]interface{} `json:"-"` + union json.RawMessage +} + +// UpdateConnectorSecrets defines model for update_connector_secrets. +type UpdateConnectorSecrets struct { + AdditionalProperties map[string]interface{} `json:"-"` + union json.RawMessage +} + +// UpdateOutputElasticsearch defines model for update_output_elasticsearch. 
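+
+// NOTE (editorial sketch, not generated code): Threshold, Thresholdcomparator,
+// Termfield, Termsize, Timefield, Timewindowsize, and Timewindowunit above are
+// parameter aliases for the Elasticsearch query threshold rule. Assuming the
+// camelCase JSON names used elsewhere in this API, a rule that fires when more
+// than 100 documents match within a 5-minute window might carry params roughly
+// like the following (illustrative only; the concrete rule params schema is
+// defined elsewhere in this file):
+//
+//	{
+//	  "threshold": [100],
+//	  "thresholdComparator": ">",
+//	  "timeField": "@timestamp",
+//	  "timeWindowSize": 5,
+//	  "timeWindowUnit": "m"
+//	}
+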
+type UpdateOutputElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` + Hosts *[]string `json:"hosts,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name *string `json:"name,omitempty"` + Preset *UpdateOutputElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + Key *UpdateOutputElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + Type *UpdateOutputElasticsearchType `json:"type,omitempty"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` +} + +// UpdateOutputElasticsearchPreset defines model for UpdateOutputElasticsearch.Preset. +type UpdateOutputElasticsearchPreset string + +// UpdateOutputElasticsearchSecretsSslKey0 defines model for . +type UpdateOutputElasticsearchSecretsSslKey0 struct { + Id string `json:"id"` +} + +// UpdateOutputElasticsearchSecretsSslKey1 defines model for . +type UpdateOutputElasticsearchSecretsSslKey1 = string + +// UpdateOutputElasticsearch_Secrets_Ssl_Key defines model for UpdateOutputElasticsearch.Secrets.Ssl.Key. +type UpdateOutputElasticsearch_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// UpdateOutputElasticsearchType defines model for UpdateOutputElasticsearch.Type. +type UpdateOutputElasticsearchType string + +// UpdateOutputKafka defines model for update_output_kafka. 
+type UpdateOutputKafka struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + AuthType *UpdateOutputKafkaAuthType `json:"auth_type,omitempty"` + BrokerTimeout *float32 `json:"broker_timeout,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ClientId *string `json:"client_id,omitempty"` + Compression *UpdateOutputKafkaCompression `json:"compression,omitempty"` + CompressionLevel interface{} `json:"compression_level"` + ConfigYaml *string `json:"config_yaml,omitempty"` + ConnectionType interface{} `json:"connection_type"` + Hash *struct { + Hash *string `json:"hash,omitempty"` + Random *bool `json:"random,omitempty"` + } `json:"hash,omitempty"` + Headers *[]struct { + Key string `json:"key"` + Value string `json:"value"` + } `json:"headers,omitempty"` + Hosts *[]string `json:"hosts,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Key *string `json:"key,omitempty"` + Name string `json:"name"` + Partition *UpdateOutputKafkaPartition `json:"partition,omitempty"` + Password interface{} `json:"password"` + ProxyId *string `json:"proxy_id,omitempty"` + Random *struct { + GroupEvents *float32 `json:"group_events,omitempty"` + } `json:"random,omitempty"` + RequiredAcks *UpdateOutputKafkaRequiredAcks `json:"required_acks,omitempty"` + RoundRobin *struct { + GroupEvents *float32 `json:"group_events,omitempty"` + } `json:"round_robin,omitempty"` + Sasl *struct { + Mechanism *UpdateOutputKafkaSaslMechanism `json:"mechanism,omitempty"` + } `json:"sasl,omitempty"` + Secrets *struct { + Password *UpdateOutputKafka_Secrets_Password `json:"password,omitempty"` + Ssl *struct { + Key UpdateOutputKafka_Secrets_Ssl_Key `json:"key"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + Timeout *float32 `json:"timeout,omitempty"` + Topic *string `json:"topic,omitempty"` + Type *UpdateOutputKafkaType `json:"type,omitempty"` + Username interface{} `json:"username"` + Version *string `json:"version,omitempty"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` +} + +// UpdateOutputKafkaAuthType defines model for UpdateOutputKafka.AuthType. +type UpdateOutputKafkaAuthType string + +// UpdateOutputKafkaCompression defines model for UpdateOutputKafka.Compression. +type UpdateOutputKafkaCompression string + +// UpdateOutputKafkaPartition defines model for UpdateOutputKafka.Partition. +type UpdateOutputKafkaPartition string + +// UpdateOutputKafkaRequiredAcks defines model for UpdateOutputKafka.RequiredAcks. +type UpdateOutputKafkaRequiredAcks int + +// UpdateOutputKafkaSaslMechanism defines model for UpdateOutputKafka.Sasl.Mechanism. +type UpdateOutputKafkaSaslMechanism string + +// UpdateOutputKafkaSecretsPassword0 defines model for . +type UpdateOutputKafkaSecretsPassword0 struct { + Id string `json:"id"` +} + +// UpdateOutputKafkaSecretsPassword1 defines model for . +type UpdateOutputKafkaSecretsPassword1 = string + +// UpdateOutputKafka_Secrets_Password defines model for UpdateOutputKafka.Secrets.Password. +type UpdateOutputKafka_Secrets_Password struct { + union json.RawMessage +} + +// UpdateOutputKafkaSecretsSslKey0 defines model for . 
+type UpdateOutputKafkaSecretsSslKey0 struct { + Id string `json:"id"` +} + +// UpdateOutputKafkaSecretsSslKey1 defines model for . +type UpdateOutputKafkaSecretsSslKey1 = string + +// UpdateOutputKafka_Secrets_Ssl_Key defines model for UpdateOutputKafka.Secrets.Ssl.Key. +type UpdateOutputKafka_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// UpdateOutputKafkaType defines model for UpdateOutputKafka.Type. +type UpdateOutputKafkaType string + +// UpdateOutputLogstash defines model for update_output_logstash. +type UpdateOutputLogstash struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` + Hosts *[]string `json:"hosts,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name *string `json:"name,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + Key *UpdateOutputLogstash_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + Type *UpdateOutputLogstashType `json:"type,omitempty"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` +} + +// UpdateOutputLogstashSecretsSslKey0 defines model for . +type UpdateOutputLogstashSecretsSslKey0 struct { + Id string `json:"id"` +} + +// UpdateOutputLogstashSecretsSslKey1 defines model for . +type UpdateOutputLogstashSecretsSslKey1 = string + +// UpdateOutputLogstash_Secrets_Ssl_Key defines model for UpdateOutputLogstash.Secrets.Ssl.Key. +type UpdateOutputLogstash_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// UpdateOutputLogstashType defines model for UpdateOutputLogstash.Type. +type UpdateOutputLogstashType string + +// UpdateOutputRemoteElasticsearch defines model for update_output_remote_elasticsearch. 
+type UpdateOutputRemoteElasticsearch struct { + AllowEdit *[]string `json:"allow_edit,omitempty"` + CaSha256 *string `json:"ca_sha256,omitempty"` + CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` + ConfigYaml *string `json:"config_yaml,omitempty"` + Hosts *[]string `json:"hosts,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + KibanaApiKey *string `json:"kibana_api_key,omitempty"` + KibanaUrl *string `json:"kibana_url,omitempty"` + Name *string `json:"name,omitempty"` + Preset *UpdateOutputRemoteElasticsearchPreset `json:"preset,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + ServiceToken *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken `json:"service_token,omitempty"` + Ssl *struct { + Key *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + ServiceToken *string `json:"service_token,omitempty"` + Shipper *UpdateOutputShipper `json:"shipper,omitempty"` + Ssl *UpdateOutputSsl `json:"ssl,omitempty"` + SyncIntegrations *bool `json:"sync_integrations,omitempty"` + SyncUninstalledIntegrations *bool `json:"sync_uninstalled_integrations,omitempty"` + Type *UpdateOutputRemoteElasticsearchType `json:"type,omitempty"` + WriteToLogsStreams *bool `json:"write_to_logs_streams,omitempty"` +} + +// UpdateOutputRemoteElasticsearchPreset defines model for UpdateOutputRemoteElasticsearch.Preset. +type UpdateOutputRemoteElasticsearchPreset string + +// UpdateOutputRemoteElasticsearchSecretsServiceToken0 defines model for . +type UpdateOutputRemoteElasticsearchSecretsServiceToken0 struct { + Id string `json:"id"` +} + +// UpdateOutputRemoteElasticsearchSecretsServiceToken1 defines model for . +type UpdateOutputRemoteElasticsearchSecretsServiceToken1 = string + +// UpdateOutputRemoteElasticsearch_Secrets_ServiceToken defines model for UpdateOutputRemoteElasticsearch.Secrets.ServiceToken. +type UpdateOutputRemoteElasticsearch_Secrets_ServiceToken struct { + union json.RawMessage +} + +// UpdateOutputRemoteElasticsearchSecretsSslKey0 defines model for . +type UpdateOutputRemoteElasticsearchSecretsSslKey0 struct { + Id string `json:"id"` +} + +// UpdateOutputRemoteElasticsearchSecretsSslKey1 defines model for . +type UpdateOutputRemoteElasticsearchSecretsSslKey1 = string + +// UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key defines model for UpdateOutputRemoteElasticsearch.Secrets.Ssl.Key. +type UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// UpdateOutputRemoteElasticsearchType defines model for UpdateOutputRemoteElasticsearch.Type. +type UpdateOutputRemoteElasticsearchType string + +// UpdateOutputShipper defines model for update_output_shipper. 
+type UpdateOutputShipper struct { + CompressionLevel *float32 `json:"compression_level,omitempty"` + DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` + DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` + DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` + DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` + DiskQueuePath *string `json:"disk_queue_path,omitempty"` + Loadbalance *bool `json:"loadbalance,omitempty"` + MaxBatchBytes *float32 `json:"max_batch_bytes,omitempty"` + MemQueueEvents *float32 `json:"mem_queue_events,omitempty"` + QueueFlushTimeout *float32 `json:"queue_flush_timeout,omitempty"` +} + +// UpdateOutputSsl defines model for update_output_ssl. +type UpdateOutputSsl struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + VerificationMode *UpdateOutputSslVerificationMode `json:"verification_mode,omitempty"` +} + +// UpdateOutputSslVerificationMode defines model for UpdateOutputSsl.VerificationMode. +type UpdateOutputSslVerificationMode string + +// UpdateOutputUnion defines model for update_output_union. +type UpdateOutputUnion struct { + union json.RawMessage +} + +// VerificationMode Controls the verification of certificates. Use `full` to validate that the certificate has an issue date within the `not_before` and `not_after` dates, chains to a trusted certificate authority (CA), and has a hostname or IP address that matches the names within the certificate. Use `certificate` to validate the certificate and verify that it is signed by a trusted authority; this option does not check the certificate hostname. Use `none` to skip certificate validation. +type VerificationMode string + +// WebhookConfig Defines properties for connectors when type is `.webhook`. +type WebhookConfig struct { + // AuthType The type of authentication to use: basic, SSL, or none. + AuthType *AuthType `json:"authType,omitempty"` + + // Ca A base64 encoded version of the certificate authority file that the connector can trust to sign and validate certificates. This option is available for all authentication types. + Ca *Ca `json:"ca,omitempty"` + + // CertType If the `authType` is `webhook-authentication-ssl`, specifies whether the certificate authentication data is in a CRT and key file format or a PFX file format. + CertType *CertType `json:"certType,omitempty"` + + // HasAuth If true, a username and password for login type authentication must be provided. + HasAuth *HasAuth `json:"hasAuth,omitempty"` + + // Headers A set of key-value pairs sent as headers with the request. + Headers *map[string]interface{} `json:"headers,omitempty"` + + // Method The HTTP request method, either `post` or `put`. + Method *WebhookConfigMethod `json:"method,omitempty"` + + // Url The request URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. + Url *string `json:"url,omitempty"` + + // VerificationMode Controls the verification of certificates. Use `full` to validate that the certificate has an issue date within the `not_before` and `not_after` dates, chains to a trusted certificate authority (CA), and has a hostname or IP address that matches the names within the certificate. Use `certificate` to validate the certificate and verify that it is signed by a trusted authority; this option does not check the certificate hostname. 
Use `none` to skip certificate validation.
+ VerificationMode *VerificationMode `json:"verificationMode,omitempty"`
+}
+
+// WebhookConfigMethod The HTTP request method, either `post` or `put`.
+type WebhookConfigMethod string
+
+// WebhookSecrets Defines secrets for connectors when type is `.webhook`.
+type WebhookSecrets struct {
+ // Crt If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the CRT or CERT file.
+ Crt *Crt `json:"crt,omitempty"`
+
+ // Key If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the KEY file.
+ Key *Key `json:"key,omitempty"`
+
+ // Password The password for HTTP basic authentication or the passphrase for the SSL certificate files. If `hasAuth` is set to `true` and `authType` is `webhook-authentication-basic`, this property is required.
+ Password *string `json:"password,omitempty"`
+
+ // Pfx If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-pfx`, it is a base64 encoded version of the PFX or P12 file.
+ Pfx *Pfx `json:"pfx,omitempty"`
+
+ // User The username for HTTP basic authentication. If `hasAuth` is set to `true` and `authType` is `webhook-authentication-basic`, this property is required.
+ User *string `json:"user,omitempty"`
+}
+
+// XmattersConfig Defines properties for connectors when type is `.xmatters`.
+type XmattersConfig struct {
+ // ConfigUrl The request URL for the Elastic Alerts trigger in xMatters. It is applicable only when `usesBasic` is `true`.
+ ConfigUrl *string `json:"configUrl,omitempty"`
+
+ // UsesBasic Specifies whether the connector uses HTTP basic authentication (`true`) or URL authentication (`false`).
+ UsesBasic *bool `json:"usesBasic,omitempty"`
+}
+
+// XmattersSecrets Defines secrets for connectors when type is `.xmatters`.
+type XmattersSecrets struct {
+ // Password A password for HTTP basic authentication. It is applicable only when `usesBasic` is `true`.
+ Password *string `json:"password,omitempty"`
+
+ // SecretsUrl The request URL for the Elastic Alerts trigger in xMatters with the API key included in the URL. It is applicable only when `usesBasic` is `false`.
+ SecretsUrl *string `json:"secretsUrl,omitempty"`
+
+ // User A user name for HTTP basic authentication. It is applicable only when `usesBasic` is `true`.
+ User *string `json:"user,omitempty"`
+}
+
+// APMUIElasticApiVersion defines model for APM_UI_elastic_api_version.
+type APMUIElasticApiVersion string
+
+// CasesAlertId defines model for Cases_alert_id.
+type CasesAlertId = string
+
+// CasesAssigneesFilter defines model for Cases_assignees_filter.
+type CasesAssigneesFilter struct {
+ union json.RawMessage
+}
+
+// CasesCaseId defines model for Cases_case_id.
+type CasesCaseId = string
+
+// CasesCategory defines model for Cases_category.
+type CasesCategory struct {
+ union json.RawMessage
+}
+
+// CasesCommentId defines model for Cases_comment_id.
+type CasesCommentId = string
+
+// CasesConfigurationId defines model for Cases_configuration_id.
+type CasesConfigurationId = string
+
+// CasesConnectorId defines model for Cases_connector_id.
+type CasesConnectorId = string
+
+// CasesDefaultSearchOperator defines model for Cases_defaultSearchOperator.
+type CasesDefaultSearchOperator = string
+
+// CasesFrom defines model for Cases_from.
+type CasesFrom = string
+
+// CasesIds defines model for Cases_ids.
+type CasesIds = []string
+
+// CasesOwnerFilter defines model for Cases_owner_filter.
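+
+// NOTE (editorial sketch, not generated code): for the WebhookSecrets type
+// defined earlier in this file, HTTP basic authentication only needs the
+// username/password pair; the SSL-related fields stay nil. A minimal,
+// hypothetical construction could look like:
+//
+//	user, pass := "alerting-bot", "s3cr3t" // hypothetical credentials
+//	secrets := WebhookSecrets{User: &user, Password: &pass}
+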
+type CasesOwnerFilter struct { + union json.RawMessage +} + +// CasesPageIndex defines model for Cases_page_index. +type CasesPageIndex = int + +// CasesPageSize defines model for Cases_page_size. +type CasesPageSize = int + +// CasesReporters defines model for Cases_reporters. +type CasesReporters struct { + union json.RawMessage +} + +// CasesSearch defines model for Cases_search. +type CasesSearch = string + +// CasesSearchFields defines model for Cases_searchFields. +type CasesSearchFields struct { + union json.RawMessage +} + +// CasesSeverity defines model for Cases_severity. +type CasesSeverity string + +// CasesSortField defines model for Cases_sortField. +type CasesSortField string + +// CasesSortOrder defines model for Cases_sort_order. +type CasesSortOrder string + +// CasesStatus defines model for Cases_status. +type CasesStatus string + +// CasesTags defines model for Cases_tags. +type CasesTags struct { + union json.RawMessage +} + +// CasesTo defines model for Cases_to. +type CasesTo = string + +// CasesUserActionTypes defines model for Cases_user_action_types. +type CasesUserActionTypes = []string + +// DataViewsFieldName defines model for Data_views_field_name. +type DataViewsFieldName = string + +// DataViewsViewId defines model for Data_views_view_id. +type DataViewsViewId = string + +// MachineLearningAPIsSimulateParam defines model for Machine_learning_APIs_simulateParam. +type MachineLearningAPIsSimulateParam = bool + +// SLOsSloId defines model for SLOs_slo_id. +type SLOsSloId = string + +// SLOsSpaceId defines model for SLOs_space_id. +type SLOsSpaceId = string + +// SavedObjectsSavedObjectId defines model for Saved_objects_saved_object_id. +type SavedObjectsSavedObjectId = string + +// SavedObjectsSavedObjectType defines model for Saved_objects_saved_object_type. +type SavedObjectsSavedObjectType = string + +// ShortURLAPIsIdParam defines model for Short_URL_APIs_idParam. +type ShortURLAPIsIdParam = string + +// SpaceId defines model for spaceId. +type SpaceId = string + +// PostActionsConnectorIdExecuteJSONBody defines parameters for PostActionsConnectorIdExecute. +type PostActionsConnectorIdExecuteJSONBody struct { + Params PostActionsConnectorIdExecuteJSONBody_Params `json:"params"` +} + +// PostActionsConnectorIdExecuteJSONBody_Params defines parameters for PostActionsConnectorIdExecute. +type PostActionsConnectorIdExecuteJSONBody_Params struct { + AdditionalProperties map[string]interface{} `json:"-"` + union json.RawMessage +} + +// GetActionsConnectorTypesParams defines parameters for GetActionsConnectorTypes. +type GetActionsConnectorTypesParams struct { + // FeatureId A filter to limit the retrieved connector types to those that support a specific feature (such as alerting or cases). + FeatureId *string `form:"feature_id,omitempty" json:"feature_id,omitempty"` +} + +// PostAlertingRuleIdJSONBody defines parameters for PostAlertingRuleId. +type PostAlertingRuleIdJSONBody struct { + Actions *[]struct { + // AlertsFilter Conditions that affect whether the action runs. If you specify multiple conditions, all conditions must be met for the action to run. For example, if an alert occurs within the specified time frame and matches the query, the action runs. + AlertsFilter *struct { + Query *struct { + // Dsl A filter written in Elasticsearch Query Domain Specific Language (DSL). + Dsl *string `json:"dsl,omitempty"` + + // Filters A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package. 
+ Filters []struct {
+ State *struct {
+ // Store A filter can be either specific to an application context or applied globally.
+ Store PostAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore `json:"store"`
+ } `json:"$state,omitempty"`
+ Meta map[string]interface{} `json:"meta"`
+ Query *map[string]interface{} `json:"query,omitempty"`
+ } `json:"filters"`
+
+ // Kql A filter written in Kibana Query Language (KQL).
+ Kql string `json:"kql"`
+ } `json:"query,omitempty"`
+
+ // Timeframe Defines a period that limits whether the action runs.
+ Timeframe *struct {
+ // Days Defines the days of the week that the action can run, represented as an array of numbers. For example, `1` represents Monday. An empty array is equivalent to specifying all the days of the week.
+ Days []PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays `json:"days"`
+
+ // Hours Defines the range of time in a day that the action can run. If the `start` value is `00:00` and the `end` value is `24:00`, actions are generated all day.
+ Hours struct {
+ // End The end of the time frame in 24-hour notation (`hh:mm`).
+ End string `json:"end"`
+
+ // Start The start of the time frame in 24-hour notation (`hh:mm`).
+ Start string `json:"start"`
+ } `json:"hours"`
+
+ // Timezone The ISO time zone for the `hours` values. Values such as `UTC` and `UTC+1` also work but lack built-in daylight savings time support and are not recommended.
+ Timezone string `json:"timezone"`
+ } `json:"timeframe,omitempty"`
+ } `json:"alerts_filter,omitempty"`
+ Frequency *struct {
+ // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values.
+ NotifyWhen PostAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen `json:"notify_when"`
+
+ // Summary Indicates whether the action is a summary.
+ Summary bool `json:"summary"`
+
+ // Throttle The throttle interval, which defines how often an alert generates repeated actions. It is specified in seconds, minutes, hours, or days and is applicable only if `notify_when` is set to `onThrottleInterval`. NOTE: You cannot specify the throttle interval at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values.
+ Throttle *string `json:"throttle,omitempty"`
+ } `json:"frequency,omitempty"`
+
+ // Group The group name, which affects when the action runs (for example, when the threshold is met or when the alert is recovered). Each rule type has a list of valid action group names. If you don't need to group actions, set to `default`.
+ Group *string `json:"group,omitempty"`
+
+ // Id The identifier for the connector saved object.
+ Id string `json:"id"`
+
+ // Params The parameters for the action, which are sent to the connector. The `params` are handled as Mustache templates and passed a default set of context.
+ Params *map[string]interface{} `json:"params,omitempty"` + + // UseAlertDataForTemplate Indicates whether to use alert data as a template. + UseAlertDataForTemplate *bool `json:"use_alert_data_for_template,omitempty"` + + // Uuid A universally unique identifier (UUID) for the action. + Uuid *string `json:"uuid,omitempty"` + } `json:"actions,omitempty"` + + // AlertDelay Indicates that an alert occurs only when the specified number of consecutive runs met the rule conditions. + AlertDelay *struct { + // Active The number of consecutive runs that must meet the rule conditions. + Active float32 `json:"active"` + } `json:"alert_delay,omitempty"` + Artifacts *struct { + Dashboards *[]struct { + Id string `json:"id"` + } `json:"dashboards,omitempty"` + InvestigationGuide *struct { + Blob string `json:"blob"` + } `json:"investigation_guide,omitempty"` + } `json:"artifacts,omitempty"` + + // Consumer The name of the application or feature that owns the rule. For example: `alerts`, `apm`, `discover`, `infrastructure`, `logs`, `metrics`, `ml`, `monitoring`, `securitySolution`, `siem`, `stackAlerts`, or `uptime`. + Consumer string `json:"consumer"` + + // Enabled Indicates whether you want to run the rule on an interval basis after it is created. + Enabled *bool `json:"enabled,omitempty"` + + // Flapping When flapping detection is turned on, alerts that switch quickly between active and recovered states are identified as “flapping” and notifications are reduced. + Flapping *struct { + // LookBackWindow The minimum number of runs in which the threshold must be met. + LookBackWindow float32 `json:"look_back_window"` + + // StatusChangeThreshold The minimum number of times an alert must switch states in the look back window. + StatusChangeThreshold float32 `json:"status_change_threshold"` + } `json:"flapping,omitempty"` + + // Name The name of the rule. While this name does not have to be unique, a distinctive name can help you identify a rule. + Name string `json:"name"` + + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen *PostAlertingRuleIdJSONBodyNotifyWhen `json:"notify_when,omitempty"` + + // Params The parameters for the rule. + Params *PostAlertingRuleIdJSONBody_Params `json:"params,omitempty"` + + // RuleTypeId The rule type identifier. + RuleTypeId string `json:"rule_type_id"` + + // Schedule The check interval, which specifies how frequently the rule conditions are checked. + Schedule struct { + // Interval The interval is specified in seconds, minutes, hours, or days. + Interval string `json:"interval"` + } `json:"schedule"` + + // Tags The tags for the rule. + Tags *[]string `json:"tags,omitempty"` + + // Throttle Use the `throttle` property in the action `frequency` object instead. The throttle interval, which defines how often an alert generates repeated actions. NOTE: You cannot specify the throttle interval at both the rule and action level. 
If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values.
+ Throttle *string `json:"throttle,omitempty"`
+}
+
+// PostAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore defines parameters for PostAlertingRuleId.
+type PostAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore string
+
+// PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays defines parameters for PostAlertingRuleId.
+type PostAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays int
+
+// PostAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen defines parameters for PostAlertingRuleId.
+type PostAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen string
+
+// PostAlertingRuleIdJSONBodyNotifyWhen defines parameters for PostAlertingRuleId.
+type PostAlertingRuleIdJSONBodyNotifyWhen string
+
+// PostAlertingRuleIdJSONBody_Params defines parameters for PostAlertingRuleId.
+type PostAlertingRuleIdJSONBody_Params struct {
+ AdditionalProperties map[string]interface{} `json:"-"`
+ union json.RawMessage
+}
+
+// PutAlertingRuleIdJSONBody defines parameters for PutAlertingRuleId.
+type PutAlertingRuleIdJSONBody struct {
+ Actions *[]struct {
+ AlertsFilter *struct {
+ Query *struct {
+ // Dsl A filter written in Elasticsearch Query Domain Specific Language (DSL).
+ Dsl *string `json:"dsl,omitempty"`
+
+ // Filters A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package.
+ Filters []struct {
+ State *struct {
+ // Store A filter can be either specific to an application context or applied globally.
+ Store PutAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore `json:"store"`
+ } `json:"$state,omitempty"`
+ Meta map[string]interface{} `json:"meta"`
+ Query *map[string]interface{} `json:"query,omitempty"`
+ } `json:"filters"`
+
+ // Kql A filter written in Kibana Query Language (KQL).
+ Kql string `json:"kql"`
+ } `json:"query,omitempty"`
+
+ // Timeframe Defines a period that limits whether the action runs.
+ Timeframe *struct {
+ // Days Defines the days of the week that the action can run, represented as an array of numbers. For example, `1` represents Monday. An empty array is equivalent to specifying all the days of the week.
+ Days []PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays `json:"days"`
+
+ // Hours Defines the range of time in a day that the action can run. If the `start` value is `00:00` and the `end` value is `24:00`, actions are generated all day.
+ Hours struct {
+ // End The end of the time frame in 24-hour notation (`hh:mm`).
+ End string `json:"end"`
+
+ // Start The start of the time frame in 24-hour notation (`hh:mm`).
+ Start string `json:"start"`
+ } `json:"hours"`
+
+ // Timezone The ISO time zone for the `hours` values. Values such as `UTC` and `UTC+1` also work but lack built-in daylight savings time support and are not recommended.
+ Timezone string `json:"timezone"`
+ } `json:"timeframe,omitempty"`
+ } `json:"alerts_filter,omitempty"`
+ Frequency *struct {
+ // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met.
NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen PutAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen `json:"notify_when"` + + // Summary Indicates whether the action is a summary. + Summary bool `json:"summary"` + + // Throttle The throttle interval, which defines how often an alert generates repeated actions. It is specified in seconds, minutes, hours, or days and is applicable only if `notify_when` is set to `onThrottleInterval`. NOTE: You cannot specify the throttle interval at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + Throttle *string `json:"throttle,omitempty"` + } `json:"frequency,omitempty"` + + // Group The group name, which affects when the action runs (for example, when the threshold is met or when the alert is recovered). Each rule type has a list of valid action group names. If you don't need to group actions, set to `default`. + Group *string `json:"group,omitempty"` + + // Id The identifier for the connector saved object. + Id string `json:"id"` + + // Params The parameters for the action, which are sent to the connector. The `params` are handled as Mustache templates and passed a default set of context. + Params *map[string]interface{} `json:"params,omitempty"` + + // UseAlertDataForTemplate Indicates whether to use alert data as a template. + UseAlertDataForTemplate *bool `json:"use_alert_data_for_template,omitempty"` + + // Uuid A universally unique identifier (UUID) for the action. + Uuid *string `json:"uuid,omitempty"` + } `json:"actions,omitempty"` + + // AlertDelay Indicates that an alert occurs only when the specified number of consecutive runs met the rule conditions. + AlertDelay *struct { + // Active The number of consecutive runs that must meet the rule conditions. + Active float32 `json:"active"` + } `json:"alert_delay,omitempty"` + Artifacts *struct { + Dashboards *[]struct { + Id string `json:"id"` + } `json:"dashboards,omitempty"` + InvestigationGuide *struct { + Blob string `json:"blob"` + } `json:"investigation_guide,omitempty"` + } `json:"artifacts,omitempty"` + + // Flapping When flapping detection is turned on, alerts that switch quickly between active and recovered states are identified as “flapping” and notifications are reduced. + Flapping *struct { + // LookBackWindow The minimum number of runs in which the threshold must be met. + LookBackWindow float32 `json:"look_back_window"` + + // StatusChangeThreshold The minimum number of times an alert must switch states in the look back window. + StatusChangeThreshold float32 `json:"status_change_threshold"` + } `json:"flapping,omitempty"` + + // Name The name of the rule. While this name does not have to be unique, a distinctive name can help you identify a rule. + Name string `json:"name"` + + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. 
NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen *PutAlertingRuleIdJSONBodyNotifyWhen `json:"notify_when,omitempty"` + + // Params The parameters for the rule. + Params *map[string]interface{} `json:"params,omitempty"` + Schedule struct { + // Interval The interval is specified in seconds, minutes, hours, or days. + Interval string `json:"interval"` + } `json:"schedule"` + Tags *[]string `json:"tags,omitempty"` + + // Throttle Use the `throttle` property in the action `frequency` object instead. The throttle interval, which defines how often an alert generates repeated actions. NOTE: You cannot specify the throttle interval at both the rule and action level. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + Throttle *string `json:"throttle,omitempty"` +} + +// PutAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore defines parameters for PutAlertingRuleId. +type PutAlertingRuleIdJSONBodyActionsAlertsFilterQueryFiltersStateStore string + +// PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays defines parameters for PutAlertingRuleId. +type PutAlertingRuleIdJSONBodyActionsAlertsFilterTimeframeDays int + +// PutAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen defines parameters for PutAlertingRuleId. +type PutAlertingRuleIdJSONBodyActionsFrequencyNotifyWhen string + +// PutAlertingRuleIdJSONBodyNotifyWhen defines parameters for PutAlertingRuleId. +type PutAlertingRuleIdJSONBodyNotifyWhen string + +// PostAlertingRuleIdDisableJSONBody defines parameters for PostAlertingRuleIdDisable. +type PostAlertingRuleIdDisableJSONBody struct { + // Untrack Defines whether this rule's alerts should be untracked. + Untrack *bool `json:"untrack,omitempty"` +} + +// PostAlertingRuleIdSnoozeScheduleJSONBody defines parameters for PostAlertingRuleIdSnoozeSchedule. +type PostAlertingRuleIdSnoozeScheduleJSONBody struct { + Schedule struct { + Custom *struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. 
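+      // For example (illustrative editorial addition, not generated from the spec): `["MO", "WE", "FR"]` or `["+2WE", "-1FR"]`.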
+ OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom,omitempty"` + } `json:"schedule"` +} + +// GetAlertingRulesFindParams defines parameters for GetAlertingRulesFind. +type GetAlertingRulesFindParams struct { + // PerPage The number of rules to return per page. + PerPage *float32 `form:"per_page,omitempty" json:"per_page,omitempty"` + + // Page The page number to return. + Page *float32 `form:"page,omitempty" json:"page,omitempty"` + + // Search An Elasticsearch simple_query_string query that filters the objects in the response. + Search *string `form:"search,omitempty" json:"search,omitempty"` + + // DefaultSearchOperator The default operator to use for the simple_query_string. + DefaultSearchOperator *GetAlertingRulesFindParamsDefaultSearchOperator `form:"default_search_operator,omitempty" json:"default_search_operator,omitempty"` + + // SearchFields The fields to perform the simple_query_string parsed query against. + SearchFields *struct { + union json.RawMessage + } `form:"search_fields,omitempty" json:"search_fields,omitempty"` + + // SortField Determines which field is used to sort the results. The field must exist in the `attributes` key of the response. + SortField *string `form:"sort_field,omitempty" json:"sort_field,omitempty"` + + // SortOrder Determines the sort order. + SortOrder *GetAlertingRulesFindParamsSortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"` + + // HasReference Filters the rules that have a relation with the reference objects with a specific type and identifier. + HasReference *struct { + Id string `json:"id"` + Type string `json:"type"` + } `form:"has_reference,omitempty" json:"has_reference,omitempty"` + + // Fields The fields to return in the `attributes` key of the response. + Fields *struct { + union json.RawMessage + } `form:"fields,omitempty" json:"fields,omitempty"` + + // Filter A KQL string that you filter with an attribute from your saved object. It should look like `savedObjectType.attributes.title: "myTitle"`. However, if you used a direct attribute of a saved object, such as `updatedAt`, you must define your filter, for example, `savedObjectType.updatedAt > 2018-12-22`. + Filter *string `form:"filter,omitempty" json:"filter,omitempty"` + FilterConsumers *[]string `form:"filter_consumers,omitempty" json:"filter_consumers,omitempty"` +} + +// GetAlertingRulesFindParamsDefaultSearchOperator defines parameters for GetAlertingRulesFind. +type GetAlertingRulesFindParamsDefaultSearchOperator string + +// GetAlertingRulesFindParamsSearchFields0 defines parameters for GetAlertingRulesFind. +type GetAlertingRulesFindParamsSearchFields0 = []string + +// GetAlertingRulesFindParamsSearchFields1 defines parameters for GetAlertingRulesFind. +type GetAlertingRulesFindParamsSearchFields1 = string + +// GetAlertingRulesFindParamsSortOrder defines parameters for GetAlertingRulesFind. +type GetAlertingRulesFindParamsSortOrder string + +// GetAlertingRulesFindParamsFields0 defines parameters for GetAlertingRulesFind. +type GetAlertingRulesFindParamsFields0 = []string + +// GetAlertingRulesFindParamsFields1 defines parameters for GetAlertingRulesFind. 
+type GetAlertingRulesFindParamsFields1 = string + +// CreateAgentKeyParams defines parameters for CreateAgentKey. +type CreateAgentKeyParams struct { + // ElasticApiVersion The version of the API to use + ElasticApiVersion CreateAgentKeyParamsElasticApiVersion `json:"elastic-api-version"` +} + +// CreateAgentKeyParamsElasticApiVersion defines parameters for CreateAgentKey. +type CreateAgentKeyParamsElasticApiVersion string + +// SaveApmServerSchemaJSONBody defines parameters for SaveApmServerSchema. +type SaveApmServerSchemaJSONBody struct { + // Schema Schema object + Schema *map[string]interface{} `json:"schema,omitempty"` +} + +// SaveApmServerSchemaParams defines parameters for SaveApmServerSchema. +type SaveApmServerSchemaParams struct { + // ElasticApiVersion The version of the API to use + ElasticApiVersion SaveApmServerSchemaParamsElasticApiVersion `json:"elastic-api-version"` +} + +// SaveApmServerSchemaParamsElasticApiVersion defines parameters for SaveApmServerSchema. +type SaveApmServerSchemaParamsElasticApiVersion string + +// CreateAnnotationParams defines parameters for CreateAnnotation. +type CreateAnnotationParams struct { + // ElasticApiVersion The version of the API to use + ElasticApiVersion CreateAnnotationParamsElasticApiVersion `json:"elastic-api-version"` +} + +// CreateAnnotationParamsElasticApiVersion defines parameters for CreateAnnotation. +type CreateAnnotationParamsElasticApiVersion string + +// GetAnnotationParams defines parameters for GetAnnotation. +type GetAnnotationParams struct { + // Environment The environment to filter annotations by + Environment *string `form:"environment,omitempty" json:"environment,omitempty"` + + // Start The start date for the search + Start *string `form:"start,omitempty" json:"start,omitempty"` + + // End The end date for the search + End *string `form:"end,omitempty" json:"end,omitempty"` + + // ElasticApiVersion The version of the API to use + ElasticApiVersion GetAnnotationParamsElasticApiVersion `json:"elastic-api-version"` +} + +// GetAnnotationParamsElasticApiVersion defines parameters for GetAnnotation. +type GetAnnotationParamsElasticApiVersion string + +// DeleteAgentConfigurationParams defines parameters for DeleteAgentConfiguration. +type DeleteAgentConfigurationParams struct { + // ElasticApiVersion The version of the API to use + ElasticApiVersion DeleteAgentConfigurationParamsElasticApiVersion `json:"elastic-api-version"` +} + +// DeleteAgentConfigurationParamsElasticApiVersion defines parameters for DeleteAgentConfiguration. +type DeleteAgentConfigurationParamsElasticApiVersion string + +// GetAgentConfigurationsParams defines parameters for GetAgentConfigurations. +type GetAgentConfigurationsParams struct { + // ElasticApiVersion The version of the API to use + ElasticApiVersion GetAgentConfigurationsParamsElasticApiVersion `json:"elastic-api-version"` +} + +// GetAgentConfigurationsParamsElasticApiVersion defines parameters for GetAgentConfigurations. +type GetAgentConfigurationsParamsElasticApiVersion string + +// CreateUpdateAgentConfigurationParams defines parameters for CreateUpdateAgentConfiguration. 
+type CreateUpdateAgentConfigurationParams struct { + // Overwrite If the config exists ?overwrite=true is required + Overwrite *bool `form:"overwrite,omitempty" json:"overwrite,omitempty"` + + // ElasticApiVersion The version of the API to use + ElasticApiVersion CreateUpdateAgentConfigurationParamsElasticApiVersion `json:"elastic-api-version"` +} + +// CreateUpdateAgentConfigurationParamsElasticApiVersion defines parameters for CreateUpdateAgentConfiguration. +type CreateUpdateAgentConfigurationParamsElasticApiVersion string + +// GetAgentNameForServiceParams defines parameters for GetAgentNameForService. +type GetAgentNameForServiceParams struct { + // ServiceName The name of the service + ServiceName string `form:"serviceName" json:"serviceName"` + + // ElasticApiVersion The version of the API to use + ElasticApiVersion GetAgentNameForServiceParamsElasticApiVersion `json:"elastic-api-version"` +} + +// GetAgentNameForServiceParamsElasticApiVersion defines parameters for GetAgentNameForService. +type GetAgentNameForServiceParamsElasticApiVersion string + +// GetEnvironmentsForServiceParams defines parameters for GetEnvironmentsForService. +type GetEnvironmentsForServiceParams struct { + // ServiceName The name of the service + ServiceName *string `form:"serviceName,omitempty" json:"serviceName,omitempty"` + + // ElasticApiVersion The version of the API to use + ElasticApiVersion GetEnvironmentsForServiceParamsElasticApiVersion `json:"elastic-api-version"` +} + +// GetEnvironmentsForServiceParamsElasticApiVersion defines parameters for GetEnvironmentsForService. +type GetEnvironmentsForServiceParamsElasticApiVersion string + +// SearchSingleConfigurationParams defines parameters for SearchSingleConfiguration. +type SearchSingleConfigurationParams struct { + // ElasticApiVersion The version of the API to use + ElasticApiVersion SearchSingleConfigurationParamsElasticApiVersion `json:"elastic-api-version"` +} + +// SearchSingleConfigurationParamsElasticApiVersion defines parameters for SearchSingleConfiguration. +type SearchSingleConfigurationParamsElasticApiVersion string + +// GetSingleAgentConfigurationParams defines parameters for GetSingleAgentConfiguration. +type GetSingleAgentConfigurationParams struct { + // Name Service name + Name *string `form:"name,omitempty" json:"name,omitempty"` + + // Environment Service environment + Environment *string `form:"environment,omitempty" json:"environment,omitempty"` + + // ElasticApiVersion The version of the API to use + ElasticApiVersion GetSingleAgentConfigurationParamsElasticApiVersion `json:"elastic-api-version"` +} + +// GetSingleAgentConfigurationParamsElasticApiVersion defines parameters for GetSingleAgentConfiguration. +type GetSingleAgentConfigurationParamsElasticApiVersion string + +// GetSourceMapsParams defines parameters for GetSourceMaps. +type GetSourceMapsParams struct { + // Page Page number + Page *float32 `form:"page,omitempty" json:"page,omitempty"` + + // PerPage Number of records per page + PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` + + // ElasticApiVersion The version of the API to use + ElasticApiVersion GetSourceMapsParamsElasticApiVersion `json:"elastic-api-version"` +} + +// GetSourceMapsParamsElasticApiVersion defines parameters for GetSourceMaps. +type GetSourceMapsParamsElasticApiVersion string + +// UploadSourceMapParams defines parameters for UploadSourceMap. 
+type UploadSourceMapParams struct { + // ElasticApiVersion The version of the API to use + ElasticApiVersion UploadSourceMapParamsElasticApiVersion `json:"elastic-api-version"` +} + +// UploadSourceMapParamsElasticApiVersion defines parameters for UploadSourceMap. +type UploadSourceMapParamsElasticApiVersion string + +// DeleteSourceMapParams defines parameters for DeleteSourceMap. +type DeleteSourceMapParams struct { + // ElasticApiVersion The version of the API to use + ElasticApiVersion DeleteSourceMapParamsElasticApiVersion `json:"elastic-api-version"` +} + +// DeleteSourceMapParamsElasticApiVersion defines parameters for DeleteSourceMap. +type DeleteSourceMapParamsElasticApiVersion string + +// DeleteAssetCriticalityRecordParams defines parameters for DeleteAssetCriticalityRecord. +type DeleteAssetCriticalityRecordParams struct { + // IdValue The ID value of the asset. + IdValue string `form:"id_value" json:"id_value"` + + // IdField The field representing the ID. + IdField SecurityEntityAnalyticsAPIIdField `form:"id_field" json:"id_field"` + + // Refresh If 'wait_for' the request will wait for the index refresh. + Refresh *DeleteAssetCriticalityRecordParamsRefresh `form:"refresh,omitempty" json:"refresh,omitempty"` +} + +// DeleteAssetCriticalityRecordParamsRefresh defines parameters for DeleteAssetCriticalityRecord. +type DeleteAssetCriticalityRecordParamsRefresh string + +// GetAssetCriticalityRecordParams defines parameters for GetAssetCriticalityRecord. +type GetAssetCriticalityRecordParams struct { + // IdValue The ID value of the asset. + IdValue string `form:"id_value" json:"id_value"` + + // IdField The field representing the ID. + IdField SecurityEntityAnalyticsAPIIdField `form:"id_field" json:"id_field"` +} + +// CreateAssetCriticalityRecordJSONBody defines parameters for CreateAssetCriticalityRecord. +type CreateAssetCriticalityRecordJSONBody struct { + // CriticalityLevel The criticality level of the asset. + CriticalityLevel SecurityEntityAnalyticsAPIAssetCriticalityLevel `json:"criticality_level"` + IdField SecurityEntityAnalyticsAPIIdField `json:"id_field"` + + // IdValue The ID value of the asset. + IdValue string `json:"id_value"` + + // Refresh If 'wait_for' the request will wait for the index refresh. + Refresh *CreateAssetCriticalityRecordJSONBodyRefresh `json:"refresh,omitempty"` +} + +// CreateAssetCriticalityRecordJSONBodyRefresh defines parameters for CreateAssetCriticalityRecord. +type CreateAssetCriticalityRecordJSONBodyRefresh string + +// BulkUpsertAssetCriticalityRecordsJSONBody defines parameters for BulkUpsertAssetCriticalityRecords. +type BulkUpsertAssetCriticalityRecordsJSONBody struct { + Records []struct { + // CriticalityLevel The criticality level of the asset for bulk upload. The value `unassigned` is used to indicate that the criticality level is not assigned and is only used for bulk upload. + CriticalityLevel SecurityEntityAnalyticsAPIAssetCriticalityLevelsForBulkUpload `json:"criticality_level"` + IdField SecurityEntityAnalyticsAPIIdField `json:"id_field"` + + // IdValue The ID value of the asset. + IdValue string `json:"id_value"` + } `json:"records"` +} + +// FindAssetCriticalityRecordsParams defines parameters for FindAssetCriticalityRecords. +type FindAssetCriticalityRecordsParams struct { + // SortField The field to sort by. + SortField *FindAssetCriticalityRecordsParamsSortField `form:"sort_field,omitempty" json:"sort_field,omitempty"` + + // SortDirection The order to sort by. 
+	SortDirection *FindAssetCriticalityRecordsParamsSortDirection `form:"sort_direction,omitempty" json:"sort_direction,omitempty"`
+
+	// Page The page number to return.
+	Page *int `form:"page,omitempty" json:"page,omitempty"`
+
+	// PerPage The number of records to return per page.
+	PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"`
+
+	// Kuery The kuery to filter by.
+	Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"`
+}
+
+// FindAssetCriticalityRecordsParamsSortField defines parameters for FindAssetCriticalityRecords.
+type FindAssetCriticalityRecordsParamsSortField string
+
+// FindAssetCriticalityRecordsParamsSortDirection defines parameters for FindAssetCriticalityRecords.
+type FindAssetCriticalityRecordsParamsSortDirection string
+
+// DeleteCaseDefaultSpaceParams defines parameters for DeleteCaseDefaultSpace.
+type DeleteCaseDefaultSpaceParams struct {
+	// Ids The cases that you want to remove. All non-ASCII characters must be URL encoded.
+	Ids CasesIds `form:"ids" json:"ids"`
+}
+
+// FindCasesDefaultSpaceParams defines parameters for FindCasesDefaultSpace.
+type FindCasesDefaultSpaceParams struct {
+	// Assignees Filters the returned cases by assignees. Valid values are `none` or unique identifiers for the user profiles. These identifiers can be found by using the suggest user profile API.
+	Assignees *CasesAssigneesFilter `form:"assignees,omitempty" json:"assignees,omitempty"`
+
+	// Category Filters the returned cases by category.
+	Category *CasesCategory `form:"category,omitempty" json:"category,omitempty"`
+
+	// DefaultSearchOperator The default operator to use for the simple_query_string.
+	DefaultSearchOperator *CasesDefaultSearchOperator `form:"defaultSearchOperator,omitempty" json:"defaultSearchOperator,omitempty"`
+
+	// From Returns only cases that were created after a specific date. The date must be specified as a KQL data range or date match expression.
+	From *CasesFrom `form:"from,omitempty" json:"from,omitempty"`
+
+	// Owner A filter to limit the response to a specific set of applications. If this parameter is omitted, the response contains information about all the cases that the user has access to read.
+	Owner *CasesOwnerFilter `form:"owner,omitempty" json:"owner,omitempty"`
+
+	// Page The page number to return.
+	Page *CasesPageIndex `form:"page,omitempty" json:"page,omitempty"`
+
+	// PerPage The number of items to return. Limited to 100 items.
+	PerPage *CasesPageSize `form:"perPage,omitempty" json:"perPage,omitempty"`
+
+	// Reporters Filters the returned cases by the user name of the reporter.
+	Reporters *CasesReporters `form:"reporters,omitempty" json:"reporters,omitempty"`
+
+	// Search An Elasticsearch simple_query_string query that filters the objects in the response.
+	Search *CasesSearch `form:"search,omitempty" json:"search,omitempty"`
+
+	// SearchFields The fields to perform the simple_query_string parsed query against.
+	SearchFields *CasesSearchFields `form:"searchFields,omitempty" json:"searchFields,omitempty"`
+
+	// Severity The severity of the case.
+	Severity *FindCasesDefaultSpaceParamsSeverity `form:"severity,omitempty" json:"severity,omitempty"`
+
+	// SortField Determines which field is used to sort the results.
+	SortField *FindCasesDefaultSpaceParamsSortField `form:"sortField,omitempty" json:"sortField,omitempty"`
+
+	// SortOrder Determines the sort order. 
+ SortOrder *FindCasesDefaultSpaceParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` + + // Status Filters the returned cases by state. + Status *FindCasesDefaultSpaceParamsStatus `form:"status,omitempty" json:"status,omitempty"` + + // Tags Filters the returned cases by tags. + Tags *CasesTags `form:"tags,omitempty" json:"tags,omitempty"` + + // To Returns only cases that were created before a specific date. The date must be specified as a KQL data range or date match expression. + To *CasesTo `form:"to,omitempty" json:"to,omitempty"` +} + +// FindCasesDefaultSpaceParamsSeverity defines parameters for FindCasesDefaultSpace. +type FindCasesDefaultSpaceParamsSeverity string + +// FindCasesDefaultSpaceParamsSortField defines parameters for FindCasesDefaultSpace. +type FindCasesDefaultSpaceParamsSortField string + +// FindCasesDefaultSpaceParamsSortOrder defines parameters for FindCasesDefaultSpace. +type FindCasesDefaultSpaceParamsSortOrder string + +// FindCasesDefaultSpaceParamsStatus defines parameters for FindCasesDefaultSpace. +type FindCasesDefaultSpaceParamsStatus string + +// GetCasesByAlertDefaultSpaceParams defines parameters for GetCasesByAlertDefaultSpace. +type GetCasesByAlertDefaultSpaceParams struct { + // Owner A filter to limit the response to a specific set of applications. If this parameter is omitted, the response contains information about all the cases that the user has access to read. + Owner *CasesOwnerFilter `form:"owner,omitempty" json:"owner,omitempty"` +} + +// GetCaseConfigurationDefaultSpaceParams defines parameters for GetCaseConfigurationDefaultSpace. +type GetCaseConfigurationDefaultSpaceParams struct { + // Owner A filter to limit the response to a specific set of applications. If this parameter is omitted, the response contains information about all the cases that the user has access to read. + Owner *CasesOwnerFilter `form:"owner,omitempty" json:"owner,omitempty"` +} + +// GetCaseReportersDefaultSpaceParams defines parameters for GetCaseReportersDefaultSpace. +type GetCaseReportersDefaultSpaceParams struct { + // Owner A filter to limit the response to a specific set of applications. If this parameter is omitted, the response contains information about all the cases that the user has access to read. + Owner *CasesOwnerFilter `form:"owner,omitempty" json:"owner,omitempty"` +} + +// GetCaseTagsDefaultSpaceParams defines parameters for GetCaseTagsDefaultSpace. +type GetCaseTagsDefaultSpaceParams struct { + // Owner A filter to limit the response to a specific set of applications. If this parameter is omitted, the response contains information about all the cases that the user has access to read. + Owner *CasesOwnerFilter `form:"owner,omitempty" json:"owner,omitempty"` +} + +// FindCaseCommentsDefaultSpaceParams defines parameters for FindCaseCommentsDefaultSpace. +type FindCaseCommentsDefaultSpaceParams struct { + // Page The page number to return. + Page *CasesPageIndex `form:"page,omitempty" json:"page,omitempty"` + + // PerPage The number of items to return. Limited to 100 items. + PerPage *CasesPageSize `form:"perPage,omitempty" json:"perPage,omitempty"` + + // SortOrder Determines the sort order. + SortOrder *FindCaseCommentsDefaultSpaceParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` +} + +// FindCaseCommentsDefaultSpaceParamsSortOrder defines parameters for FindCaseCommentsDefaultSpace. 
+type FindCaseCommentsDefaultSpaceParamsSortOrder string + +// PushCaseDefaultSpaceJSONBody defines parameters for PushCaseDefaultSpace. +type PushCaseDefaultSpaceJSONBody = map[string]interface{} + +// FindCaseActivityDefaultSpaceParams defines parameters for FindCaseActivityDefaultSpace. +type FindCaseActivityDefaultSpaceParams struct { + // Page The page number to return. + Page *CasesPageIndex `form:"page,omitempty" json:"page,omitempty"` + + // PerPage The number of items to return. Limited to 100 items. + PerPage *CasesPageSize `form:"perPage,omitempty" json:"perPage,omitempty"` + + // SortOrder Determines the sort order. + SortOrder *FindCaseActivityDefaultSpaceParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` + + // Types Determines the types of user actions to return. + Types *CasesUserActionTypes `form:"types,omitempty" json:"types,omitempty"` +} + +// FindCaseActivityDefaultSpaceParamsSortOrder defines parameters for FindCaseActivityDefaultSpace. +type FindCaseActivityDefaultSpaceParamsSortOrder string + +// FindCaseActivityDefaultSpaceParamsTypes defines parameters for FindCaseActivityDefaultSpace. +type FindCaseActivityDefaultSpaceParamsTypes string + +// UpdateFieldsMetadataDefaultJSONBody defines parameters for UpdateFieldsMetadataDefault. +type UpdateFieldsMetadataDefaultJSONBody struct { + // Fields The field object. + Fields map[string]interface{} `json:"fields"` +} + +// CreateRuntimeFieldDefaultJSONBody defines parameters for CreateRuntimeFieldDefault. +type CreateRuntimeFieldDefaultJSONBody struct { + // Name The name for a runtime field. + Name string `json:"name"` + + // RuntimeField The runtime field definition object. + RuntimeField map[string]interface{} `json:"runtimeField"` +} + +// CreateUpdateRuntimeFieldDefaultJSONBody defines parameters for CreateUpdateRuntimeFieldDefault. +type CreateUpdateRuntimeFieldDefaultJSONBody struct { + // Name The name for a runtime field. + Name string `json:"name"` + + // RuntimeField The runtime field definition object. + RuntimeField map[string]interface{} `json:"runtimeField"` +} + +// UpdateRuntimeFieldDefaultJSONBody defines parameters for UpdateRuntimeFieldDefault. +type UpdateRuntimeFieldDefaultJSONBody struct { + // RuntimeField The runtime field definition object. + // + // You can update following fields: + // + // - `type` + // - `script` + RuntimeField map[string]interface{} `json:"runtimeField"` +} + +// SetDefaultDatailViewDefaultJSONBody defines parameters for SetDefaultDatailViewDefault. +type SetDefaultDatailViewDefaultJSONBody struct { + // DataViewId The data view identifier. NOTE: The API does not validate whether it is a valid identifier. Use `null` to unset the default data view. + DataViewId *string `json:"data_view_id,omitempty"` + + // Force Update an existing default data view identifier. + Force *bool `json:"force,omitempty"` +} + +// DeleteRuleParams defines parameters for DeleteRule. +type DeleteRuleParams struct { + // Id The rule's `id` value. + Id *SecurityDetectionsAPIRuleObjectId `form:"id,omitempty" json:"id,omitempty"` + + // RuleId The rule's `rule_id` value. + RuleId *SecurityDetectionsAPIRuleSignatureId `form:"rule_id,omitempty" json:"rule_id,omitempty"` +} + +// ReadRuleParams defines parameters for ReadRule. +type ReadRuleParams struct { + // Id The rule's `id` value. + Id *SecurityDetectionsAPIRuleObjectId `form:"id,omitempty" json:"id,omitempty"` + + // RuleId The rule's `rule_id` value. 
+ RuleId *SecurityDetectionsAPIRuleSignatureId `form:"rule_id,omitempty" json:"rule_id,omitempty"` +} + +// PerformRulesBulkActionJSONBody defines parameters for PerformRulesBulkAction. +type PerformRulesBulkActionJSONBody struct { + union json.RawMessage +} + +// PerformRulesBulkActionParams defines parameters for PerformRulesBulkAction. +type PerformRulesBulkActionParams struct { + // DryRun Enables dry run mode for the request call. + // + // Enable dry run mode to verify that bulk actions can be applied to specified rules. Certain rules, such as prebuilt Elastic rules on a Basic subscription, can’t be edited and will return errors in the request response. Error details will contain an explanation, the rule name and/or ID, and additional troubleshooting information. + // + // To enable dry run mode on a request, add the query parameter `dry_run=true` to the end of the request URL. Rules specified in the request will be temporarily updated. These updates won’t be written to Elasticsearch. + // > info + // > Dry run mode is not supported for the `export` bulk action. A 400 error will be returned in the request response. + DryRun *bool `form:"dry_run,omitempty" json:"dry_run,omitempty"` +} + +// ExportRulesJSONBody defines parameters for ExportRules. +type ExportRulesJSONBody struct { + // Objects Array of objects with a rule's `rule_id` field. Do not use rule's `id` here. Exports all rules when unspecified. + Objects []struct { + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId SecurityDetectionsAPIRuleSignatureId `json:"rule_id"` + } `json:"objects"` +} + +// ExportRulesParams defines parameters for ExportRules. +type ExportRulesParams struct { + // ExcludeExportDetails Determines whether a summary of the exported rules is returned. + ExcludeExportDetails *bool `form:"exclude_export_details,omitempty" json:"exclude_export_details,omitempty"` + + // FileName File name for saving the exported rules. + // > info + // > When using cURL to export rules to a file, use the -O and -J options to save the rules to the file name specified in the URL. + FileName *string `form:"file_name,omitempty" json:"file_name,omitempty"` +} + +// FindRulesParams defines parameters for FindRules. +type FindRulesParams struct { + Fields *[]string `form:"fields,omitempty" json:"fields,omitempty"` + + // Filter Search query + // + // Filters the returned results according to the value of the specified field, using the alert.attributes.: syntax, where can be: + // - name + // - enabled + // - tags + // - createdBy + // - interval + // - updatedBy + // > info + // > Even though the JSON rule object uses created_by and updated_by fields, you must use createdBy and updatedBy fields in the filter. 
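+	//
+	// Illustrative example (editorial addition, not generated from the spec; the attribute names are taken from the list above):
+	//   alert.attributes.enabled: true AND alert.attributes.tags: "production"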
+ Filter *string `form:"filter,omitempty" json:"filter,omitempty"` + + // SortField Field to sort by + SortField *SecurityDetectionsAPIFindRulesSortField `form:"sort_field,omitempty" json:"sort_field,omitempty"` + + // SortOrder Sort order + SortOrder *SecurityDetectionsAPISortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"` + + // Page Page number + Page *int `form:"page,omitempty" json:"page,omitempty"` + + // PerPage Rules per page + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` + + // GapsRangeStart Gaps range start + GapsRangeStart *string `form:"gaps_range_start,omitempty" json:"gaps_range_start,omitempty"` + + // GapsRangeEnd Gaps range end + GapsRangeEnd *string `form:"gaps_range_end,omitempty" json:"gaps_range_end,omitempty"` +} + +// ImportRulesMultipartBody defines parameters for ImportRules. +type ImportRulesMultipartBody struct { + // File The `.ndjson` file containing the rules. + File *openapi_types.File `json:"file,omitempty"` +} + +// ImportRulesParams defines parameters for ImportRules. +type ImportRulesParams struct { + // Overwrite Determines whether existing rules with the same `rule_id` are overwritten. + Overwrite *bool `form:"overwrite,omitempty" json:"overwrite,omitempty"` + + // OverwriteExceptions Determines whether existing exception lists with the same `list_id` are overwritten. Both the exception list container and its items are overwritten. + OverwriteExceptions *bool `form:"overwrite_exceptions,omitempty" json:"overwrite_exceptions,omitempty"` + + // OverwriteActionConnectors Determines whether existing actions with the same `kibana.alert.rule.actions.id` are overwritten. + OverwriteActionConnectors *bool `form:"overwrite_action_connectors,omitempty" json:"overwrite_action_connectors,omitempty"` + + // AsNewList Generates a new list ID for each imported exception list. + AsNewList *bool `form:"as_new_list,omitempty" json:"as_new_list,omitempty"` +} + +// RulePreviewJSONBody defines parameters for RulePreview. +type RulePreviewJSONBody struct { + union json.RawMessage +} + +// RulePreviewParams defines parameters for RulePreview. +type RulePreviewParams struct { + // EnableLoggedRequests Enables logging and returning in response ES queries, performed during rule execution + EnableLoggedRequests *bool `form:"enable_logged_requests,omitempty" json:"enable_logged_requests,omitempty"` +} + +// RulePreviewJSONBody0 defines parameters for RulePreview. +type RulePreviewJSONBody0 struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). 
+ BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + EventCategoryOverride *SecurityDetectionsAPIEventCategoryOverride `json:"event_category_override,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + InvocationCount int `json:"invocationCount"` + Language SecurityDetectionsAPIEqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. 
+ // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. 
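+	// For example (illustrative editorial addition, not generated from the spec): setting this to "host.name" would populate each alert's `signal.rule.name` with the originating host name.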
+ RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + + // TiebreakerField Sets a secondary field for sorting events + TiebreakerField *SecurityDetectionsAPITiebreakerField `json:"tiebreaker_field,omitempty"` + TimeframeEnd time.Time `json:"timeframeEnd"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampField Specifies the name of the event timestamp field used for sorting a sequence of events. Not to be confused with `timestamp_override`, which specifies the more general field used for querying events within a range. Defaults to the @timestamp ECS field. + TimestampField *SecurityDetectionsAPITimestampField `json:"timestamp_field,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type RulePreviewJSONBody0Type `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// RulePreviewJSONBody0Type defines parameters for RulePreview. 
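+// Editorial note (assumption, not generated from the spec): this variant appears to cover EQL rules, given the EQL-specific fields (`language`, `event_category_override`, `tiebreaker_field`, `timestamp_field`) in RulePreviewJSONBody0, so the type value here is presumably `eql`.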
+type RulePreviewJSONBody0Type string + +// RulePreviewJSONBody1 defines parameters for RulePreview. +type RulePreviewJSONBody1 struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. 
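+	// For example (illustrative editorial addition, not generated from the spec): fields such as `host.name` or `user.name` could be listed here so they are highlighted in the alert details flyout.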
+ InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + InvocationCount int `json:"invocationCount"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
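+	// For example (illustrative editorial addition, not generated from the spec): an entry declaring that `process.name` of type `keyword` is expected to be present in the source indices.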
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + TimeframeEnd time.Time `json:"timeframeEnd"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. 
The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type RulePreviewJSONBody1Type `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// RulePreviewJSONBody1Type defines parameters for RulePreview. +type RulePreviewJSONBody1Type string + +// RulePreviewJSONBody2 defines parameters for RulePreview. +type RulePreviewJSONBody2 struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. 
For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + InvocationCount int `json:"invocationCount"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. 
+ Query *SecurityDetectionsAPIRuleQuery `json:"query,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId SecurityDetectionsAPISavedQueryId `json:"saved_id"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. 
Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + TimeframeEnd time.Time `json:"timeframeEnd"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type RulePreviewJSONBody2Type `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// RulePreviewJSONBody2Type defines parameters for RulePreview. +type RulePreviewJSONBody2Type string + +// RulePreviewJSONBody3 defines parameters for RulePreview. +type RulePreviewJSONBody3 struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIThresholdAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. 
+ Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + InvocationCount int `json:"invocationCount"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. 
+ Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. 
+ SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + Threshold SecurityDetectionsAPIThreshold `json:"threshold"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + TimeframeEnd time.Time `json:"timeframeEnd"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type RulePreviewJSONBody3Type `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// RulePreviewJSONBody3Type defines parameters for RulePreview. +type RulePreviewJSONBody3Type string + +// RulePreviewJSONBody4 defines parameters for RulePreview. +type RulePreviewJSONBody4 struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. 
+ AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + ConcurrentSearches *SecurityDetectionsAPIConcurrentSearches `json:"concurrent_searches,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + InvocationCount int `json:"invocationCount"` + ItemsPerSearch *SecurityDetectionsAPIItemsPerSearch `json:"items_per_search,omitempty"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. 
+ License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // SavedId Kibana [saved search](https://www.elastic.co/guide/en/kibana/current/save-open-search.html) used by the rule to create alerts. + SavedId *SecurityDetectionsAPISavedQueryId `json:"saved_id,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + ThreatFilters *SecurityDetectionsAPIThreatFilters `json:"threat_filters,omitempty"` + + // ThreatIndex Elasticsearch indices used to check which field values generate alerts. 
+ ThreatIndex SecurityDetectionsAPIThreatIndex `json:"threat_index"` + + // ThreatIndicatorPath Defines the path to the threat indicator in the indicator documents (optional) + ThreatIndicatorPath *SecurityDetectionsAPIThreatIndicatorPath `json:"threat_indicator_path,omitempty"` + ThreatLanguage *SecurityDetectionsAPIKqlQueryLanguage `json:"threat_language,omitempty"` + + // ThreatMapping Array of entries objects that define mappings between the source event fields and the values in the Elasticsearch threat index. Each entries object must contain these fields: + // + // - field: field from the event indices on which the rule runs + // - type: must be mapping + // - value: field from the Elasticsearch threat index + // + // You can use Boolean and and or logic to define the conditions for when matching fields and values generate alerts. Sibling entries objects are evaluated using or logic, whereas multiple entries in a single entries object use and logic. See Example of Threat Match rule which uses both `and` and `or` logic. + ThreatMapping SecurityDetectionsAPIThreatMapping `json:"threat_mapping"` + + // ThreatQuery Query used to determine which fields in the Elasticsearch index are used for generating alerts. + ThreatQuery SecurityDetectionsAPIThreatQuery `json:"threat_query"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + TimeframeEnd time.Time `json:"timeframeEnd"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type RulePreviewJSONBody4Type `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// RulePreviewJSONBody4Type defines parameters for RulePreview. +type RulePreviewJSONBody4Type string + +// RulePreviewJSONBody5 defines parameters for RulePreview. +type RulePreviewJSONBody5 struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. 
+ AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // AnomalyThreshold Anomaly score threshold above which the rule creates an alert. Valid values are from 0 to 100. + AnomalyThreshold SecurityDetectionsAPIAnomalyThreshold `json:"anomaly_threshold"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + InvocationCount int `json:"invocationCount"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MachineLearningJobId Machine learning job ID(s) the rule monitors for anomaly scores. + MachineLearningJobId SecurityDetectionsAPIMachineLearningJobId `json:"machine_learning_job_id"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. 
For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. + RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. 
+ RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + TimeframeEnd time.Time `json:"timeframeEnd"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. + TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type RulePreviewJSONBody5Type `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// RulePreviewJSONBody5Type defines parameters for RulePreview. +type RulePreviewJSONBody5Type string + +// RulePreviewJSONBody6 defines parameters for RulePreview. +type RulePreviewJSONBody6 struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. 
+ AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + DataViewId *SecurityDetectionsAPIDataViewId `json:"data_view_id,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // Filters The query and filter context array used to define the conditions for when alerts are created from events. Defaults to an empty array. + // > info + // > This field is not supported for ES|QL rules. + Filters *SecurityDetectionsAPIRuleFilterArray `json:"filters,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // HistoryWindowStart Start date to use when checking if a term has been seen before. Supports relative dates – for example, now-30d will search the last 30 days of data when checking if a term is new. We do not recommend using absolute dates, which can cause issues with rule performance due to querying increasing amounts of data over time. + HistoryWindowStart SecurityDetectionsAPIHistoryWindowStart `json:"history_window_start"` + + // Index Indices on which the rule functions. Defaults to the Security Solution indices defined on the Kibana Advanced Settings page (Kibana → Stack Management → Advanced Settings → `securitySolution:defaultIndex`). + // > info + // > This field is not supported for ES|QL rules. + Index *SecurityDetectionsAPIIndexPatternArray `json:"index,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). + Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. 
+ InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + InvocationCount int `json:"invocationCount"` + Language *SecurityDetectionsAPIKqlQueryLanguage `json:"language,omitempty"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // NewTermsFields Fields to monitor for new values. + NewTermsFields SecurityDetectionsAPINewTermsFields `json:"new_terms_fields"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + TimeframeEnd time.Time `json:"timeframeEnd"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. 
+ TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type RulePreviewJSONBody6Type `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// RulePreviewJSONBody6Type defines parameters for RulePreview. +type RulePreviewJSONBody6Type string + +// RulePreviewJSONBody7 defines parameters for RulePreview. +type RulePreviewJSONBody7 struct { + // Actions Array defining the automated actions (notifications) taken when alerts are generated. + Actions *[]SecurityDetectionsAPIRuleAction `json:"actions,omitempty"` + + // AlertSuppression Defines alert suppression configuration. + AlertSuppression *SecurityDetectionsAPIAlertSuppression `json:"alert_suppression,omitempty"` + AliasPurpose *SecurityDetectionsAPISavedObjectResolveAliasPurpose `json:"alias_purpose,omitempty"` + AliasTargetId *SecurityDetectionsAPISavedObjectResolveAliasTargetId `json:"alias_target_id,omitempty"` + + // Author The rule’s author. + Author *SecurityDetectionsAPIRuleAuthorArray `json:"author,omitempty"` + + // BuildingBlockType Determines if the rule acts as a building block. If yes, the value must be `default`. + // By default, building-block alerts are not displayed in the UI. These rules are used as a foundation for other rules that do generate alerts. + // For more information, refer to [About building block rules](https://www.elastic.co/guide/en/security/current/building-block-rule.html). + BuildingBlockType *SecurityDetectionsAPIBuildingBlockType `json:"building_block_type,omitempty"` + + // Description The rule’s description. + Description SecurityDetectionsAPIRuleDescription `json:"description"` + + // Enabled Determines whether the rule is enabled. Defaults to true. + Enabled *SecurityDetectionsAPIIsRuleEnabled `json:"enabled,omitempty"` + ExceptionsList *[]SecurityDetectionsAPIRuleExceptionList `json:"exceptions_list,omitempty"` + + // FalsePositives String array used to describe common reasons why the rule may issue false-positive alerts. Defaults to an empty array. + FalsePositives *SecurityDetectionsAPIRuleFalsePositiveArray `json:"false_positives,omitempty"` + + // From Time from which data is analyzed each time the rule runs, using a date math range. For example, now-4200s means the rule analyzes data from 70 minutes before its start time. Defaults to now-6m (analyzes data from 6 minutes before the start time). + From *SecurityDetectionsAPIRuleIntervalFrom `json:"from,omitempty"` + + // Interval Frequency of rule execution, using a date math range. For example, "1h" means the rule runs every hour. Defaults to 5m (5 minutes). 
+ Interval *SecurityDetectionsAPIRuleInterval `json:"interval,omitempty"` + + // InvestigationFields Schema for fields relating to investigation fields. These are user defined fields we use to highlight + // in various features in the UI such as alert details flyout and exceptions auto-population from alert. + InvestigationFields *SecurityDetectionsAPIInvestigationFields `json:"investigation_fields,omitempty"` + InvocationCount int `json:"invocationCount"` + Language SecurityDetectionsAPIEsqlQueryLanguage `json:"language"` + + // License The rule's license. + License *SecurityDetectionsAPIRuleLicense `json:"license,omitempty"` + + // MaxSignals Maximum number of alerts the rule can create during a single run (the rule’s Max alerts per run [advanced setting](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#rule-ui-advanced-params) value). + // > info + // > This setting can be superseded by the [Kibana configuration setting](https://www.elastic.co/guide/en/kibana/current/alert-action-settings-kb.html#alert-settings) `xpack.alerting.rules.run.alerts.max`, which determines the maximum alerts generated by any rule in the Kibana alerting framework. For example, if `xpack.alerting.rules.run.alerts.max` is set to 1000, the rule can generate no more than 1000 alerts even if `max_signals` is set higher. + MaxSignals *SecurityDetectionsAPIMaxSignals `json:"max_signals,omitempty"` + + // Meta Placeholder for metadata about the rule. + // > info + // > This field is overwritten when you save changes to the rule’s settings. + Meta *SecurityDetectionsAPIRuleMetadata `json:"meta,omitempty"` + + // Name A human-readable name for the rule. + Name SecurityDetectionsAPIRuleName `json:"name"` + + // Namespace Has no effect. + Namespace *SecurityDetectionsAPIAlertsIndexNamespace `json:"namespace,omitempty"` + + // Note Notes to help investigate alerts produced by the rule. + Note *SecurityDetectionsAPIInvestigationGuide `json:"note,omitempty"` + Outcome *SecurityDetectionsAPISavedObjectResolveOutcome `json:"outcome,omitempty"` + + // OutputIndex (deprecated) Has no effect. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + OutputIndex *SecurityDetectionsAPIAlertsIndex `json:"output_index,omitempty"` + + // Query [Query](https://www.elastic.co/guide/en/kibana/8.17/search.html) used by the rule to create alerts. + // + // - For indicator match rules, only the query’s results are used to determine whether an alert is generated. + // - ES|QL rules have additional query requirements. Refer to [Create ES|QL](https://www.elastic.co/guide/en/security/current/rules-ui-create.html#create-esql-rule) rules for more information. + Query SecurityDetectionsAPIRuleQuery `json:"query"` + + // References Array containing notes about or references to relevant information about the rule. Defaults to an empty array. + References *SecurityDetectionsAPIRuleReferenceArray `json:"references,omitempty"` + RelatedIntegrations *SecurityDetectionsAPIRelatedIntegrationArray `json:"related_integrations,omitempty"` + + // RequiredFields Elasticsearch fields and their types that need to be present for the rule to function. + // > info + // > The value of `required_fields` does not affect the rule’s behavior, and specifying it incorrectly won’t cause the rule to fail. Use `required_fields` as an informational property to document the fields that the rule expects to be present in the data. 
+ RequiredFields *[]SecurityDetectionsAPIRequiredFieldInput `json:"required_fields,omitempty"` + ResponseActions *[]SecurityDetectionsAPIResponseAction `json:"response_actions,omitempty"` + + // RiskScore A numerical representation of the alert's severity from 0 to 100, where: + // * `0` - `21` represents low severity + // * `22` - `47` represents medium severity + // * `48` - `73` represents high severity + // * `74` - `100` represents critical severity + RiskScore SecurityDetectionsAPIRiskScore `json:"risk_score"` + + // RiskScoreMapping Overrides generated alerts' risk_score with a value from the source event + RiskScoreMapping *SecurityDetectionsAPIRiskScoreMapping `json:"risk_score_mapping,omitempty"` + + // RuleId A stable unique identifier for the rule object. It can be assigned during rule creation. It can be any string, but often is a UUID. It should be unique not only within a given Kibana space, but also across spaces and Elastic environments. The same prebuilt Elastic rule, when installed in two different Kibana spaces or two different Elastic environments, will have the same `rule_id`s. + RuleId *SecurityDetectionsAPIRuleSignatureId `json:"rule_id,omitempty"` + + // RuleNameOverride Sets which field in the source event is used to populate the alert's `signal.rule.name` value (in the UI, this value is displayed on the Rules page in the Rule column). When unspecified, the rule’s `name` value is used. The source field must be a string data type. + RuleNameOverride *SecurityDetectionsAPIRuleNameOverride `json:"rule_name_override,omitempty"` + + // Setup Populates the rule’s setup guide with instructions on rule prerequisites such as required integrations, configuration steps, and anything else needed for the rule to work correctly. + Setup *SecurityDetectionsAPISetupGuide `json:"setup,omitempty"` + + // Severity Severity level of alerts produced by the rule, which must be one of the following: + // * `low`: Alerts that are of interest but generally not considered to be security incidents + // * `medium`: Alerts that require investigation + // * `high`: Alerts that require immediate investigation + // * `critical`: Alerts that indicate it is highly likely a security incident has occurred + Severity SecurityDetectionsAPISeverity `json:"severity"` + + // SeverityMapping Overrides generated alerts' severity with values from the source event + SeverityMapping *SecurityDetectionsAPISeverityMapping `json:"severity_mapping,omitempty"` + + // Tags String array containing words and phrases to help categorize, filter, and search rules. Defaults to an empty array. + Tags *SecurityDetectionsAPIRuleTagArray `json:"tags,omitempty"` + Threat *SecurityDetectionsAPIThreatArray `json:"threat,omitempty"` + + // Throttle Defines how often rule actions are taken. + Throttle *SecurityDetectionsAPIRuleActionThrottle `json:"throttle,omitempty"` + TimeframeEnd time.Time `json:"timeframeEnd"` + + // TimelineId Timeline template ID + TimelineId *SecurityDetectionsAPITimelineTemplateId `json:"timeline_id,omitempty"` + + // TimelineTitle Timeline template title + TimelineTitle *SecurityDetectionsAPITimelineTemplateTitle `json:"timeline_title,omitempty"` + + // TimestampOverride Sets the time field used to query indices. When unspecified, rules query the `@timestamp` field. The source field must be an Elasticsearch date data type. 
+ TimestampOverride *SecurityDetectionsAPITimestampOverride `json:"timestamp_override,omitempty"` + + // TimestampOverrideFallbackDisabled Disables the fallback to the event's @timestamp field + TimestampOverrideFallbackDisabled *SecurityDetectionsAPITimestampOverrideFallbackDisabled `json:"timestamp_override_fallback_disabled,omitempty"` + To *SecurityDetectionsAPIRuleIntervalTo `json:"to,omitempty"` + + // Type Rule type + Type RulePreviewJSONBody7Type `json:"type"` + + // Version The rule's version number. + // + // - For prebuilt rules it represents the version of the rule's content in the source [detection-rules](https://github.com/elastic/detection-rules) repository (and the corresponding `security_detection_engine` Fleet package that is used for distributing prebuilt rules). + // - For custom rules it is set to `1` when the rule is created. + // > info + // > It is not incremented on each update. Compare this to the `revision` field. + Version *SecurityDetectionsAPIRuleVersion `json:"version,omitempty"` +} + +// RulePreviewJSONBody7Type defines parameters for RulePreview. +type RulePreviewJSONBody7Type string + +// CreateRuleExceptionListItemsJSONBody defines parameters for CreateRuleExceptionListItems. +type CreateRuleExceptionListItemsJSONBody struct { + Items []SecurityExceptionsAPICreateRuleExceptionListItemProps `json:"items"` +} + +// SetAlertAssigneesJSONBody defines parameters for SetAlertAssignees. +type SetAlertAssigneesJSONBody struct { + Assignees SecurityDetectionsAPIAlertAssignees `json:"assignees"` + + // Ids A list of alerts `id`s. + Ids SecurityDetectionsAPIAlertIds `json:"ids"` +} + +// FinalizeAlertsMigrationJSONBody defines parameters for FinalizeAlertsMigration. +type FinalizeAlertsMigrationJSONBody struct { + // MigrationIds Array of `migration_id`s to finalize. + MigrationIds []string `json:"migration_ids"` +} + +// AlertsMigrationCleanupJSONBody defines parameters for AlertsMigrationCleanup. +type AlertsMigrationCleanupJSONBody struct { + // MigrationIds Array of `migration_id`s to cleanup. + MigrationIds []string `json:"migration_ids"` +} + +// CreateAlertsMigrationJSONBody defines parameters for CreateAlertsMigration. +type CreateAlertsMigrationJSONBody struct { + // Index Array of index names to migrate. + Index []string `json:"index"` + + // RequestsPerSecond The throttle for the migration task in sub-requests per second. Corresponds to requests_per_second on the Reindex API. + RequestsPerSecond *int `json:"requests_per_second,omitempty"` + + // Size Number of alerts to migrate per batch. Corresponds to the source.size option on the Reindex API. + Size *int `json:"size,omitempty"` + + // Slices The number of subtasks for the migration task. Corresponds to slices on the Reindex API. + Slices *int `json:"slices,omitempty"` +} + +// ReadAlertsMigrationStatusParams defines parameters for ReadAlertsMigrationStatus. +type ReadAlertsMigrationStatusParams struct { + // From Maximum age of qualifying detection alerts + From string `form:"from" json:"from"` +} + +// SearchAlertsJSONBody defines parameters for SearchAlerts. 
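The preview body variants above are shaped for specific rule types: RulePreviewJSONBody6 carries `new_terms_fields` with a KQL/Lucene language field, while RulePreviewJSONBody7 takes an ES|QL query. A minimal sketch of how a caller might populate the ES|QL variant and marshal it follows; it assumes the generated `SecurityDetectionsAPI*` aliases are plain string/number definitions (as oapi-codegen typically emits), that the snippet lives in the generated `kbapi` package, and that `"esql"` is the accepted `language`/`type` literal — none of which is stated in this diff.

package kbapi // assumed package name of the generated client

import (
	"encoding/json"
	"time"
)

// buildEsqlPreviewBody is an illustrative sketch, not part of the generated code.
func buildEsqlPreviewBody() ([]byte, error) {
	body := RulePreviewJSONBody7{
		Name:            SecurityDetectionsAPIRuleName("Editor sketch: suspicious mshta spawn"),
		Description:     SecurityDetectionsAPIRuleDescription("Preview-only rule body"),
		Query:           SecurityDetectionsAPIRuleQuery(`FROM logs-* | WHERE process.name == "mshta.exe"`),
		Language:        SecurityDetectionsAPIEsqlQueryLanguage("esql"), // assumed literal
		Type:            RulePreviewJSONBody7Type("esql"),               // assumed literal
		RiskScore:       SecurityDetectionsAPIRiskScore(47),             // top of the medium band per the comments above
		Severity:        SecurityDetectionsAPISeverity("medium"),
		InvocationCount: 1,
		TimeframeEnd:    time.Now().UTC(),
	}
	return json.Marshal(body)
}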
+type SearchAlertsJSONBody struct { + UnderscoreSource *SearchAlertsJSONBody_Source `json:"_source,omitempty"` + Aggs *map[string]interface{} `json:"aggs,omitempty"` + Fields *[]string `json:"fields,omitempty"` + Query *map[string]interface{} `json:"query,omitempty"` + RuntimeMappings *map[string]interface{} `json:"runtime_mappings,omitempty"` + Size *int `json:"size,omitempty"` + Sort *SecurityDetectionsAPIAlertsSort `json:"sort,omitempty"` + TrackTotalHits *bool `json:"track_total_hits,omitempty"` +} + +// SearchAlertsJSONBodySource0 defines parameters for SearchAlerts. +type SearchAlertsJSONBodySource0 = bool + +// SearchAlertsJSONBodySource1 defines parameters for SearchAlerts. +type SearchAlertsJSONBodySource1 = string + +// SearchAlertsJSONBodySource2 defines parameters for SearchAlerts. +type SearchAlertsJSONBodySource2 = []string + +// SearchAlertsJSONBody_Source defines parameters for SearchAlerts. +type SearchAlertsJSONBody_Source struct { + union json.RawMessage +} + +// SetAlertsStatusJSONBody defines parameters for SetAlertsStatus. +type SetAlertsStatusJSONBody struct { + union json.RawMessage +} + +// SetAlertTagsJSONBody defines parameters for SetAlertTags. +type SetAlertTagsJSONBody struct { + // Ids A list of alerts `id`s. + Ids SecurityDetectionsAPIAlertIds `json:"ids"` + + // Tags Object with list of tags to add and remove. + Tags SecurityDetectionsAPISetAlertTags `json:"tags"` +} + +// RotateEncryptionKeyParams defines parameters for RotateEncryptionKey. +type RotateEncryptionKeyParams struct { + // BatchSize Specifies a maximum number of saved objects that Kibana can process in a single batch. Bulk key rotation is an iterative process since Kibana may not be able to fetch and process all required saved objects in one go and splits processing into consequent batches. By default, the batch size is 10000, which is also a maximum allowed value. + BatchSize *float32 `form:"batch_size,omitempty" json:"batch_size,omitempty"` + + // Type Limits encryption key rotation only to the saved objects with the specified type. By default, Kibana tries to rotate the encryption key for all saved object types that may contain encrypted attributes. + Type *string `form:"type,omitempty" json:"type,omitempty"` +} + +// EndpointGetActionsListParams defines parameters for EndpointGetActionsList. +type EndpointGetActionsListParams struct { + Page *SecurityEndpointManagementAPIPage `form:"page,omitempty" json:"page,omitempty"` + PageSize *SecurityEndpointManagementAPIPageSize `form:"pageSize,omitempty" json:"pageSize,omitempty"` + Commands *SecurityEndpointManagementAPICommands `form:"commands,omitempty" json:"commands,omitempty"` + AgentIds *SecurityEndpointManagementAPIAgentIds `form:"agentIds,omitempty" json:"agentIds,omitempty"` + UserIds *SecurityEndpointManagementAPIUserIds `form:"userIds,omitempty" json:"userIds,omitempty"` + StartDate *SecurityEndpointManagementAPIStartDate `form:"startDate,omitempty" json:"startDate,omitempty"` + EndDate *SecurityEndpointManagementAPIEndDate `form:"endDate,omitempty" json:"endDate,omitempty"` + AgentTypes *SecurityEndpointManagementAPIAgentTypes `form:"agentTypes,omitempty" json:"agentTypes,omitempty"` + WithOutputs *SecurityEndpointManagementAPIWithOutputs `form:"withOutputs,omitempty" json:"withOutputs,omitempty"` + Types *SecurityEndpointManagementAPITypes `form:"types,omitempty" json:"types,omitempty"` +} + +// EndpointIsolateActionJSONBody defines parameters for EndpointIsolateAction. 
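SearchAlertsJSONBody mirrors a regular Elasticsearch search request against the alerts indices, with `_source` modelled as a union (bool, string, or string array) behind `json.RawMessage`. A minimal sketch of building a search body is below; the `_source` union is left unset because populating it relies on generated accessor helpers that are not visible in this diff, and the package name is assumed.

package kbapi // assumed package name of the generated client

import "encoding/json"

// buildOpenAlertsSearch is an illustrative sketch, not part of the generated code.
func buildOpenAlertsSearch() ([]byte, error) {
	size := 10
	trackTotalHits := true
	query := map[string]interface{}{
		"bool": map[string]interface{}{
			"filter": []interface{}{
				map[string]interface{}{
					"term": map[string]interface{}{"kibana.alert.workflow_status": "open"},
				},
			},
		},
	}
	body := SearchAlertsJSONBody{
		Query:          &query,
		Size:           &size,
		TrackTotalHits: &trackTotalHits,
	}
	return json.Marshal(body)
}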
+type EndpointIsolateActionJSONBody struct { + // AgentType List of agent types to retrieve. Defaults to `endpoint`. + AgentType *SecurityEndpointManagementAPIAgentTypes `json:"agent_type,omitempty"` + + // AlertIds If this action is associated with any alerts, they can be specified here. The action will be logged in any cases associated with the specified alerts. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds The IDs of cases where the action taken will be logged. + CaseIds *[]string `json:"case_ids,omitempty"` + + // Comment Optional comment + Comment *SecurityEndpointManagementAPIComment `json:"comment,omitempty"` + + // EndpointIds List of endpoint IDs (cannot contain empty strings) + EndpointIds SecurityEndpointManagementAPIEndpointIds `json:"endpoint_ids"` + + // Parameters Optional parameters object + Parameters *SecurityEndpointManagementAPIParameters `json:"parameters,omitempty"` +} + +// EndpointUnisolateActionJSONBody defines parameters for EndpointUnisolateAction. +type EndpointUnisolateActionJSONBody struct { + // AgentType List of agent types to retrieve. Defaults to `endpoint`. + AgentType *SecurityEndpointManagementAPIAgentTypes `json:"agent_type,omitempty"` + + // AlertIds If this action is associated with any alerts, they can be specified here. The action will be logged in any cases associated with the specified alerts. + AlertIds *[]string `json:"alert_ids,omitempty"` + + // CaseIds The IDs of cases where the action taken will be logged. + CaseIds *[]string `json:"case_ids,omitempty"` + + // Comment Optional comment + Comment *SecurityEndpointManagementAPIComment `json:"comment,omitempty"` + + // EndpointIds List of endpoint IDs (cannot contain empty strings) + EndpointIds SecurityEndpointManagementAPIEndpointIds `json:"endpoint_ids"` + + // Parameters Optional parameters object + Parameters *SecurityEndpointManagementAPIParameters `json:"parameters,omitempty"` +} + +// EndpointGetActionsStatusParams defines parameters for EndpointGetActionsStatus. +type EndpointGetActionsStatusParams struct { + Query struct { + // AgentIds A list of agent IDs. Max of 50. + AgentIds *SecurityEndpointManagementAPIAgentIds `json:"agent_ids,omitempty"` + } `form:"query" json:"query"` +} + +// GetEndpointMetadataListParams defines parameters for GetEndpointMetadataList. +type GetEndpointMetadataListParams struct { + Page *SecurityEndpointManagementAPIPage `form:"page,omitempty" json:"page,omitempty"` + PageSize *SecurityEndpointManagementAPIPageSize `form:"pageSize,omitempty" json:"pageSize,omitempty"` + Kuery *SecurityEndpointManagementAPIKuery `form:"kuery,omitempty" json:"kuery,omitempty"` + HostStatuses SecurityEndpointManagementAPIHostStatuses `form:"hostStatuses" json:"hostStatuses"` + SortField *SecurityEndpointManagementAPISortField `form:"sortField,omitempty" json:"sortField,omitempty"` + SortDirection *SecurityEndpointManagementAPISortDirection `form:"sortDirection,omitempty" json:"sortDirection,omitempty"` +} + +// GetPolicyResponseParams defines parameters for GetPolicyResponse. +type GetPolicyResponseParams struct { + Query struct { + // AgentId Agent ID + AgentId *SecurityEndpointManagementAPIAgentId `json:"agentId,omitempty"` + } `form:"query" json:"query"` +} + +// CreateUpdateProtectionUpdatesNoteJSONBody defines parameters for CreateUpdateProtectionUpdatesNote. +type CreateUpdateProtectionUpdatesNoteJSONBody struct { + Note *string `json:"note,omitempty"` +} + +// DeleteEndpointListItemParams defines parameters for DeleteEndpointListItem. 
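EndpointIsolateActionJSONBody and EndpointUnisolateActionJSONBody share the same shape; only `endpoint_ids` is non-optional in the generated type, and the comment/case fields attach the action to an investigation. A sketch of an isolate request body follows, assuming the endpoint-management aliases are string-backed and using placeholder IDs; the package name is likewise assumed.

package kbapi // assumed package name of the generated client

import "encoding/json"

// buildIsolateRequest is an illustrative sketch, not part of the generated code.
func buildIsolateRequest() ([]byte, error) {
	comment := SecurityEndpointManagementAPIComment("isolating host pending triage") // assumes a string-backed alias
	caseIds := []string{"placeholder-case-id"}
	body := EndpointIsolateActionJSONBody{
		EndpointIds: SecurityEndpointManagementAPIEndpointIds{"placeholder-endpoint-id"}, // assumes a []string-backed alias
		Comment:     &comment,
		CaseIds:     &caseIds,
	}
	return json.Marshal(body)
}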
+type DeleteEndpointListItemParams struct { + // Id Either `id` or `item_id` must be specified + Id *SecurityEndpointExceptionsAPIExceptionListItemId `form:"id,omitempty" json:"id,omitempty"` + + // ItemId Either `id` or `item_id` must be specified + ItemId *SecurityEndpointExceptionsAPIExceptionListItemHumanId `form:"item_id,omitempty" json:"item_id,omitempty"` +} + +// ReadEndpointListItemParams defines parameters for ReadEndpointListItem. +type ReadEndpointListItemParams struct { + // Id Either `id` or `item_id` must be specified + Id *SecurityEndpointExceptionsAPIExceptionListItemId `form:"id,omitempty" json:"id,omitempty"` + + // ItemId Either `id` or `item_id` must be specified + ItemId *SecurityEndpointExceptionsAPIExceptionListItemHumanId `form:"item_id,omitempty" json:"item_id,omitempty"` +} + +// CreateEndpointListItemJSONBody defines parameters for CreateEndpointListItem. +type CreateEndpointListItemJSONBody struct { + // Comments Array of comment fields: + // + // - comment (string): Comments about the exception item. + Comments *SecurityEndpointExceptionsAPIExceptionListItemCommentArray `json:"comments,omitempty"` + + // Description Describes the exception list. + Description SecurityEndpointExceptionsAPIExceptionListItemDescription `json:"description"` + Entries SecurityEndpointExceptionsAPIExceptionListItemEntryArray `json:"entries"` + + // ItemId Human readable string identifier, e.g. `trusted-linux-processes` + ItemId *SecurityEndpointExceptionsAPIExceptionListItemHumanId `json:"item_id,omitempty"` + Meta *SecurityEndpointExceptionsAPIExceptionListItemMeta `json:"meta,omitempty"` + + // Name Exception list name. + Name SecurityEndpointExceptionsAPIExceptionListItemName `json:"name"` + OsTypes *SecurityEndpointExceptionsAPIExceptionListItemOsTypeArray `json:"os_types,omitempty"` + Tags *SecurityEndpointExceptionsAPIExceptionListItemTags `json:"tags,omitempty"` + Type SecurityEndpointExceptionsAPIExceptionListItemType `json:"type"` +} + +// UpdateEndpointListItemJSONBody defines parameters for UpdateEndpointListItem. +type UpdateEndpointListItemJSONBody struct { + UnderscoreVersion *string `json:"_version,omitempty"` + + // Comments Array of comment fields: + // + // - comment (string): Comments about the exception item. + Comments *SecurityEndpointExceptionsAPIExceptionListItemCommentArray `json:"comments,omitempty"` + + // Description Describes the exception list. + Description SecurityEndpointExceptionsAPIExceptionListItemDescription `json:"description"` + Entries SecurityEndpointExceptionsAPIExceptionListItemEntryArray `json:"entries"` + + // Id Exception's identifier. + Id *SecurityEndpointExceptionsAPIExceptionListItemId `json:"id,omitempty"` + + // ItemId Human readable string identifier, e.g. `trusted-linux-processes` + ItemId *SecurityEndpointExceptionsAPIExceptionListItemHumanId `json:"item_id,omitempty"` + Meta *SecurityEndpointExceptionsAPIExceptionListItemMeta `json:"meta,omitempty"` + + // Name Exception list name. + Name SecurityEndpointExceptionsAPIExceptionListItemName `json:"name"` + OsTypes *SecurityEndpointExceptionsAPIExceptionListItemOsTypeArray `json:"os_types,omitempty"` + Tags *SecurityEndpointExceptionsAPIExceptionListItemTags `json:"tags,omitempty"` + Type SecurityEndpointExceptionsAPIExceptionListItemType `json:"type"` +} + +// FindEndpointListItemsParams defines parameters for FindEndpointListItems. 
+type FindEndpointListItemsParams struct { + // Filter Filters the returned results according to the value of the specified field, + // using the `:` syntax. + Filter *SecurityEndpointExceptionsAPIFindEndpointListItemsFilter `form:"filter,omitempty" json:"filter,omitempty"` + + // Page The page number to return + Page *int `form:"page,omitempty" json:"page,omitempty"` + + // PerPage The number of exception list items to return per page + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` + + // SortField Determines which field is used to sort the results + SortField *SecurityEndpointExceptionsAPINonEmptyString `form:"sort_field,omitempty" json:"sort_field,omitempty"` + + // SortOrder Determines the sort order, which can be `desc` or `asc` + SortOrder *FindEndpointListItemsParamsSortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"` +} + +// FindEndpointListItemsParamsSortOrder defines parameters for FindEndpointListItems. +type FindEndpointListItemsParamsSortOrder string + +// DeleteMonitoringEngineParams defines parameters for DeleteMonitoringEngine. +type DeleteMonitoringEngineParams struct { + // Data Whether to delete all the privileged user data + Data *bool `form:"data,omitempty" json:"data,omitempty"` +} + +// PrivmonBulkUploadUsersCSVMultipartBody defines parameters for PrivmonBulkUploadUsersCSV. +type PrivmonBulkUploadUsersCSVMultipartBody struct { + // File The CSV file to upload. + File openapi_types.File `json:"file"` +} + +// ListPrivMonUsersParams defines parameters for ListPrivMonUsers. +type ListPrivMonUsersParams struct { + // Kql KQL query to filter the list of monitored users + Kql *string `form:"kql,omitempty" json:"kql,omitempty"` +} + +// InitEntityStoreJSONBody defines parameters for InitEntityStore. +type InitEntityStoreJSONBody struct { + // Delay The delay before the transform will run. + Delay *string `json:"delay,omitempty"` + + // DocsPerSecond The number of documents per second to process. + DocsPerSecond *int `json:"docsPerSecond,omitempty"` + + // EnrichPolicyExecutionInterval Interval in which enrich policy runs. For example, `"1h"` means the rule runs every hour. Must be less than or equal to half the duration of the lookback period, + EnrichPolicyExecutionInterval *SecurityEntityAnalyticsAPIInterval `json:"enrichPolicyExecutionInterval,omitempty"` + EntityTypes *[]SecurityEntityAnalyticsAPIEntityType `json:"entityTypes,omitempty"` + + // FieldHistoryLength The number of historical values to keep for each field. + FieldHistoryLength *int `json:"fieldHistoryLength,omitempty"` + Filter *string `json:"filter,omitempty"` + + // Frequency The frequency at which the transform will run. + Frequency *string `json:"frequency,omitempty"` + IndexPattern *SecurityEntityAnalyticsAPIIndexPattern `json:"indexPattern,omitempty"` + + // LookbackPeriod The amount of time the transform looks back to calculate the aggregations. + LookbackPeriod *string `json:"lookbackPeriod,omitempty"` + + // MaxPageSearchSize The initial page size to use for the composite aggregation of each checkpoint. + MaxPageSearchSize *int `json:"maxPageSearchSize,omitempty"` + + // Timeout The timeout for initializing the aggregating transform. + Timeout *string `json:"timeout,omitempty"` + + // TimestampField The field to use as the timestamp. + TimestampField *string `json:"timestampField,omitempty"` +} + +// DeleteEntityEngineParams defines parameters for DeleteEntityEngine. +type DeleteEntityEngineParams struct { + // Data Control flag to also delete the entity data. 
+ Data *bool `form:"data,omitempty" json:"data,omitempty"` +} + +// InitEntityEngineJSONBody defines parameters for InitEntityEngine. +type InitEntityEngineJSONBody struct { + // Delay The delay before the transform will run. + Delay *string `json:"delay,omitempty"` + + // DocsPerSecond The number of documents per second to process. + DocsPerSecond *int `json:"docsPerSecond,omitempty"` + + // EnrichPolicyExecutionInterval Interval in which enrich policy runs. For example, `"1h"` means the rule runs every hour. Must be less than or equal to half the duration of the lookback period, + EnrichPolicyExecutionInterval *SecurityEntityAnalyticsAPIInterval `json:"enrichPolicyExecutionInterval,omitempty"` + + // FieldHistoryLength The number of historical values to keep for each field. + FieldHistoryLength *int `json:"fieldHistoryLength,omitempty"` + Filter *string `json:"filter,omitempty"` + + // Frequency The frequency at which the transform will run. + Frequency *string `json:"frequency,omitempty"` + IndexPattern *SecurityEntityAnalyticsAPIIndexPattern `json:"indexPattern,omitempty"` + + // LookbackPeriod The amount of time the transform looks back to calculate the aggregations. + LookbackPeriod *string `json:"lookbackPeriod,omitempty"` + + // MaxPageSearchSize The initial page size to use for the composite aggregation of each checkpoint. + MaxPageSearchSize *int `json:"maxPageSearchSize,omitempty"` + + // Timeout The timeout for initializing the aggregating transform. + Timeout *string `json:"timeout,omitempty"` + + // TimestampField The field to use as the timestamp for the entity type. + TimestampField *string `json:"timestampField,omitempty"` +} + +// ListEntitiesParams defines parameters for ListEntities. +type ListEntitiesParams struct { + SortField *string `form:"sort_field,omitempty" json:"sort_field,omitempty"` + SortOrder *ListEntitiesParamsSortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"` + Page *int `form:"page,omitempty" json:"page,omitempty"` + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` + + // FilterQuery An ES query to filter by. + FilterQuery *string `form:"filterQuery,omitempty" json:"filterQuery,omitempty"` + EntityTypes []SecurityEntityAnalyticsAPIEntityType `form:"entity_types" json:"entity_types"` +} + +// ListEntitiesParamsSortOrder defines parameters for ListEntities. +type ListEntitiesParamsSortOrder string + +// GetEntityStoreStatusParams defines parameters for GetEntityStoreStatus. +type GetEntityStoreStatusParams struct { + // IncludeComponents If true returns a detailed status of the engine including all it's components + IncludeComponents *bool `form:"include_components,omitempty" json:"include_components,omitempty"` +} + +// DeleteExceptionListParams defines parameters for DeleteExceptionList. +type DeleteExceptionListParams struct { + // Id Exception list's identifier. Either `id` or `list_id` must be specified. + Id *SecurityExceptionsAPIExceptionListId `form:"id,omitempty" json:"id,omitempty"` + + // ListId Human readable exception list string identifier, e.g. `trusted-linux-processes`. Either `id` or `list_id` must be specified. + ListId *SecurityExceptionsAPIExceptionListHumanId `form:"list_id,omitempty" json:"list_id,omitempty"` + NamespaceType *SecurityExceptionsAPIExceptionNamespaceType `form:"namespace_type,omitempty" json:"namespace_type,omitempty"` +} + +// ReadExceptionListParams defines parameters for ReadExceptionList. +type ReadExceptionListParams struct { + // Id Exception list's identifier. 
Either `id` or `list_id` must be specified.
+	Id *SecurityExceptionsAPIExceptionListId `form:"id,omitempty" json:"id,omitempty"`
+
+	// ListId Human readable exception list string identifier, e.g. `trusted-linux-processes`. Either `id` or `list_id` must be specified.
+	ListId *SecurityExceptionsAPIExceptionListHumanId `form:"list_id,omitempty" json:"list_id,omitempty"`
+	NamespaceType *SecurityExceptionsAPIExceptionNamespaceType `form:"namespace_type,omitempty" json:"namespace_type,omitempty"`
+}
+
+// CreateExceptionListJSONBody defines parameters for CreateExceptionList.
+type CreateExceptionListJSONBody struct {
+	// Description Describes the exception list.
+	Description SecurityExceptionsAPIExceptionListDescription `json:"description"`
+
+	// ListId The exception list's human readable string identifier, `endpoint_list`.
+	ListId *SecurityExceptionsAPIExceptionListHumanId `json:"list_id,omitempty"`
+
+	// Meta Placeholder for metadata about the list container.
+	Meta *SecurityExceptionsAPIExceptionListMeta `json:"meta,omitempty"`
+
+	// Name The name of the exception list.
+	Name SecurityExceptionsAPIExceptionListName `json:"name"`
+
+	// NamespaceType Determines whether the exception container is available in all Kibana spaces or just the space
+	// in which it is created, where:
+	//
+	// - `single`: Only available in the Kibana space in which it is created.
+	// - `agnostic`: Available in all Kibana spaces.
+	NamespaceType *SecurityExceptionsAPIExceptionNamespaceType `json:"namespace_type,omitempty"`
+
+	// OsTypes Use this field to specify the operating system. Only enter one value.
+	OsTypes *SecurityExceptionsAPIExceptionListOsTypeArray `json:"os_types,omitempty"`
+
+	// Tags String array containing words and phrases to help categorize exception containers.
+	Tags *SecurityExceptionsAPIExceptionListTags `json:"tags,omitempty"`
+
+	// Type The type of exception list to be created. Different list types may denote where they can be utilized.
+	Type SecurityExceptionsAPIExceptionListType `json:"type"`
+
+	// Version The document version, automatically increased on updates.
+	Version *SecurityExceptionsAPIExceptionListVersion `json:"version,omitempty"`
+}
+
+// UpdateExceptionListJSONBody defines parameters for UpdateExceptionList.
+type UpdateExceptionListJSONBody struct {
+	// UnderscoreVersion The version id, normally returned by the API when the item was retrieved. Use it to ensure updates are done against the latest version.
+	UnderscoreVersion *string `json:"_version,omitempty"`
+
+	// Description Describes the exception list.
+	Description SecurityExceptionsAPIExceptionListDescription `json:"description"`
+
+	// Id Exception list's identifier.
+	Id *SecurityExceptionsAPIExceptionListId `json:"id,omitempty"`
+
+	// ListId The exception list's human readable string identifier, `endpoint_list`.
+	ListId *SecurityExceptionsAPIExceptionListHumanId `json:"list_id,omitempty"`
+
+	// Meta Placeholder for metadata about the list container.
+	Meta *SecurityExceptionsAPIExceptionListMeta `json:"meta,omitempty"`
+
+	// Name The name of the exception list.
+	Name SecurityExceptionsAPIExceptionListName `json:"name"`
+
+	// NamespaceType Determines whether the exception container is available in all Kibana spaces or just the space
+	// in which it is created, where:
+	//
+	// - `single`: Only available in the Kibana space in which it is created.
+	// - `agnostic`: Available in all Kibana spaces.
+	NamespaceType *SecurityExceptionsAPIExceptionNamespaceType `json:"namespace_type,omitempty"`
+
+	// OsTypes Use this field to specify the operating system. Only enter one value.
+	OsTypes *SecurityExceptionsAPIExceptionListOsTypeArray `json:"os_types,omitempty"`
+
+	// Tags String array containing words and phrases to help categorize exception containers.
+	Tags *SecurityExceptionsAPIExceptionListTags `json:"tags,omitempty"`
+
+	// Type The type of exception list to be created. Different list types may denote where they can be utilized.
+	Type SecurityExceptionsAPIExceptionListType `json:"type"`
+
+	// Version The document version, automatically increased on updates.
+	Version *SecurityExceptionsAPIExceptionListVersion `json:"version,omitempty"`
+}
+
+// DuplicateExceptionListParams defines parameters for DuplicateExceptionList.
+type DuplicateExceptionListParams struct {
+	ListId SecurityExceptionsAPIExceptionListHumanId `form:"list_id" json:"list_id"`
+	NamespaceType SecurityExceptionsAPIExceptionNamespaceType `form:"namespace_type" json:"namespace_type"`
+
+	// IncludeExpiredExceptions Determines whether to include expired exceptions in the duplicated list. Expiration date defined by `expire_time`.
+	IncludeExpiredExceptions DuplicateExceptionListParamsIncludeExpiredExceptions `form:"include_expired_exceptions" json:"include_expired_exceptions"`
+}
+
+// DuplicateExceptionListParamsIncludeExpiredExceptions defines parameters for DuplicateExceptionList.
+type DuplicateExceptionListParamsIncludeExpiredExceptions string
+
+// ExportExceptionListParams defines parameters for ExportExceptionList.
+type ExportExceptionListParams struct {
+	Id SecurityExceptionsAPIExceptionListId `form:"id" json:"id"`
+	ListId SecurityExceptionsAPIExceptionListHumanId `form:"list_id" json:"list_id"`
+	NamespaceType SecurityExceptionsAPIExceptionNamespaceType `form:"namespace_type" json:"namespace_type"`
+
+	// IncludeExpiredExceptions Determines whether to include expired exceptions in the exported list. Expiration date defined by `expire_time`.
+	IncludeExpiredExceptions ExportExceptionListParamsIncludeExpiredExceptions `form:"include_expired_exceptions" json:"include_expired_exceptions"`
+}
+
+// ExportExceptionListParamsIncludeExpiredExceptions defines parameters for ExportExceptionList.
+type ExportExceptionListParamsIncludeExpiredExceptions string
+
+// FindExceptionListsParams defines parameters for FindExceptionLists.
+type FindExceptionListsParams struct {
+	// Filter Filters the returned results according to the value of the specified field.
+	//
+	// Uses the `so type.field name:field` value syntax, where `so type` can be:
+	//
+	// - `exception-list`: Specify a space-aware exception list.
+	// - `exception-list-agnostic`: Specify an exception list that is shared across spaces.
+	Filter *SecurityExceptionsAPIFindExceptionListsFilter `form:"filter,omitempty" json:"filter,omitempty"`
+
+	// NamespaceType Determines whether the returned containers are Kibana associated with a Kibana space
+	// or available in all spaces (`agnostic` or `single`)
+	NamespaceType *[]SecurityExceptionsAPIExceptionNamespaceType `form:"namespace_type,omitempty" json:"namespace_type,omitempty"`
+
+	// Page The page number to return
+	Page *int `form:"page,omitempty" json:"page,omitempty"`
+
+	// PerPage The number of exception lists to return per page
+	PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"`
+
+	// SortField Determines which field is used to sort the results.
+ SortField *string `form:"sort_field,omitempty" json:"sort_field,omitempty"` + + // SortOrder Determines the sort order, which can be `desc` or `asc`. + SortOrder *FindExceptionListsParamsSortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"` +} + +// FindExceptionListsParamsSortOrder defines parameters for FindExceptionLists. +type FindExceptionListsParamsSortOrder string + +// ImportExceptionListMultipartBody defines parameters for ImportExceptionList. +type ImportExceptionListMultipartBody struct { + // File A `.ndjson` file containing the exception list + File *openapi_types.File `json:"file,omitempty"` +} + +// ImportExceptionListParams defines parameters for ImportExceptionList. +type ImportExceptionListParams struct { + // Overwrite Determines whether existing exception lists with the same `list_id` are overwritten. + // If any exception items have the same `item_id`, those are also overwritten. + Overwrite *bool `form:"overwrite,omitempty" json:"overwrite,omitempty"` + + // AsNewList Determines whether the list being imported will have a new `list_id` generated. + // Additional `item_id`'s are generated for each exception item. Both the exception + // list and its items are overwritten. + AsNewList *bool `form:"as_new_list,omitempty" json:"as_new_list,omitempty"` +} + +// DeleteExceptionListItemParams defines parameters for DeleteExceptionListItem. +type DeleteExceptionListItemParams struct { + // Id Exception item's identifier. Either `id` or `item_id` must be specified + Id *SecurityExceptionsAPIExceptionListItemId `form:"id,omitempty" json:"id,omitempty"` + + // ItemId Human readable exception item string identifier, e.g. `trusted-linux-processes`. Either `id` or `item_id` must be specified + ItemId *SecurityExceptionsAPIExceptionListItemHumanId `form:"item_id,omitempty" json:"item_id,omitempty"` + NamespaceType *SecurityExceptionsAPIExceptionNamespaceType `form:"namespace_type,omitempty" json:"namespace_type,omitempty"` +} + +// ReadExceptionListItemParams defines parameters for ReadExceptionListItem. +type ReadExceptionListItemParams struct { + // Id Exception list item's identifier. Either `id` or `item_id` must be specified. + Id *SecurityExceptionsAPIExceptionListItemId `form:"id,omitempty" json:"id,omitempty"` + + // ItemId Human readable exception item string identifier, e.g. `trusted-linux-processes`. Either `id` or `item_id` must be specified. + ItemId *SecurityExceptionsAPIExceptionListItemHumanId `form:"item_id,omitempty" json:"item_id,omitempty"` + NamespaceType *SecurityExceptionsAPIExceptionNamespaceType `form:"namespace_type,omitempty" json:"namespace_type,omitempty"` +} + +// CreateExceptionListItemJSONBody defines parameters for CreateExceptionListItem. +type CreateExceptionListItemJSONBody struct { + Comments *SecurityExceptionsAPICreateExceptionListItemCommentArray `json:"comments,omitempty"` + + // Description Describes the exception list. + Description SecurityExceptionsAPIExceptionListItemDescription `json:"description"` + Entries SecurityExceptionsAPIExceptionListItemEntryArray `json:"entries"` + + // ExpireTime The exception item’s expiration date, in ISO format. This field is only available for regular exception items, not endpoint exceptions. + ExpireTime *SecurityExceptionsAPIExceptionListItemExpireTime `json:"expire_time,omitempty"` + + // ItemId Human readable string identifier, e.g. 
`trusted-linux-processes`
+	ItemId *SecurityExceptionsAPIExceptionListItemHumanId `json:"item_id,omitempty"`
+
+	// ListId The exception list's human readable string identifier, `endpoint_list`.
+	ListId SecurityExceptionsAPIExceptionListHumanId `json:"list_id"`
+	Meta *SecurityExceptionsAPIExceptionListItemMeta `json:"meta,omitempty"`
+
+	// Name Exception list name.
+	Name SecurityExceptionsAPIExceptionListItemName `json:"name"`
+
+	// NamespaceType Determines whether the exception container is available in all Kibana spaces or just the space
+	// in which it is created, where:
+	//
+	// - `single`: Only available in the Kibana space in which it is created.
+	// - `agnostic`: Available in all Kibana spaces.
+	NamespaceType *SecurityExceptionsAPIExceptionNamespaceType `json:"namespace_type,omitempty"`
+	OsTypes *SecurityExceptionsAPIExceptionListItemOsTypeArray `json:"os_types,omitempty"`
+	Tags *SecurityExceptionsAPIExceptionListItemTags `json:"tags,omitempty"`
+	Type SecurityExceptionsAPIExceptionListItemType `json:"type"`
+}
+
+// UpdateExceptionListItemJSONBody defines parameters for UpdateExceptionListItem.
+type UpdateExceptionListItemJSONBody struct {
+	// UnderscoreVersion The version id, normally returned by the API when the item was retrieved. Use it to ensure updates are done against the latest version.
+	UnderscoreVersion *string `json:"_version,omitempty"`
+	Comments *SecurityExceptionsAPIUpdateExceptionListItemCommentArray `json:"comments,omitempty"`
+
+	// Description Describes the exception list.
+	Description SecurityExceptionsAPIExceptionListItemDescription `json:"description"`
+	Entries SecurityExceptionsAPIExceptionListItemEntryArray `json:"entries"`
+
+	// ExpireTime The exception item’s expiration date, in ISO format. This field is only available for regular exception items, not endpoint exceptions.
+	ExpireTime *SecurityExceptionsAPIExceptionListItemExpireTime `json:"expire_time,omitempty"`
+
+	// Id Exception's identifier.
+	Id *SecurityExceptionsAPIExceptionListItemId `json:"id,omitempty"`
+
+	// ItemId Human readable string identifier, e.g. `trusted-linux-processes`
+	ItemId *SecurityExceptionsAPIExceptionListItemHumanId `json:"item_id,omitempty"`
+
+	// ListId The exception list's human readable string identifier, `endpoint_list`.
+	ListId *SecurityExceptionsAPIExceptionListHumanId `json:"list_id,omitempty"`
+	Meta *SecurityExceptionsAPIExceptionListItemMeta `json:"meta,omitempty"`
+
+	// Name Exception list name.
+	Name SecurityExceptionsAPIExceptionListItemName `json:"name"`
+
+	// NamespaceType Determines whether the exception container is available in all Kibana spaces or just the space
+	// in which it is created, where:
+	//
+	// - `single`: Only available in the Kibana space in which it is created.
+	// - `agnostic`: Available in all Kibana spaces.
+	NamespaceType *SecurityExceptionsAPIExceptionNamespaceType `json:"namespace_type,omitempty"`
+	OsTypes *SecurityExceptionsAPIExceptionListItemOsTypeArray `json:"os_types,omitempty"`
+	Tags *SecurityExceptionsAPIExceptionListItemTags `json:"tags,omitempty"`
+	Type SecurityExceptionsAPIExceptionListItemType `json:"type"`
+}
+
+// FindExceptionListItemsParams defines parameters for FindExceptionListItems.
+type FindExceptionListItemsParams struct {
+	// ListId The `list_id`s of the items to fetch.
+	ListId []SecurityExceptionsAPIExceptionListHumanId `form:"list_id" json:"list_id"`
+
+	// Filter Filters the returned results according to the value of the specified field,
+	// using the `:` syntax.
+ Filter *[]SecurityExceptionsAPIFindExceptionListItemsFilter `form:"filter,omitempty" json:"filter,omitempty"` + + // NamespaceType Determines whether the returned containers are Kibana associated with a Kibana space + // or available in all spaces (`agnostic` or `single`) + NamespaceType *[]SecurityExceptionsAPIExceptionNamespaceType `form:"namespace_type,omitempty" json:"namespace_type,omitempty"` + Search *string `form:"search,omitempty" json:"search,omitempty"` + + // Page The page number to return + Page *int `form:"page,omitempty" json:"page,omitempty"` + + // PerPage The number of exception list items to return per page + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` + + // SortField Determines which field is used to sort the results. + SortField *SecurityExceptionsAPINonEmptyString `form:"sort_field,omitempty" json:"sort_field,omitempty"` + + // SortOrder Determines the sort order, which can be `desc` or `asc`. + SortOrder *FindExceptionListItemsParamsSortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"` +} + +// FindExceptionListItemsParamsSortOrder defines parameters for FindExceptionListItems. +type FindExceptionListItemsParamsSortOrder string + +// ReadExceptionListSummaryParams defines parameters for ReadExceptionListSummary. +type ReadExceptionListSummaryParams struct { + // Id Exception list's identifier generated upon creation. + Id *SecurityExceptionsAPIExceptionListId `form:"id,omitempty" json:"id,omitempty"` + + // ListId Exception list's human readable identifier. + ListId *SecurityExceptionsAPIExceptionListHumanId `form:"list_id,omitempty" json:"list_id,omitempty"` + NamespaceType *SecurityExceptionsAPIExceptionNamespaceType `form:"namespace_type,omitempty" json:"namespace_type,omitempty"` + + // Filter Search filter clause + Filter *string `form:"filter,omitempty" json:"filter,omitempty"` +} + +// CreateSharedExceptionListJSONBody defines parameters for CreateSharedExceptionList. +type CreateSharedExceptionListJSONBody struct { + // Description Describes the exception list. + Description SecurityExceptionsAPIExceptionListDescription `json:"description"` + + // Name The name of the exception list. + Name SecurityExceptionsAPIExceptionListName `json:"name"` +} + +// PostFleetAgentDownloadSourcesJSONBody defines parameters for PostFleetAgentDownloadSources. +type PostFleetAgentDownloadSourcesJSONBody struct { + Host string `json:"host"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + Name string `json:"name"` + + // ProxyId The ID of the proxy to use for this download source. See the proxies API for more information. + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + Key *PostFleetAgentDownloadSourcesJSONBody_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` +} + +// PostFleetAgentDownloadSourcesJSONBodySecretsSslKey0 defines parameters for PostFleetAgentDownloadSources. +type PostFleetAgentDownloadSourcesJSONBodySecretsSslKey0 struct { + Id string `json:"id"` +} + +// PostFleetAgentDownloadSourcesJSONBodySecretsSslKey1 defines parameters for PostFleetAgentDownloadSources. 
+type PostFleetAgentDownloadSourcesJSONBodySecretsSslKey1 = string + +// PostFleetAgentDownloadSourcesJSONBody_Secrets_Ssl_Key defines parameters for PostFleetAgentDownloadSources. +type PostFleetAgentDownloadSourcesJSONBody_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// PutFleetAgentDownloadSourcesSourceidJSONBody defines parameters for PutFleetAgentDownloadSourcesSourceid. +type PutFleetAgentDownloadSourcesSourceidJSONBody struct { + Host string `json:"host"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + Name string `json:"name"` + + // ProxyId The ID of the proxy to use for this download source. See the proxies API for more information. + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + Key *PutFleetAgentDownloadSourcesSourceidJSONBody_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` +} + +// PutFleetAgentDownloadSourcesSourceidJSONBodySecretsSslKey0 defines parameters for PutFleetAgentDownloadSourcesSourceid. +type PutFleetAgentDownloadSourcesSourceidJSONBodySecretsSslKey0 struct { + Id string `json:"id"` +} + +// PutFleetAgentDownloadSourcesSourceidJSONBodySecretsSslKey1 defines parameters for PutFleetAgentDownloadSourcesSourceid. +type PutFleetAgentDownloadSourcesSourceidJSONBodySecretsSslKey1 = string + +// PutFleetAgentDownloadSourcesSourceidJSONBody_Secrets_Ssl_Key defines parameters for PutFleetAgentDownloadSourcesSourceid. +type PutFleetAgentDownloadSourcesSourceidJSONBody_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// GetFleetAgentPoliciesParams defines parameters for GetFleetAgentPolicies. +type GetFleetAgentPoliciesParams struct { + Page *float32 `form:"page,omitempty" json:"page,omitempty"` + PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` + SortField *string `form:"sortField,omitempty" json:"sortField,omitempty"` + SortOrder *GetFleetAgentPoliciesParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` + ShowUpgradeable *bool `form:"showUpgradeable,omitempty" json:"showUpgradeable,omitempty"` + Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` + + // NoAgentCount use withAgentCount instead + NoAgentCount *bool `form:"noAgentCount,omitempty" json:"noAgentCount,omitempty"` + + // WithAgentCount get policies with agent count + WithAgentCount *bool `form:"withAgentCount,omitempty" json:"withAgentCount,omitempty"` + + // Full get full policies with package policies populated + Full *bool `form:"full,omitempty" json:"full,omitempty"` + Format *GetFleetAgentPoliciesParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// GetFleetAgentPoliciesParamsSortOrder defines parameters for GetFleetAgentPolicies. +type GetFleetAgentPoliciesParamsSortOrder string + +// GetFleetAgentPoliciesParamsFormat defines parameters for GetFleetAgentPolicies. +type GetFleetAgentPoliciesParamsFormat string + +// PostFleetAgentPoliciesJSONBody defines parameters for PostFleetAgentPolicies. 
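In PostFleetAgentDownloadSourcesJSONBody (and its PUT counterpart), only `host` and `name` are non-optional; the nested `Secrets.Ssl.Key` union accepts either a secret reference object (`{id}`) or a raw string, which is why it is generated as a `json.RawMessage` wrapper. The sketch below registers a download source without secrets, leaving the union untouched; the host and the package name are assumptions.

package kbapi // assumed package name of the generated client

import "encoding/json"

// buildDownloadSource is an illustrative sketch, not part of the generated code.
func buildDownloadSource() ([]byte, error) {
	isDefault := false
	body := PostFleetAgentDownloadSourcesJSONBody{
		Name:      "internal-artifact-mirror (placeholder)",
		Host:      "https://artifacts.example.internal:8443", // placeholder host
		IsDefault: &isDefault,
	}
	return json.Marshal(body)
}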
+type PostFleetAgentPoliciesJSONBody struct { + AdvancedSettings *struct { + AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory,omitempty"` + AgentDownloadTimeout interface{} `json:"agent_download_timeout,omitempty"` + AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs,omitempty"` + AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval,omitempty"` + AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles,omitempty"` + AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes,omitempty"` + AgentLoggingLevel interface{} `json:"agent_logging_level,omitempty"` + AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period,omitempty"` + AgentLoggingToFiles interface{} `json:"agent_logging_to_files,omitempty"` + AgentMonitoringRuntimeExperimental interface{} `json:"agent_monitoring_runtime_experimental,omitempty"` + } `json:"advanced_settings,omitempty"` + AgentFeatures *[]struct { + Enabled bool `json:"enabled"` + Name string `json:"name"` + } `json:"agent_features,omitempty"` + Agentless *struct { + CloudConnectors *struct { + Enabled bool `json:"enabled"` + TargetCsp *string `json:"target_csp,omitempty"` + } `json:"cloud_connectors,omitempty"` + Resources *struct { + Requests *struct { + Cpu *string `json:"cpu,omitempty"` + Memory *string `json:"memory,omitempty"` + } `json:"requests,omitempty"` + } `json:"resources,omitempty"` + } `json:"agentless,omitempty"` + DataOutputId *string `json:"data_output_id,omitempty"` + Description *string `json:"description,omitempty"` + DownloadSourceId *string `json:"download_source_id,omitempty"` + FleetServerHostId *string `json:"fleet_server_host_id,omitempty"` + Force *bool `json:"force,omitempty"` + + // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. 
+ GlobalDataTags *[]AgentPolicyGlobalDataTagsItem `json:"global_data_tags,omitempty"` + HasFleetServer *bool `json:"has_fleet_server,omitempty"` + Id *string `json:"id,omitempty"` + InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` + IsManaged *bool `json:"is_managed,omitempty"` + IsProtected *bool `json:"is_protected,omitempty"` + + // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled + KeepMonitoringAlive *bool `json:"keep_monitoring_alive,omitempty"` + MonitoringDiagnostics *struct { + Limit *struct { + Burst *float32 `json:"burst,omitempty"` + Interval *string `json:"interval,omitempty"` + } `json:"limit,omitempty"` + Uploader *struct { + InitDur *string `json:"init_dur,omitempty"` + MaxDur *string `json:"max_dur,omitempty"` + MaxRetries *float32 `json:"max_retries,omitempty"` + } `json:"uploader,omitempty"` + } `json:"monitoring_diagnostics,omitempty"` + MonitoringEnabled *[]PostFleetAgentPoliciesJSONBodyMonitoringEnabled `json:"monitoring_enabled,omitempty"` + MonitoringHttp *struct { + Buffer *struct { + Enabled *bool `json:"enabled,omitempty"` + } `json:"buffer,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + Host *string `json:"host,omitempty"` + Port *float32 `json:"port,omitempty"` + } `json:"monitoring_http,omitempty"` + MonitoringOutputId *string `json:"monitoring_output_id,omitempty"` + MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` + Name string `json:"name"` + Namespace string `json:"namespace"` + + // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *map[string]interface{} `json:"overrides,omitempty"` + RequiredVersions *[]struct { + // Percentage Target percentage of agents to auto upgrade + Percentage float32 `json:"percentage"` + + // Version Target version for automatic agent upgrade + Version string `json:"version"` + } `json:"required_versions,omitempty"` + SpaceIds *[]string `json:"space_ids,omitempty"` + + // SupportsAgentless Indicates whether the agent policy supports agentless integrations. + SupportsAgentless *bool `json:"supports_agentless,omitempty"` + UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` +} + +// PostFleetAgentPoliciesParams defines parameters for PostFleetAgentPolicies. +type PostFleetAgentPoliciesParams struct { + SysMonitoring *bool `form:"sys_monitoring,omitempty" json:"sys_monitoring,omitempty"` +} + +// PostFleetAgentPoliciesJSONBodyMonitoringEnabled defines parameters for PostFleetAgentPolicies. +type PostFleetAgentPoliciesJSONBodyMonitoringEnabled string + +// PostFleetAgentPoliciesBulkGetJSONBody defines parameters for PostFleetAgentPoliciesBulkGet. +type PostFleetAgentPoliciesBulkGetJSONBody struct { + // Full get full policies with package policies populated + Full *bool `json:"full,omitempty"` + + // Ids list of package policy ids + Ids []string `json:"ids"` + IgnoreMissing *bool `json:"ignoreMissing,omitempty"` +} + +// PostFleetAgentPoliciesBulkGetParams defines parameters for PostFleetAgentPoliciesBulkGet. 
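For agent policies, only `name` and `namespace` are non-optional in PostFleetAgentPoliciesJSONBody; everything else, including monitoring and the anonymous advanced-settings struct, is optional. A minimal create-policy body might look like the sketch below, which assumes the monitoring-enabled enum accepts the usual `logs`/`metrics` literals and that the package name matches the generated file.

package kbapi // assumed package name of the generated client

import "encoding/json"

// buildMinimalAgentPolicy is an illustrative sketch, not part of the generated code.
func buildMinimalAgentPolicy() ([]byte, error) {
	description := "Placeholder policy built from the generated types"
	monitoring := []PostFleetAgentPoliciesJSONBodyMonitoringEnabled{
		PostFleetAgentPoliciesJSONBodyMonitoringEnabled("logs"),    // assumed literal
		PostFleetAgentPoliciesJSONBodyMonitoringEnabled("metrics"), // assumed literal
	}
	body := PostFleetAgentPoliciesJSONBody{
		Name:              "placeholder-agent-policy",
		Namespace:         "default",
		Description:       &description,
		MonitoringEnabled: &monitoring,
	}
	return json.Marshal(body)
}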
+type PostFleetAgentPoliciesBulkGetParams struct { + Format *PostFleetAgentPoliciesBulkGetParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// PostFleetAgentPoliciesBulkGetParamsFormat defines parameters for PostFleetAgentPoliciesBulkGet. +type PostFleetAgentPoliciesBulkGetParamsFormat string + +// PostFleetAgentPoliciesDeleteJSONBody defines parameters for PostFleetAgentPoliciesDelete. +type PostFleetAgentPoliciesDeleteJSONBody struct { + AgentPolicyId string `json:"agentPolicyId"` + + // Force bypass validation checks that can prevent agent policy deletion + Force *bool `json:"force,omitempty"` +} + +// PostFleetAgentPoliciesOutputsJSONBody defines parameters for PostFleetAgentPoliciesOutputs. +type PostFleetAgentPoliciesOutputsJSONBody struct { + // Ids list of package policy ids + Ids []string `json:"ids"` +} + +// GetFleetAgentPoliciesAgentpolicyidParams defines parameters for GetFleetAgentPoliciesAgentpolicyid. +type GetFleetAgentPoliciesAgentpolicyidParams struct { + Format *GetFleetAgentPoliciesAgentpolicyidParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// GetFleetAgentPoliciesAgentpolicyidParamsFormat defines parameters for GetFleetAgentPoliciesAgentpolicyid. +type GetFleetAgentPoliciesAgentpolicyidParamsFormat string + +// PutFleetAgentPoliciesAgentpolicyidJSONBody defines parameters for PutFleetAgentPoliciesAgentpolicyid. +type PutFleetAgentPoliciesAgentpolicyidJSONBody struct { + AdvancedSettings *struct { + AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory,omitempty"` + AgentDownloadTimeout interface{} `json:"agent_download_timeout,omitempty"` + AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs,omitempty"` + AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval,omitempty"` + AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles,omitempty"` + AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes,omitempty"` + AgentLoggingLevel interface{} `json:"agent_logging_level,omitempty"` + AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period,omitempty"` + AgentLoggingToFiles interface{} `json:"agent_logging_to_files,omitempty"` + AgentMonitoringRuntimeExperimental interface{} `json:"agent_monitoring_runtime_experimental,omitempty"` + } `json:"advanced_settings,omitempty"` + AgentFeatures *[]struct { + Enabled bool `json:"enabled"` + Name string `json:"name"` + } `json:"agent_features,omitempty"` + Agentless *struct { + CloudConnectors *struct { + Enabled bool `json:"enabled"` + TargetCsp *string `json:"target_csp,omitempty"` + } `json:"cloud_connectors,omitempty"` + Resources *struct { + Requests *struct { + Cpu *string `json:"cpu,omitempty"` + Memory *string `json:"memory,omitempty"` + } `json:"requests,omitempty"` + } `json:"resources,omitempty"` + } `json:"agentless,omitempty"` + BumpRevision *bool `json:"bumpRevision,omitempty"` + DataOutputId *string `json:"data_output_id,omitempty"` + Description *string `json:"description,omitempty"` + DownloadSourceId *string `json:"download_source_id,omitempty"` + FleetServerHostId *string `json:"fleet_server_host_id,omitempty"` + Force *bool `json:"force,omitempty"` + + // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. 
+ GlobalDataTags *[]AgentPolicyGlobalDataTagsItem `json:"global_data_tags,omitempty"` + HasFleetServer *bool `json:"has_fleet_server,omitempty"` + Id *string `json:"id,omitempty"` + InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` + IsManaged *bool `json:"is_managed,omitempty"` + IsProtected *bool `json:"is_protected,omitempty"` + + // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled + KeepMonitoringAlive *bool `json:"keep_monitoring_alive,omitempty"` + MonitoringDiagnostics *struct { + Limit *struct { + Burst *float32 `json:"burst,omitempty"` + Interval *string `json:"interval,omitempty"` + } `json:"limit,omitempty"` + Uploader *struct { + InitDur *string `json:"init_dur,omitempty"` + MaxDur *string `json:"max_dur,omitempty"` + MaxRetries *float32 `json:"max_retries,omitempty"` + } `json:"uploader,omitempty"` + } `json:"monitoring_diagnostics,omitempty"` + MonitoringEnabled *[]PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled `json:"monitoring_enabled,omitempty"` + MonitoringHttp *struct { + Buffer *struct { + Enabled *bool `json:"enabled,omitempty"` + } `json:"buffer,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + Host *string `json:"host,omitempty"` + Port *float32 `json:"port,omitempty"` + } `json:"monitoring_http,omitempty"` + MonitoringOutputId *string `json:"monitoring_output_id,omitempty"` + MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` + Name string `json:"name"` + Namespace string `json:"namespace"` + + // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *map[string]interface{} `json:"overrides,omitempty"` + RequiredVersions *[]struct { + // Percentage Target percentage of agents to auto upgrade + Percentage float32 `json:"percentage"` + + // Version Target version for automatic agent upgrade + Version string `json:"version"` + } `json:"required_versions,omitempty"` + SpaceIds *[]string `json:"space_ids,omitempty"` + + // SupportsAgentless Indicates whether the agent policy supports agentless integrations. + SupportsAgentless *bool `json:"supports_agentless,omitempty"` + UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` +} + +// PutFleetAgentPoliciesAgentpolicyidParams defines parameters for PutFleetAgentPoliciesAgentpolicyid. +type PutFleetAgentPoliciesAgentpolicyidParams struct { + Format *PutFleetAgentPoliciesAgentpolicyidParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// PutFleetAgentPoliciesAgentpolicyidParamsFormat defines parameters for PutFleetAgentPoliciesAgentpolicyid. +type PutFleetAgentPoliciesAgentpolicyidParamsFormat string + +// PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled defines parameters for PutFleetAgentPoliciesAgentpolicyid. +type PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled string + +// PostFleetAgentPoliciesAgentpolicyidCopyJSONBody defines parameters for PostFleetAgentPoliciesAgentpolicyidCopy. +type PostFleetAgentPoliciesAgentpolicyidCopyJSONBody struct { + Description *string `json:"description,omitempty"` + Name string `json:"name"` +} + +// PostFleetAgentPoliciesAgentpolicyidCopyParams defines parameters for PostFleetAgentPoliciesAgentpolicyidCopy. 
+type PostFleetAgentPoliciesAgentpolicyidCopyParams struct { + Format *PostFleetAgentPoliciesAgentpolicyidCopyParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// PostFleetAgentPoliciesAgentpolicyidCopyParamsFormat defines parameters for PostFleetAgentPoliciesAgentpolicyidCopy. +type PostFleetAgentPoliciesAgentpolicyidCopyParamsFormat string + +// GetFleetAgentPoliciesAgentpolicyidDownloadParams defines parameters for GetFleetAgentPoliciesAgentpolicyidDownload. +type GetFleetAgentPoliciesAgentpolicyidDownloadParams struct { + Download *bool `form:"download,omitempty" json:"download,omitempty"` + Standalone *bool `form:"standalone,omitempty" json:"standalone,omitempty"` + Kubernetes *bool `form:"kubernetes,omitempty" json:"kubernetes,omitempty"` +} + +// GetFleetAgentPoliciesAgentpolicyidFullParams defines parameters for GetFleetAgentPoliciesAgentpolicyidFull. +type GetFleetAgentPoliciesAgentpolicyidFullParams struct { + Download *bool `form:"download,omitempty" json:"download,omitempty"` + Standalone *bool `form:"standalone,omitempty" json:"standalone,omitempty"` + Kubernetes *bool `form:"kubernetes,omitempty" json:"kubernetes,omitempty"` +} + +// GetFleetAgentStatusParams defines parameters for GetFleetAgentStatus. +type GetFleetAgentStatusParams struct { + PolicyId *string `form:"policyId,omitempty" json:"policyId,omitempty"` + PolicyIds *struct { + union json.RawMessage + } `form:"policyIds,omitempty" json:"policyIds,omitempty"` + Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` +} + +// GetFleetAgentStatusParamsPolicyIds0 defines parameters for GetFleetAgentStatus. +type GetFleetAgentStatusParamsPolicyIds0 = []string + +// GetFleetAgentStatusParamsPolicyIds1 defines parameters for GetFleetAgentStatus. +type GetFleetAgentStatusParamsPolicyIds1 = string + +// GetFleetAgentStatusDataParams defines parameters for GetFleetAgentStatusData. +type GetFleetAgentStatusDataParams struct { + AgentsIds struct { + union json.RawMessage + } `form:"agentsIds" json:"agentsIds"` + PkgName *string `form:"pkgName,omitempty" json:"pkgName,omitempty"` + PkgVersion *string `form:"pkgVersion,omitempty" json:"pkgVersion,omitempty"` + PreviewData *bool `form:"previewData,omitempty" json:"previewData,omitempty"` +} + +// GetFleetAgentStatusDataParamsAgentsIds0 defines parameters for GetFleetAgentStatusData. +type GetFleetAgentStatusDataParamsAgentsIds0 = []string + +// GetFleetAgentStatusDataParamsAgentsIds1 defines parameters for GetFleetAgentStatusData. +type GetFleetAgentStatusDataParamsAgentsIds1 = string + +// GetFleetAgentsParams defines parameters for GetFleetAgents. 
+type GetFleetAgentsParams struct { + Page *float32 `form:"page,omitempty" json:"page,omitempty"` + PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` + Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` + ShowAgentless *bool `form:"showAgentless,omitempty" json:"showAgentless,omitempty"` + ShowInactive *bool `form:"showInactive,omitempty" json:"showInactive,omitempty"` + WithMetrics *bool `form:"withMetrics,omitempty" json:"withMetrics,omitempty"` + ShowUpgradeable *bool `form:"showUpgradeable,omitempty" json:"showUpgradeable,omitempty"` + GetStatusSummary *bool `form:"getStatusSummary,omitempty" json:"getStatusSummary,omitempty"` + SortField *string `form:"sortField,omitempty" json:"sortField,omitempty"` + SortOrder *GetFleetAgentsParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` + SearchAfter *string `form:"searchAfter,omitempty" json:"searchAfter,omitempty"` + OpenPit *bool `form:"openPit,omitempty" json:"openPit,omitempty"` + PitId *string `form:"pitId,omitempty" json:"pitId,omitempty"` + PitKeepAlive *string `form:"pitKeepAlive,omitempty" json:"pitKeepAlive,omitempty"` +} + +// GetFleetAgentsParamsSortOrder defines parameters for GetFleetAgents. +type GetFleetAgentsParamsSortOrder string + +// PostFleetAgentsJSONBody defines parameters for PostFleetAgents. +type PostFleetAgentsJSONBody struct { + ActionIds []string `json:"actionIds"` +} + +// GetFleetAgentsActionStatusParams defines parameters for GetFleetAgentsActionStatus. +type GetFleetAgentsActionStatusParams struct { + Page *float32 `form:"page,omitempty" json:"page,omitempty"` + PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` + Date *string `form:"date,omitempty" json:"date,omitempty"` + Latest *float32 `form:"latest,omitempty" json:"latest,omitempty"` + ErrorSize *float32 `form:"errorSize,omitempty" json:"errorSize,omitempty"` +} + +// PostFleetAgentsBulkReassignJSONBody defines parameters for PostFleetAgentsBulkReassign. +type PostFleetAgentsBulkReassignJSONBody struct { + Agents PostFleetAgentsBulkReassignJSONBody_Agents `json:"agents"` + BatchSize *float32 `json:"batchSize,omitempty"` + IncludeInactive *bool `json:"includeInactive,omitempty"` + PolicyId string `json:"policy_id"` +} + +// PostFleetAgentsBulkReassignJSONBodyAgents0 defines parameters for PostFleetAgentsBulkReassign. +type PostFleetAgentsBulkReassignJSONBodyAgents0 = []string + +// PostFleetAgentsBulkReassignJSONBodyAgents1 defines parameters for PostFleetAgentsBulkReassign. +type PostFleetAgentsBulkReassignJSONBodyAgents1 = string + +// PostFleetAgentsBulkReassignJSONBody_Agents defines parameters for PostFleetAgentsBulkReassign. +type PostFleetAgentsBulkReassignJSONBody_Agents struct { + union json.RawMessage +} + +// PostFleetAgentsBulkRequestDiagnosticsJSONBody defines parameters for PostFleetAgentsBulkRequestDiagnostics. +type PostFleetAgentsBulkRequestDiagnosticsJSONBody struct { + AdditionalMetrics *[]PostFleetAgentsBulkRequestDiagnosticsJSONBodyAdditionalMetrics `json:"additional_metrics,omitempty"` + Agents PostFleetAgentsBulkRequestDiagnosticsJSONBody_Agents `json:"agents"` + BatchSize *float32 `json:"batchSize,omitempty"` +} + +// PostFleetAgentsBulkRequestDiagnosticsJSONBodyAdditionalMetrics defines parameters for PostFleetAgentsBulkRequestDiagnostics. +type PostFleetAgentsBulkRequestDiagnosticsJSONBodyAdditionalMetrics string + +// PostFleetAgentsBulkRequestDiagnosticsJSONBodyAgents0 defines parameters for PostFleetAgentsBulkRequestDiagnostics. 
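// Editor's illustrative sketch, not part of the generated file: how the form-tagged
// fields of GetFleetAgentsParams above map onto a query string. The path and filter
// values are hypothetical examples; a generated client would normally perform this
// encoding itself.
func exampleAgentsListURL() string {
	q := url.Values{} // requires "net/url"
	q.Set("page", "1")
	q.Set("perPage", "50")
	q.Set("kuery", `status:"online"`)
	q.Set("sortOrder", "desc")
	return "/api/fleet/agents?" + q.Encode()
}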
+type PostFleetAgentsBulkRequestDiagnosticsJSONBodyAgents0 = []string + +// PostFleetAgentsBulkRequestDiagnosticsJSONBodyAgents1 defines parameters for PostFleetAgentsBulkRequestDiagnostics. +type PostFleetAgentsBulkRequestDiagnosticsJSONBodyAgents1 = string + +// PostFleetAgentsBulkRequestDiagnosticsJSONBody_Agents defines parameters for PostFleetAgentsBulkRequestDiagnostics. +type PostFleetAgentsBulkRequestDiagnosticsJSONBody_Agents struct { + union json.RawMessage +} + +// PostFleetAgentsBulkUnenrollJSONBody defines parameters for PostFleetAgentsBulkUnenroll. +type PostFleetAgentsBulkUnenrollJSONBody struct { + Agents PostFleetAgentsBulkUnenrollJSONBody_Agents `json:"agents"` + BatchSize *float32 `json:"batchSize,omitempty"` + + // Force Unenrolls hosted agents too + Force *bool `json:"force,omitempty"` + + // IncludeInactive When passing agents by KQL query, unenrolls inactive agents too + IncludeInactive *bool `json:"includeInactive,omitempty"` + + // Revoke Revokes API keys of agents + Revoke *bool `json:"revoke,omitempty"` +} + +// PostFleetAgentsBulkUnenrollJSONBodyAgents0 defines parameters for PostFleetAgentsBulkUnenroll. +type PostFleetAgentsBulkUnenrollJSONBodyAgents0 = []string + +// PostFleetAgentsBulkUnenrollJSONBodyAgents1 defines parameters for PostFleetAgentsBulkUnenroll. +type PostFleetAgentsBulkUnenrollJSONBodyAgents1 = string + +// PostFleetAgentsBulkUnenrollJSONBody_Agents defines parameters for PostFleetAgentsBulkUnenroll. +type PostFleetAgentsBulkUnenrollJSONBody_Agents struct { + union json.RawMessage +} + +// PostFleetAgentsBulkUpdateAgentTagsJSONBody defines parameters for PostFleetAgentsBulkUpdateAgentTags. +type PostFleetAgentsBulkUpdateAgentTagsJSONBody struct { + Agents PostFleetAgentsBulkUpdateAgentTagsJSONBody_Agents `json:"agents"` + BatchSize *float32 `json:"batchSize,omitempty"` + IncludeInactive *bool `json:"includeInactive,omitempty"` + TagsToAdd *[]string `json:"tagsToAdd,omitempty"` + TagsToRemove *[]string `json:"tagsToRemove,omitempty"` +} + +// PostFleetAgentsBulkUpdateAgentTagsJSONBodyAgents0 defines parameters for PostFleetAgentsBulkUpdateAgentTags. +type PostFleetAgentsBulkUpdateAgentTagsJSONBodyAgents0 = []string + +// PostFleetAgentsBulkUpdateAgentTagsJSONBodyAgents1 defines parameters for PostFleetAgentsBulkUpdateAgentTags. +type PostFleetAgentsBulkUpdateAgentTagsJSONBodyAgents1 = string + +// PostFleetAgentsBulkUpdateAgentTagsJSONBody_Agents defines parameters for PostFleetAgentsBulkUpdateAgentTags. +type PostFleetAgentsBulkUpdateAgentTagsJSONBody_Agents struct { + union json.RawMessage +} + +// PostFleetAgentsBulkUpgradeJSONBody defines parameters for PostFleetAgentsBulkUpgrade. +type PostFleetAgentsBulkUpgradeJSONBody struct { + Agents PostFleetAgentsBulkUpgradeJSONBody_Agents `json:"agents"` + BatchSize *float32 `json:"batchSize,omitempty"` + Force *bool `json:"force,omitempty"` + IncludeInactive *bool `json:"includeInactive,omitempty"` + RolloutDurationSeconds *float32 `json:"rollout_duration_seconds,omitempty"` + SkipRateLimitCheck *bool `json:"skipRateLimitCheck,omitempty"` + SourceUri *string `json:"source_uri,omitempty"` + StartTime *string `json:"start_time,omitempty"` + Version string `json:"version"` +} + +// PostFleetAgentsBulkUpgradeJSONBodyAgents0 defines parameters for PostFleetAgentsBulkUpgrade. +type PostFleetAgentsBulkUpgradeJSONBodyAgents0 = []string + +// PostFleetAgentsBulkUpgradeJSONBodyAgents1 defines parameters for PostFleetAgentsBulkUpgrade. 
+type PostFleetAgentsBulkUpgradeJSONBodyAgents1 = string + +// PostFleetAgentsBulkUpgradeJSONBody_Agents defines parameters for PostFleetAgentsBulkUpgrade. +type PostFleetAgentsBulkUpgradeJSONBody_Agents struct { + union json.RawMessage +} + +// GetFleetAgentsTagsParams defines parameters for GetFleetAgentsTags. +type GetFleetAgentsTagsParams struct { + Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` + ShowInactive *bool `form:"showInactive,omitempty" json:"showInactive,omitempty"` +} + +// GetFleetAgentsAgentidParams defines parameters for GetFleetAgentsAgentid. +type GetFleetAgentsAgentidParams struct { + WithMetrics *bool `form:"withMetrics,omitempty" json:"withMetrics,omitempty"` +} + +// PutFleetAgentsAgentidJSONBody defines parameters for PutFleetAgentsAgentid. +type PutFleetAgentsAgentidJSONBody struct { + Tags *[]string `json:"tags,omitempty"` + UserProvidedMetadata *map[string]interface{} `json:"user_provided_metadata,omitempty"` +} + +// PostFleetAgentsAgentidActionsJSONBody defines parameters for PostFleetAgentsAgentidActions. +type PostFleetAgentsAgentidActionsJSONBody struct { + Action PostFleetAgentsAgentidActionsJSONBody_Action `json:"action"` +} + +// PostFleetAgentsAgentidActionsJSONBodyAction0 defines parameters for PostFleetAgentsAgentidActions. +type PostFleetAgentsAgentidActionsJSONBodyAction0 struct { + AckData interface{} `json:"ack_data"` + Data interface{} `json:"data"` + Type PostFleetAgentsAgentidActionsJSONBodyAction0Type `json:"type"` +} + +// PostFleetAgentsAgentidActionsJSONBodyAction0Type defines parameters for PostFleetAgentsAgentidActions. +type PostFleetAgentsAgentidActionsJSONBodyAction0Type string + +// PostFleetAgentsAgentidActionsJSONBodyAction1 defines parameters for PostFleetAgentsAgentidActions. +type PostFleetAgentsAgentidActionsJSONBodyAction1 struct { + Data struct { + LogLevel *PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevel `json:"log_level,omitempty"` + } `json:"data"` + Type PostFleetAgentsAgentidActionsJSONBodyAction1Type `json:"type"` +} + +// PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevel defines parameters for PostFleetAgentsAgentidActions. +type PostFleetAgentsAgentidActionsJSONBodyAction1DataLogLevel string + +// PostFleetAgentsAgentidActionsJSONBodyAction1Type defines parameters for PostFleetAgentsAgentidActions. +type PostFleetAgentsAgentidActionsJSONBodyAction1Type string + +// PostFleetAgentsAgentidActionsJSONBody_Action defines parameters for PostFleetAgentsAgentidActions. +type PostFleetAgentsAgentidActionsJSONBody_Action struct { + union json.RawMessage +} + +// PostFleetAgentsAgentidReassignJSONBody defines parameters for PostFleetAgentsAgentidReassign. +type PostFleetAgentsAgentidReassignJSONBody struct { + PolicyId string `json:"policy_id"` +} + +// PostFleetAgentsAgentidRequestDiagnosticsJSONBody defines parameters for PostFleetAgentsAgentidRequestDiagnostics. +type PostFleetAgentsAgentidRequestDiagnosticsJSONBody struct { + AdditionalMetrics *[]PostFleetAgentsAgentidRequestDiagnosticsJSONBodyAdditionalMetrics `json:"additional_metrics,omitempty"` +} + +// PostFleetAgentsAgentidRequestDiagnosticsJSONBodyAdditionalMetrics defines parameters for PostFleetAgentsAgentidRequestDiagnostics. +type PostFleetAgentsAgentidRequestDiagnosticsJSONBodyAdditionalMetrics string + +// PostFleetAgentsAgentidUnenrollJSONBody defines parameters for PostFleetAgentsAgentidUnenroll. 
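// Editor's illustrative sketch, not part of the generated file: the two wire shapes the
// bulk endpoints' "agents" field accepts (a list of agent IDs, or a KQL query string),
// which is why the generated unions above wrap a json.RawMessage. IDs, query, and
// version are hypothetical values.
func exampleBulkUpgradeBodies() ([][]byte, error) {
	byIDs := map[string]interface{}{
		"agents":  []string{"agent-id-1", "agent-id-2"},
		"version": "8.14.0",
	}
	byQuery := map[string]interface{}{
		"agents":  `policy_id:"default-policy"`,
		"version": "8.14.0",
	}
	a, err := json.Marshal(byIDs) // requires "encoding/json"
	if err != nil {
		return nil, err
	}
	b, err := json.Marshal(byQuery)
	if err != nil {
		return nil, err
	}
	return [][]byte{a, b}, nil
}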
+type PostFleetAgentsAgentidUnenrollJSONBody struct { + Force *bool `json:"force,omitempty"` + Revoke *bool `json:"revoke,omitempty"` +} + +// PostFleetAgentsAgentidUpgradeJSONBody defines parameters for PostFleetAgentsAgentidUpgrade. +type PostFleetAgentsAgentidUpgradeJSONBody struct { + Force *bool `json:"force,omitempty"` + SkipRateLimitCheck *bool `json:"skipRateLimitCheck,omitempty"` + SourceUri *string `json:"source_uri,omitempty"` + Version string `json:"version"` +} + +// GetFleetCheckPermissionsParams defines parameters for GetFleetCheckPermissions. +type GetFleetCheckPermissionsParams struct { + FleetServerSetup *bool `form:"fleetServerSetup,omitempty" json:"fleetServerSetup,omitempty"` +} + +// GetFleetEnrollmentApiKeysParams defines parameters for GetFleetEnrollmentApiKeys. +type GetFleetEnrollmentApiKeysParams struct { + Page *float32 `form:"page,omitempty" json:"page,omitempty"` + PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` + Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` +} + +// PostFleetEnrollmentApiKeysJSONBody defines parameters for PostFleetEnrollmentApiKeys. +type PostFleetEnrollmentApiKeysJSONBody struct { + Expiration *string `json:"expiration,omitempty"` + Name *string `json:"name,omitempty"` + PolicyId string `json:"policy_id"` +} + +// PostFleetEpmBulkAssetsJSONBody defines parameters for PostFleetEpmBulkAssets. +type PostFleetEpmBulkAssetsJSONBody struct { + AssetIds []struct { + Id string `json:"id"` + Type string `json:"type"` + } `json:"assetIds"` +} + +// GetFleetEpmCategoriesParams defines parameters for GetFleetEpmCategories. +type GetFleetEpmCategoriesParams struct { + Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + IncludePolicyTemplates *bool `form:"include_policy_templates,omitempty" json:"include_policy_templates,omitempty"` +} + +// PostFleetEpmCustomIntegrationsJSONBody defines parameters for PostFleetEpmCustomIntegrations. +type PostFleetEpmCustomIntegrationsJSONBody struct { + Datasets []struct { + Name string `json:"name"` + Type PostFleetEpmCustomIntegrationsJSONBodyDatasetsType `json:"type"` + } `json:"datasets"` + Force *bool `json:"force,omitempty"` + IntegrationName string `json:"integrationName"` +} + +// PostFleetEpmCustomIntegrationsJSONBodyDatasetsType defines parameters for PostFleetEpmCustomIntegrations. +type PostFleetEpmCustomIntegrationsJSONBodyDatasetsType string + +// PutFleetEpmCustomIntegrationsPkgnameJSONBody defines parameters for PutFleetEpmCustomIntegrationsPkgname. +type PutFleetEpmCustomIntegrationsPkgnameJSONBody struct { + Categories *[]string `json:"categories,omitempty"` + ReadMeData string `json:"readMeData"` +} + +// GetFleetEpmDataStreamsParams defines parameters for GetFleetEpmDataStreams. +type GetFleetEpmDataStreamsParams struct { + Type *GetFleetEpmDataStreamsParamsType `form:"type,omitempty" json:"type,omitempty"` + DatasetQuery *string `form:"datasetQuery,omitempty" json:"datasetQuery,omitempty"` + SortOrder *GetFleetEpmDataStreamsParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` + UncategorisedOnly *bool `form:"uncategorisedOnly,omitempty" json:"uncategorisedOnly,omitempty"` +} + +// GetFleetEpmDataStreamsParamsType defines parameters for GetFleetEpmDataStreams. +type GetFleetEpmDataStreamsParamsType string + +// GetFleetEpmDataStreamsParamsSortOrder defines parameters for GetFleetEpmDataStreams. +type GetFleetEpmDataStreamsParamsSortOrder string + +// GetFleetEpmPackagesParams defines parameters for GetFleetEpmPackages. 
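// Editor's illustrative sketch, not part of the generated file: creating an enrollment
// API key scoped to a single agent policy, using PostFleetEnrollmentApiKeysJSONBody
// defined above. The key name, expiration, and policy ID are hypothetical.
func exampleEnrollmentKeyBody() ([]byte, error) {
	name := "ci-enrollment-key"
	expiration := "30d"
	body := PostFleetEnrollmentApiKeysJSONBody{
		Name:       &name,
		Expiration: &expiration,
		PolicyId:   "default-agent-policy", // required
	}
	return json.Marshal(body) // requires "encoding/json"
}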
+type GetFleetEpmPackagesParams struct { + Category *string `form:"category,omitempty" json:"category,omitempty"` + Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + ExcludeInstallStatus *bool `form:"excludeInstallStatus,omitempty" json:"excludeInstallStatus,omitempty"` + WithPackagePoliciesCount *bool `form:"withPackagePoliciesCount,omitempty" json:"withPackagePoliciesCount,omitempty"` +} + +// PostFleetEpmPackagesParams defines parameters for PostFleetEpmPackages. +type PostFleetEpmPackagesParams struct { + IgnoreMappingUpdateErrors *bool `form:"ignoreMappingUpdateErrors,omitempty" json:"ignoreMappingUpdateErrors,omitempty"` + SkipDataStreamRollover *bool `form:"skipDataStreamRollover,omitempty" json:"skipDataStreamRollover,omitempty"` +} + +// PostFleetEpmPackagesBulkJSONBody defines parameters for PostFleetEpmPackagesBulk. +type PostFleetEpmPackagesBulkJSONBody struct { + Force *bool `json:"force,omitempty"` + Packages []PostFleetEpmPackagesBulkJSONBody_Packages_Item `json:"packages"` +} + +// PostFleetEpmPackagesBulkParams defines parameters for PostFleetEpmPackagesBulk. +type PostFleetEpmPackagesBulkParams struct { + Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` +} + +// PostFleetEpmPackagesBulkJSONBodyPackages0 defines parameters for PostFleetEpmPackagesBulk. +type PostFleetEpmPackagesBulkJSONBodyPackages0 = string + +// PostFleetEpmPackagesBulkJSONBodyPackages1 defines parameters for PostFleetEpmPackagesBulk. +type PostFleetEpmPackagesBulkJSONBodyPackages1 struct { + Name string `json:"name"` + Prerelease *bool `json:"prerelease,omitempty"` + Version string `json:"version"` +} + +// PostFleetEpmPackagesBulkJSONBody_Packages_Item defines parameters for PostFleetEpmPackagesBulk. +type PostFleetEpmPackagesBulkJSONBody_Packages_Item struct { + union json.RawMessage +} + +// PostFleetEpmPackagesBulkUninstallJSONBody defines parameters for PostFleetEpmPackagesBulkUninstall. +type PostFleetEpmPackagesBulkUninstallJSONBody struct { + Force *bool `json:"force,omitempty"` + Packages []struct { + Name string `json:"name"` + Version string `json:"version"` + } `json:"packages"` +} + +// PostFleetEpmPackagesBulkUpgradeJSONBody defines parameters for PostFleetEpmPackagesBulkUpgrade. +type PostFleetEpmPackagesBulkUpgradeJSONBody struct { + Force *bool `json:"force,omitempty"` + Packages []struct { + Name string `json:"name"` + Version *string `json:"version,omitempty"` + } `json:"packages"` + Prerelease *bool `json:"prerelease,omitempty"` + UpgradePackagePolicies *bool `json:"upgrade_package_policies,omitempty"` +} + +// GetFleetEpmPackagesInstalledParams defines parameters for GetFleetEpmPackagesInstalled. +type GetFleetEpmPackagesInstalledParams struct { + DataStreamType *GetFleetEpmPackagesInstalledParamsDataStreamType `form:"dataStreamType,omitempty" json:"dataStreamType,omitempty"` + ShowOnlyActiveDataStreams *bool `form:"showOnlyActiveDataStreams,omitempty" json:"showOnlyActiveDataStreams,omitempty"` + NameQuery *string `form:"nameQuery,omitempty" json:"nameQuery,omitempty"` + SearchAfter *[]GetFleetEpmPackagesInstalledParams_SearchAfter_Item `form:"searchAfter,omitempty" json:"searchAfter,omitempty"` + PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` + SortOrder *GetFleetEpmPackagesInstalledParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` +} + +// GetFleetEpmPackagesInstalledParamsDataStreamType defines parameters for GetFleetEpmPackagesInstalled. 
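// Editor's illustrative sketch, not part of the generated file: the two package shapes
// the bulk install body accepts, mirroring the PostFleetEpmPackagesBulkJSONBody_Packages_Item
// union above (a bare package name, or a name/version object). Package names and the
// version are hypothetical.
func exampleBulkInstallBody() ([]byte, error) {
	body := map[string]interface{}{
		"packages": []interface{}{
			"system",
			map[string]interface{}{"name": "nginx", "version": "1.22.0"},
		},
	}
	return json.Marshal(body) // requires "encoding/json"
}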
+type GetFleetEpmPackagesInstalledParamsDataStreamType string + +// GetFleetEpmPackagesInstalledParamsSearchAfter0 defines parameters for GetFleetEpmPackagesInstalled. +type GetFleetEpmPackagesInstalledParamsSearchAfter0 = string + +// GetFleetEpmPackagesInstalledParamsSearchAfter1 defines parameters for GetFleetEpmPackagesInstalled. +type GetFleetEpmPackagesInstalledParamsSearchAfter1 = float32 + +// GetFleetEpmPackagesInstalledParams_SearchAfter_Item defines parameters for GetFleetEpmPackagesInstalled. +type GetFleetEpmPackagesInstalledParams_SearchAfter_Item struct { + union json.RawMessage +} + +// GetFleetEpmPackagesInstalledParamsSortOrder defines parameters for GetFleetEpmPackagesInstalled. +type GetFleetEpmPackagesInstalledParamsSortOrder string + +// DeleteFleetEpmPackagesPkgnamePkgversionParams defines parameters for DeleteFleetEpmPackagesPkgnamePkgversion. +type DeleteFleetEpmPackagesPkgnamePkgversionParams struct { + Force *bool `form:"force,omitempty" json:"force,omitempty"` +} + +// GetFleetEpmPackagesPkgnamePkgversionParams defines parameters for GetFleetEpmPackagesPkgnamePkgversion. +type GetFleetEpmPackagesPkgnamePkgversionParams struct { + IgnoreUnverified *bool `form:"ignoreUnverified,omitempty" json:"ignoreUnverified,omitempty"` + Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + Full *bool `form:"full,omitempty" json:"full,omitempty"` + WithMetadata *bool `form:"withMetadata,omitempty" json:"withMetadata,omitempty"` +} + +// PostFleetEpmPackagesPkgnamePkgversionJSONBody defines parameters for PostFleetEpmPackagesPkgnamePkgversion. +type PostFleetEpmPackagesPkgnamePkgversionJSONBody struct { + Force *bool `json:"force,omitempty"` + IgnoreConstraints *bool `json:"ignore_constraints,omitempty"` +} + +// PostFleetEpmPackagesPkgnamePkgversionParams defines parameters for PostFleetEpmPackagesPkgnamePkgversion. +type PostFleetEpmPackagesPkgnamePkgversionParams struct { + Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + IgnoreMappingUpdateErrors *bool `form:"ignoreMappingUpdateErrors,omitempty" json:"ignoreMappingUpdateErrors,omitempty"` + SkipDataStreamRollover *bool `form:"skipDataStreamRollover,omitempty" json:"skipDataStreamRollover,omitempty"` +} + +// PutFleetEpmPackagesPkgnamePkgversionJSONBody defines parameters for PutFleetEpmPackagesPkgnamePkgversion. +type PutFleetEpmPackagesPkgnamePkgversionJSONBody struct { + KeepPoliciesUpToDate bool `json:"keepPoliciesUpToDate"` +} + +// DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsParams defines parameters for DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssets. +type DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsParams struct { + PackagePolicyId string `form:"packagePolicyId" json:"packagePolicyId"` +} + +// PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsJSONBody defines parameters for PostFleetEpmPackagesPkgnamePkgversionKibanaAssets. +type PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsJSONBody struct { + Force *bool `json:"force,omitempty"` + + // SpaceIds When provided install assets in the specified spaces instead of the current space. + SpaceIds *[]string `json:"space_ids,omitempty"` +} + +// PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeJSONBody defines parameters for PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorize. 
+type PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeJSONBody struct { + Transforms []struct { + TransformId string `json:"transformId"` + } `json:"transforms"` +} + +// PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams defines parameters for PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorize. +type PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams struct { + Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` +} + +// GetFleetEpmTemplatesPkgnamePkgversionInputsParams defines parameters for GetFleetEpmTemplatesPkgnamePkgversionInputs. +type GetFleetEpmTemplatesPkgnamePkgversionInputsParams struct { + Format *GetFleetEpmTemplatesPkgnamePkgversionInputsParamsFormat `form:"format,omitempty" json:"format,omitempty"` + Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` + IgnoreUnverified *bool `form:"ignoreUnverified,omitempty" json:"ignoreUnverified,omitempty"` +} + +// GetFleetEpmTemplatesPkgnamePkgversionInputsParamsFormat defines parameters for GetFleetEpmTemplatesPkgnamePkgversionInputs. +type GetFleetEpmTemplatesPkgnamePkgversionInputsParamsFormat string + +// PostFleetFleetServerHostsJSONBody defines parameters for PostFleetFleetServerHosts. +type PostFleetFleetServerHostsJSONBody struct { + HostUrls []string `json:"host_urls"` + Id *string `json:"id,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + EsKey *PostFleetFleetServerHostsJSONBody_Secrets_Ssl_EsKey `json:"es_key,omitempty"` + Key *PostFleetFleetServerHostsJSONBody_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + ClientAuth *PostFleetFleetServerHostsJSONBodySslClientAuth `json:"client_auth,omitempty"` + EsCertificate *string `json:"es_certificate,omitempty"` + EsCertificateAuthorities *[]string `json:"es_certificate_authorities,omitempty"` + EsKey *string `json:"es_key,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` +} + +// PostFleetFleetServerHostsJSONBodySecretsSslEsKey0 defines parameters for PostFleetFleetServerHosts. +type PostFleetFleetServerHostsJSONBodySecretsSslEsKey0 struct { + Id string `json:"id"` +} + +// PostFleetFleetServerHostsJSONBodySecretsSslEsKey1 defines parameters for PostFleetFleetServerHosts. +type PostFleetFleetServerHostsJSONBodySecretsSslEsKey1 = string + +// PostFleetFleetServerHostsJSONBody_Secrets_Ssl_EsKey defines parameters for PostFleetFleetServerHosts. +type PostFleetFleetServerHostsJSONBody_Secrets_Ssl_EsKey struct { + union json.RawMessage +} + +// PostFleetFleetServerHostsJSONBodySecretsSslKey0 defines parameters for PostFleetFleetServerHosts. +type PostFleetFleetServerHostsJSONBodySecretsSslKey0 struct { + Id string `json:"id"` +} + +// PostFleetFleetServerHostsJSONBodySecretsSslKey1 defines parameters for PostFleetFleetServerHosts. +type PostFleetFleetServerHostsJSONBodySecretsSslKey1 = string + +// PostFleetFleetServerHostsJSONBody_Secrets_Ssl_Key defines parameters for PostFleetFleetServerHosts. 
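// Editor's illustrative sketch, not part of the generated file: the two wire shapes the
// secrets.ssl.es_key union above accepts — a reference to a stored secret ({"id": ...})
// or the key material inline as a string. All values are hypothetical.
func exampleFleetServerHostSecrets() ([][]byte, error) {
	byReference := map[string]interface{}{
		"secrets": map[string]interface{}{
			"ssl": map[string]interface{}{"es_key": map[string]string{"id": "secret-id-123"}},
		},
	}
	inline := map[string]interface{}{
		"secrets": map[string]interface{}{
			"ssl": map[string]interface{}{"es_key": "-----BEGIN PRIVATE KEY-----"},
		},
	}
	a, err := json.Marshal(byReference) // requires "encoding/json"
	if err != nil {
		return nil, err
	}
	b, err := json.Marshal(inline)
	if err != nil {
		return nil, err
	}
	return [][]byte{a, b}, nil
}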
+type PostFleetFleetServerHostsJSONBody_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// PostFleetFleetServerHostsJSONBodySslClientAuth defines parameters for PostFleetFleetServerHosts. +type PostFleetFleetServerHostsJSONBodySslClientAuth string + +// PutFleetFleetServerHostsItemidJSONBody defines parameters for PutFleetFleetServerHostsItemid. +type PutFleetFleetServerHostsItemidJSONBody struct { + HostUrls *[]string `json:"host_urls,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsInternal *bool `json:"is_internal,omitempty"` + Name *string `json:"name,omitempty"` + ProxyId *string `json:"proxy_id,omitempty"` + Secrets *struct { + Ssl *struct { + EsKey *PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_EsKey `json:"es_key,omitempty"` + Key *PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + ClientAuth *PutFleetFleetServerHostsItemidJSONBodySslClientAuth `json:"client_auth,omitempty"` + EsCertificate *string `json:"es_certificate,omitempty"` + EsCertificateAuthorities *[]string `json:"es_certificate_authorities,omitempty"` + EsKey *string `json:"es_key,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` +} + +// PutFleetFleetServerHostsItemidJSONBodySecretsSslEsKey0 defines parameters for PutFleetFleetServerHostsItemid. +type PutFleetFleetServerHostsItemidJSONBodySecretsSslEsKey0 struct { + Id string `json:"id"` +} + +// PutFleetFleetServerHostsItemidJSONBodySecretsSslEsKey1 defines parameters for PutFleetFleetServerHostsItemid. +type PutFleetFleetServerHostsItemidJSONBodySecretsSslEsKey1 = string + +// PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_EsKey defines parameters for PutFleetFleetServerHostsItemid. +type PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_EsKey struct { + union json.RawMessage +} + +// PutFleetFleetServerHostsItemidJSONBodySecretsSslKey0 defines parameters for PutFleetFleetServerHostsItemid. +type PutFleetFleetServerHostsItemidJSONBodySecretsSslKey0 struct { + Id string `json:"id"` +} + +// PutFleetFleetServerHostsItemidJSONBodySecretsSslKey1 defines parameters for PutFleetFleetServerHostsItemid. +type PutFleetFleetServerHostsItemidJSONBodySecretsSslKey1 = string + +// PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_Key defines parameters for PutFleetFleetServerHostsItemid. +type PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// PutFleetFleetServerHostsItemidJSONBodySslClientAuth defines parameters for PutFleetFleetServerHostsItemid. +type PutFleetFleetServerHostsItemidJSONBodySslClientAuth string + +// PostFleetHealthCheckJSONBody defines parameters for PostFleetHealthCheck. +type PostFleetHealthCheckJSONBody struct { + Id string `json:"id"` +} + +// GetFleetKubernetesParams defines parameters for GetFleetKubernetes. +type GetFleetKubernetesParams struct { + Download *bool `form:"download,omitempty" json:"download,omitempty"` + FleetServer *string `form:"fleetServer,omitempty" json:"fleetServer,omitempty"` + EnrolToken *string `form:"enrolToken,omitempty" json:"enrolToken,omitempty"` +} + +// GetFleetKubernetesDownloadParams defines parameters for GetFleetKubernetesDownload. 
+type GetFleetKubernetesDownloadParams struct { + Download *bool `form:"download,omitempty" json:"download,omitempty"` + FleetServer *string `form:"fleetServer,omitempty" json:"fleetServer,omitempty"` + EnrolToken *string `form:"enrolToken,omitempty" json:"enrolToken,omitempty"` +} + +// PostFleetMessageSigningServiceRotateKeyPairParams defines parameters for PostFleetMessageSigningServiceRotateKeyPair. +type PostFleetMessageSigningServiceRotateKeyPairParams struct { + Acknowledge *bool `form:"acknowledge,omitempty" json:"acknowledge,omitempty"` +} + +// GetFleetPackagePoliciesParams defines parameters for GetFleetPackagePolicies. +type GetFleetPackagePoliciesParams struct { + Page *float32 `form:"page,omitempty" json:"page,omitempty"` + PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` + SortField *string `form:"sortField,omitempty" json:"sortField,omitempty"` + SortOrder *GetFleetPackagePoliciesParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` + ShowUpgradeable *bool `form:"showUpgradeable,omitempty" json:"showUpgradeable,omitempty"` + Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` + Format *GetFleetPackagePoliciesParamsFormat `form:"format,omitempty" json:"format,omitempty"` + WithAgentCount *bool `form:"withAgentCount,omitempty" json:"withAgentCount,omitempty"` +} + +// GetFleetPackagePoliciesParamsSortOrder defines parameters for GetFleetPackagePolicies. +type GetFleetPackagePoliciesParamsSortOrder string + +// GetFleetPackagePoliciesParamsFormat defines parameters for GetFleetPackagePolicies. +type GetFleetPackagePoliciesParamsFormat string + +// PostFleetPackagePoliciesParams defines parameters for PostFleetPackagePolicies. +type PostFleetPackagePoliciesParams struct { + Format *PostFleetPackagePoliciesParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// PostFleetPackagePoliciesParamsFormat defines parameters for PostFleetPackagePolicies. +type PostFleetPackagePoliciesParamsFormat string + +// PostFleetPackagePoliciesBulkGetJSONBody defines parameters for PostFleetPackagePoliciesBulkGet. +type PostFleetPackagePoliciesBulkGetJSONBody struct { + // Ids list of package policy ids + Ids []string `json:"ids"` + IgnoreMissing *bool `json:"ignoreMissing,omitempty"` +} + +// PostFleetPackagePoliciesBulkGetParams defines parameters for PostFleetPackagePoliciesBulkGet. +type PostFleetPackagePoliciesBulkGetParams struct { + Format *PostFleetPackagePoliciesBulkGetParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// PostFleetPackagePoliciesBulkGetParamsFormat defines parameters for PostFleetPackagePoliciesBulkGet. +type PostFleetPackagePoliciesBulkGetParamsFormat string + +// PostFleetPackagePoliciesDeleteJSONBody defines parameters for PostFleetPackagePoliciesDelete. +type PostFleetPackagePoliciesDeleteJSONBody struct { + Force *bool `json:"force,omitempty"` + PackagePolicyIds []string `json:"packagePolicyIds"` +} + +// PostFleetPackagePoliciesUpgradeJSONBody defines parameters for PostFleetPackagePoliciesUpgrade. +type PostFleetPackagePoliciesUpgradeJSONBody struct { + PackagePolicyIds []string `json:"packagePolicyIds"` +} + +// PostFleetPackagePoliciesUpgradeDryrunJSONBody defines parameters for PostFleetPackagePoliciesUpgradeDryrun. 
+type PostFleetPackagePoliciesUpgradeDryrunJSONBody struct { + PackagePolicyIds []string `json:"packagePolicyIds"` + PackageVersion *string `json:"packageVersion,omitempty"` +} + +// DeleteFleetPackagePoliciesPackagepolicyidParams defines parameters for DeleteFleetPackagePoliciesPackagepolicyid. +type DeleteFleetPackagePoliciesPackagepolicyidParams struct { + Force *bool `form:"force,omitempty" json:"force,omitempty"` +} + +// GetFleetPackagePoliciesPackagepolicyidParams defines parameters for GetFleetPackagePoliciesPackagepolicyid. +type GetFleetPackagePoliciesPackagepolicyidParams struct { + Format *GetFleetPackagePoliciesPackagepolicyidParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// GetFleetPackagePoliciesPackagepolicyidParamsFormat defines parameters for GetFleetPackagePoliciesPackagepolicyid. +type GetFleetPackagePoliciesPackagepolicyidParamsFormat string + +// PutFleetPackagePoliciesPackagepolicyidParams defines parameters for PutFleetPackagePoliciesPackagepolicyid. +type PutFleetPackagePoliciesPackagepolicyidParams struct { + Format *PutFleetPackagePoliciesPackagepolicyidParamsFormat `form:"format,omitempty" json:"format,omitempty"` +} + +// PutFleetPackagePoliciesPackagepolicyidParamsFormat defines parameters for PutFleetPackagePoliciesPackagepolicyid. +type PutFleetPackagePoliciesPackagepolicyidParamsFormat string + +// PostFleetProxiesJSONBody defines parameters for PostFleetProxies. +type PostFleetProxiesJSONBody struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *string `json:"certificate_authorities,omitempty"` + CertificateKey *string `json:"certificate_key,omitempty"` + Id *string `json:"id,omitempty"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyHeaders *map[string]PostFleetProxiesJSONBody_ProxyHeaders_AdditionalProperties `json:"proxy_headers,omitempty"` + Url string `json:"url"` +} + +// PostFleetProxiesJSONBodyProxyHeaders0 defines parameters for PostFleetProxies. +type PostFleetProxiesJSONBodyProxyHeaders0 = string + +// PostFleetProxiesJSONBodyProxyHeaders1 defines parameters for PostFleetProxies. +type PostFleetProxiesJSONBodyProxyHeaders1 = bool + +// PostFleetProxiesJSONBodyProxyHeaders2 defines parameters for PostFleetProxies. +type PostFleetProxiesJSONBodyProxyHeaders2 = float32 + +// PostFleetProxiesJSONBody_ProxyHeaders_AdditionalProperties defines parameters for PostFleetProxies. +type PostFleetProxiesJSONBody_ProxyHeaders_AdditionalProperties struct { + union json.RawMessage +} + +// PutFleetProxiesItemidJSONBody defines parameters for PutFleetProxiesItemid. +type PutFleetProxiesItemidJSONBody struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *string `json:"certificate_authorities,omitempty"` + CertificateKey *string `json:"certificate_key,omitempty"` + Name *string `json:"name,omitempty"` + ProxyHeaders *map[string]PutFleetProxiesItemidJSONBody_ProxyHeaders_AdditionalProperties `json:"proxy_headers,omitempty"` + Url *string `json:"url,omitempty"` +} + +// PutFleetProxiesItemidJSONBodyProxyHeaders0 defines parameters for PutFleetProxiesItemid. +type PutFleetProxiesItemidJSONBodyProxyHeaders0 = string + +// PutFleetProxiesItemidJSONBodyProxyHeaders1 defines parameters for PutFleetProxiesItemid. +type PutFleetProxiesItemidJSONBodyProxyHeaders1 = bool + +// PutFleetProxiesItemidJSONBodyProxyHeaders2 defines parameters for PutFleetProxiesItemid. 
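// Editor's illustrative sketch, not part of the generated file: proxy_headers values may
// be strings, booleans, or numbers, which is why each entry above is generated as a union
// over json.RawMessage. A raw map is used here because the union field is unexported;
// header names and values are hypothetical.
func exampleProxyBody() ([]byte, error) {
	body := map[string]interface{}{
		"name": "corp-proxy",
		"url":  "https://proxy.internal:3128",
		"proxy_headers": map[string]interface{}{
			"Proxy-Authorization": "Basic ...",
			"X-Keep-Alive":        true,
			"X-Timeout-Seconds":   30,
		},
	}
	return json.Marshal(body) // requires "encoding/json"
}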
+type PutFleetProxiesItemidJSONBodyProxyHeaders2 = float32
+
+// PutFleetProxiesItemidJSONBody_ProxyHeaders_AdditionalProperties defines parameters for PutFleetProxiesItemid.
+type PutFleetProxiesItemidJSONBody_ProxyHeaders_AdditionalProperties struct {
+ union json.RawMessage
+}
+
+// PostFleetServiceTokensJSONBody defines parameters for PostFleetServiceTokens.
+type PostFleetServiceTokensJSONBody struct {
+ Remote *bool `json:"remote,omitempty"`
+}
+
+// PutFleetSettingsJSONBody defines parameters for PutFleetSettings.
+type PutFleetSettingsJSONBody struct {
+ AdditionalYamlConfig *string `json:"additional_yaml_config,omitempty"`
+ DeleteUnenrolledAgents *struct {
+ Enabled bool `json:"enabled"`
+ IsPreconfigured bool `json:"is_preconfigured"`
+ } `json:"delete_unenrolled_agents,omitempty"`
+ HasSeenAddDataNotice *bool `json:"has_seen_add_data_notice,omitempty"`
+ KibanaCaSha256 *string `json:"kibana_ca_sha256,omitempty"`
+ KibanaUrls *[]string `json:"kibana_urls,omitempty"`
+ PrereleaseIntegrationsEnabled *bool `json:"prerelease_integrations_enabled,omitempty"`
+}
+
+// PutFleetSpaceSettingsJSONBody defines parameters for PutFleetSpaceSettings.
+type PutFleetSpaceSettingsJSONBody struct {
+ AllowedNamespacePrefixes *[]string `json:"allowed_namespace_prefixes,omitempty"`
+}
+
+// GetFleetUninstallTokensParams defines parameters for GetFleetUninstallTokens.
+type GetFleetUninstallTokensParams struct {
+ // PolicyId Partial match filtering for policy IDs
+ PolicyId *string `form:"policyId,omitempty" json:"policyId,omitempty"`
+ Search *string `form:"search,omitempty" json:"search,omitempty"`
+
+ // PerPage The number of items to return
+ PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"`
+ Page *float32 `form:"page,omitempty" json:"page,omitempty"`
+}
+
+// DeleteListParams defines parameters for DeleteList.
+type DeleteListParams struct {
+ Id SecurityListsAPIListId `form:"id" json:"id"`
+
+ // DeleteReferences Determines whether exception items referencing this value list should be deleted.
+ DeleteReferences *bool `form:"deleteReferences,omitempty" json:"deleteReferences,omitempty"`
+
+ // IgnoreReferences Determines whether to delete value list without performing any additional checks of where this list may be utilized.
+ IgnoreReferences *bool `form:"ignoreReferences,omitempty" json:"ignoreReferences,omitempty"`
+}
+
+// ReadListParams defines parameters for ReadList.
+type ReadListParams struct {
+ Id SecurityListsAPIListId `form:"id" json:"id"`
+}
+
+// PatchListJSONBody defines parameters for PatchList.
+type PatchListJSONBody struct {
+ // UnderscoreVersion The version id, normally returned by the API when the document is retrieved. Use it to ensure updates are done against the latest version.
+ UnderscoreVersion *SecurityListsAPIListVersionId `json:"_version,omitempty"`
+
+ // Description Describes the value list.
+ Description *SecurityListsAPIListDescription `json:"description,omitempty"`
+
+ // Id Value list's identifier.
+ Id SecurityListsAPIListId `json:"id"`
+
+ // Meta Placeholder for metadata about the value list.
+ Meta *SecurityListsAPIListMetadata `json:"meta,omitempty"`
+
+ // Name Value list's name.
+ Name *SecurityListsAPIListName `json:"name,omitempty"`
+
+ // Version The document version number.
+ Version *SecurityListsAPIListVersion `json:"version,omitempty"`
+}
+
+// CreateListJSONBody defines parameters for CreateList.
+type CreateListJSONBody struct {
+ // Description Describes the value list.
+ Description SecurityListsAPIListDescription `json:"description"`
+
+ // Deserializer Determines how retrieved list item values are presented. By default list items are presented using these Handlebars expressions:
+ //
+ // - `{{{value}}}` - Single value item types, such as `ip`, `long`, `date`, `keyword`, and `text`.
+ // - `{{{gte}}}-{{{lte}}}` - Range value item types, such as `ip_range`, `double_range`, `float_range`, `integer_range`, and `long_range`.
+ // - `{{{gte}}},{{{lte}}}` - Date range values.
+ Deserializer *SecurityListsAPIListDeserializer `json:"deserializer,omitempty"`
+
+ // Id Value list's identifier.
+ Id *SecurityListsAPIListId `json:"id,omitempty"`
+
+ // Meta Placeholder for metadata about the value list.
+ Meta *SecurityListsAPIListMetadata `json:"meta,omitempty"`
+
+ // Name Value list's name.
+ Name SecurityListsAPIListName `json:"name"`
+
+ // Serializer Determines how uploaded list item values are parsed. By default, list items are parsed using these named regex groups:
+ //
+ // - `(?<value>.+)` - Single value item types, such as ip, long, date, keyword, and text.
+ // - `(?<gte>.+)-(?<lte>.+)|(?<value>.+)` - Range value item types, such as `date_range`, `ip_range`, `double_range`, `float_range`, `integer_range`, and `long_range`.
+ Serializer *SecurityListsAPIListSerializer `json:"serializer,omitempty"`
+
+ // Type Specifies the Elasticsearch data type of the values the list container holds. Some common examples:
+ //
+ // - `keyword`: Many ECS fields are Elasticsearch keywords
+ // - `ip`: IP addresses
+ // - `ip_range`: Range of IP addresses (supports IPv4, IPv6, and CIDR notation)
+ Type SecurityListsAPIListType `json:"type"`
+ Version *int `json:"version,omitempty"`
+}
+
+// UpdateListJSONBody defines parameters for UpdateList.
+type UpdateListJSONBody struct {
+ // UnderscoreVersion The version id, normally returned by the API when the document is retrieved. Use it to ensure updates are done against the latest version.
+ UnderscoreVersion *SecurityListsAPIListVersionId `json:"_version,omitempty"`
+
+ // Description Describes the value list.
+ Description SecurityListsAPIListDescription `json:"description"`
+
+ // Id Value list's identifier.
+ Id SecurityListsAPIListId `json:"id"`
+
+ // Meta Placeholder for metadata about the value list.
+ Meta *SecurityListsAPIListMetadata `json:"meta,omitempty"`
+
+ // Name Value list's name.
+ Name SecurityListsAPIListName `json:"name"`
+
+ // Version The document version number.
+ Version *SecurityListsAPIListVersion `json:"version,omitempty"`
+}
+
+// FindListsParams defines parameters for FindLists.
+type FindListsParams struct {
+ // Page The page number to return.
+ Page *int `form:"page,omitempty" json:"page,omitempty"`
+
+ // PerPage The number of value lists to return per page.
+ PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"`
+
+ // SortField Determines which field is used to sort the results.
+ SortField *string `form:"sort_field,omitempty" json:"sort_field,omitempty"`
+
+ // SortOrder Determines the sort order, which can be `desc` or `asc`
+ SortOrder *FindListsParamsSortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"`
+
+ // Cursor Returns the lists that come after the last lists returned in the previous call (use the `cursor` value returned in the previous call). This parameter uses the `tie_breaker_id` field to ensure all lists are sorted and returned correctly.
+ Cursor *SecurityListsAPIFindListsCursor `form:"cursor,omitempty" json:"cursor,omitempty"`
+
+ // Filter Filters the returned results according to the value of the specified field,
+ // using the <field name>:<field value> syntax.
+ Filter *SecurityListsAPIFindListsFilter `form:"filter,omitempty" json:"filter,omitempty"`
+}
+
+// FindListsParamsSortOrder defines parameters for FindLists.
+type FindListsParamsSortOrder string
+
+// DeleteListItemParams defines parameters for DeleteListItem.
+type DeleteListItemParams struct {
+ // Id Value list item's identifier. Required if `list_id` and `value` are not specified.
+ Id *SecurityListsAPIListItemId `form:"id,omitempty" json:"id,omitempty"`
+
+ // ListId Value list's identifier. Required if `id` is not specified.
+ ListId *SecurityListsAPIListId `form:"list_id,omitempty" json:"list_id,omitempty"`
+
+ // Value The value used to evaluate exceptions. Required if `id` is not specified.
+ Value *string `form:"value,omitempty" json:"value,omitempty"`
+
+ // Refresh Determines when changes made by the request are made visible to search.
+ Refresh *DeleteListItemParamsRefresh `form:"refresh,omitempty" json:"refresh,omitempty"`
+}
+
+// DeleteListItemParamsRefresh defines parameters for DeleteListItem.
+type DeleteListItemParamsRefresh string
+
+// ReadListItemParams defines parameters for ReadListItem.
+type ReadListItemParams struct {
+ // Id Value list item identifier. Required if `list_id` and `value` are not specified.
+ Id *SecurityListsAPIListId `form:"id,omitempty" json:"id,omitempty"`
+
+ // ListId Value list item's list `id` identifier. Required if `id` is not specified.
+ ListId *SecurityListsAPIListId `form:"list_id,omitempty" json:"list_id,omitempty"`
+
+ // Value The value used to evaluate exceptions. Required if `id` is not specified.
+ Value *string `form:"value,omitempty" json:"value,omitempty"`
+}
+
+// PatchListItemJSONBody defines parameters for PatchListItem.
+type PatchListItemJSONBody struct {
+ // UnderscoreVersion The version id, normally returned by the API when the document is retrieved. Use it to ensure updates are done against the latest version.
+ UnderscoreVersion *SecurityListsAPIListVersionId `json:"_version,omitempty"`
+
+ // Id Value list item's identifier.
+ Id SecurityListsAPIListItemId `json:"id"`
+
+ // Meta Placeholder for metadata about the value list item.
+ Meta *SecurityListsAPIListItemMetadata `json:"meta,omitempty"`
+
+ // Refresh Determines when changes made by the request are made visible to search.
+ Refresh *PatchListItemJSONBodyRefresh `json:"refresh,omitempty"`
+
+ // Value The value used to evaluate exceptions.
+ Value *SecurityListsAPIListItemValue `json:"value,omitempty"`
+}
+
+// PatchListItemJSONBodyRefresh defines parameters for PatchListItem.
+type PatchListItemJSONBodyRefresh string
+
+// CreateListItemJSONBody defines parameters for CreateListItem.
+type CreateListItemJSONBody struct {
+ // Id Value list item's identifier.
+ Id *SecurityListsAPIListItemId `json:"id,omitempty"`
+
+ // ListId Value list's identifier.
+ ListId SecurityListsAPIListId `json:"list_id"`
+
+ // Meta Placeholder for metadata about the value list item.
+ Meta *SecurityListsAPIListItemMetadata `json:"meta,omitempty"`
+
+ // Refresh Determines when changes made by the request are made visible to search.
+ Refresh *CreateListItemJSONBodyRefresh `json:"refresh,omitempty"`
+
+ // Value The value used to evaluate exceptions.
+ Value SecurityListsAPIListItemValue `json:"value"`
+}
+
+// CreateListItemJSONBodyRefresh defines parameters for CreateListItem.
+type CreateListItemJSONBodyRefresh string
+
+// UpdateListItemJSONBody defines parameters for UpdateListItem.
+type UpdateListItemJSONBody struct {
+ // UnderscoreVersion The version id, normally returned by the API when the document is retrieved. Use it to ensure updates are done against the latest version.
+ UnderscoreVersion *SecurityListsAPIListVersionId `json:"_version,omitempty"`
+
+ // Id Value list item's identifier.
+ Id SecurityListsAPIListItemId `json:"id"`
+
+ // Meta Placeholder for metadata about the value list item.
+ Meta *SecurityListsAPIListItemMetadata `json:"meta,omitempty"`
+
+ // Value The value used to evaluate exceptions.
+ Value SecurityListsAPIListItemValue `json:"value"`
+}
+
+// ExportListItemsParams defines parameters for ExportListItems.
+type ExportListItemsParams struct {
+ // ListId Value list's `id` to export.
+ ListId SecurityListsAPIListId `form:"list_id" json:"list_id"`
+}
+
+// FindListItemsParams defines parameters for FindListItems.
+type FindListItemsParams struct {
+ ListId SecurityListsAPIListId `form:"list_id" json:"list_id"`
+
+ // Page The page number to return.
+ Page *int `form:"page,omitempty" json:"page,omitempty"`
+
+ // PerPage The number of list items to return per page.
+ PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"`
+
+ // SortField Determines which field is used to sort the results.
+ SortField *string `form:"sort_field,omitempty" json:"sort_field,omitempty"`
+
+ // SortOrder Determines the sort order, which can be `desc` or `asc`
+ SortOrder *FindListItemsParamsSortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"`
+ Cursor *SecurityListsAPIFindListItemsCursor `form:"cursor,omitempty" json:"cursor,omitempty"`
+
+ // Filter Filters the returned results according to the value of the specified field,
+ // using the <field name>:<field value> syntax.
+ Filter *SecurityListsAPIFindListItemsFilter `form:"filter,omitempty" json:"filter,omitempty"`
+}
+
+// FindListItemsParamsSortOrder defines parameters for FindListItems.
+type FindListItemsParamsSortOrder string
+
+// ImportListItemsMultipartBody defines parameters for ImportListItems.
+type ImportListItemsMultipartBody struct {
+ // File A `.txt` or `.csv` file containing newline separated list items.
+ File *openapi_types.File `json:"file,omitempty"`
+}
+
+// ImportListItemsParams defines parameters for ImportListItems.
+type ImportListItemsParams struct {
+ // ListId List's id.
+ //
+ // Required when importing to an existing list.
+ ListId *SecurityListsAPIListId `form:"list_id,omitempty" json:"list_id,omitempty"`
+
+ // Type Type of the importing list.
+ //
+ // Required when importing a new list whose list `id` is not specified.
+ Type *SecurityListsAPIListType `form:"type,omitempty" json:"type,omitempty"`
+
+ // Serializer Determines how uploaded list item values are parsed. By default, list items are parsed using these named regex groups:
+ //
+ // - `(?<value>.+)` - Single value item types, such as ip, long, date, keyword, and text.
+ // - `(?<gte>.+)-(?<lte>.+)|(?<value>.+)` - Range value item types, such as `date_range`, `ip_range`, `double_range`, `float_range`, `integer_range`, and `long_range`.
+ Serializer *string `form:"serializer,omitempty" json:"serializer,omitempty"`
+
+ // Deserializer Determines how retrieved list item values are presented.
By default list items are presented using these Handlebars expressions:
+ //
+ // - `{{{value}}}` - Single value item types, such as `ip`, `long`, `date`, `keyword`, and `text`.
+ // - `{{{gte}}}-{{{lte}}}` - Range value item types, such as `ip_range`, `double_range`, `float_range`, `integer_range`, and `long_range`.
+ // - `{{{gte}}},{{{lte}}}` - Date range values.
+ Deserializer *string `form:"deserializer,omitempty" json:"deserializer,omitempty"`
+
+ // Refresh Determines when changes made by the request are made visible to search.
+ Refresh *ImportListItemsParamsRefresh `form:"refresh,omitempty" json:"refresh,omitempty"`
+}
+
+// ImportListItemsParamsRefresh defines parameters for ImportListItems.
+type ImportListItemsParamsRefresh string
+
+// PutLogstashPipelineJSONBody defines parameters for PutLogstashPipeline.
+type PutLogstashPipelineJSONBody struct {
+ // Description A description of the pipeline.
+ Description *string `json:"description,omitempty"`
+
+ // Pipeline A definition for the pipeline.
+ Pipeline string `json:"pipeline"`
+
+ // Settings Supported settings, represented as object keys, include the following:
+ //
+ // - `pipeline.workers`
+ // - `pipeline.batch.size`
+ // - `pipeline.batch.delay`
+ // - `pipeline.ecs_compatibility`
+ // - `pipeline.ordered`
+ // - `queue.type`
+ // - `queue.max_bytes`
+ // - `queue.checkpoint.writes`
+ Settings *map[string]interface{} `json:"settings,omitempty"`
+}
+
+// MlSyncParams defines parameters for MlSync.
+type MlSyncParams struct {
+ // Simulate When true, simulates the synchronization by returning only the list of actions that would be performed.
+ Simulate *MachineLearningAPIsSimulateParam `form:"simulate,omitempty" json:"simulate,omitempty"`
+}
+
+// DeleteNoteJSONBody defines parameters for DeleteNote.
+type DeleteNoteJSONBody struct {
+ union json.RawMessage
+}
+
+// DeleteNoteJSONBody0 defines parameters for DeleteNote.
+type DeleteNoteJSONBody0 struct {
+ NoteId string `json:"noteId"`
+}
+
+// DeleteNoteJSONBody1 defines parameters for DeleteNote.
+type DeleteNoteJSONBody1 struct {
+ NoteIds *[]string `json:"noteIds"`
+}
+
+// GetNotesParams defines parameters for GetNotes.
+type GetNotesParams struct {
+ DocumentIds *SecurityTimelineAPIDocumentIds `form:"documentIds,omitempty" json:"documentIds,omitempty"`
+ SavedObjectIds *SecurityTimelineAPISavedObjectIds `form:"savedObjectIds,omitempty" json:"savedObjectIds,omitempty"`
+ Page *string `form:"page,omitempty" json:"page,omitempty"`
+ PerPage *string `form:"perPage,omitempty" json:"perPage,omitempty"`
+ Search *string `form:"search,omitempty" json:"search,omitempty"`
+ SortField *string `form:"sortField,omitempty" json:"sortField,omitempty"`
+ SortOrder *string `form:"sortOrder,omitempty" json:"sortOrder,omitempty"`
+ Filter *string `form:"filter,omitempty" json:"filter,omitempty"`
+ CreatedByFilter *string `form:"createdByFilter,omitempty" json:"createdByFilter,omitempty"`
+ AssociatedFilter *SecurityTimelineAPIAssociatedFilterType `form:"associatedFilter,omitempty" json:"associatedFilter,omitempty"`
+}
+
+// PersistNoteRouteJSONBody defines parameters for PersistNoteRoute.
+type PersistNoteRouteJSONBody struct {
+ Note SecurityTimelineAPIBareNote `json:"note"`
+
+ // NoteId The `savedObjectId` of the note
+ NoteId *string `json:"noteId,omitempty"`
+
+ // Version The version of the note
+ Version *string `json:"version,omitempty"`
+}
+
+// ObservabilityAiAssistantChatCompleteJSONBody defines parameters for ObservabilityAiAssistantChatComplete.
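// Editor's illustrative sketch, not part of the generated file: how the default
// serializer's named groups (described above) split an imported range item such as an
// ip_range line. Go's regexp uses (?P<name>...) where the API docs write (?<name>...),
// so the pattern below is a simplified, adapted variant for illustration only.
func exampleSplitRangeItem(line string) (gte, lte string) {
	re := regexp.MustCompile(`^(?P<gte>[^-]+)-(?P<lte>.+)$`) // requires "regexp"
	m := re.FindStringSubmatch(line)
	if m == nil {
		return line, "" // single-value items fall through unchanged
	}
	return m[re.SubexpIndex("gte")], m[re.SubexpIndex("lte")]
}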
+type ObservabilityAiAssistantChatCompleteJSONBody struct { + Actions *[]ObservabilityAIAssistantAPIFunction `json:"actions,omitempty"` + + // ConnectorId A unique identifier for the connector. + ConnectorId string `json:"connectorId"` + + // ConversationId A unique identifier for the conversation if you are continuing an existing conversation. + ConversationId *string `json:"conversationId,omitempty"` + + // DisableFunctions Flag indicating whether all function calls should be disabled for the conversation. If true, no calls to functions will be made. + DisableFunctions *bool `json:"disableFunctions,omitempty"` + + // Instructions An array of instruction objects, which can be either simple strings or detailed objects. + Instructions *[]ObservabilityAIAssistantAPIInstruction `json:"instructions,omitempty"` + + // Messages An array of message objects containing the conversation history. + Messages []ObservabilityAIAssistantAPIMessage `json:"messages"` + + // Persist Indicates whether the conversation should be saved to storage. If true, the conversation will be saved and will be available in Kibana. + Persist bool `json:"persist"` + + // Title A title for the conversation. + Title *string `json:"title,omitempty"` +} + +// OsqueryFindLiveQueriesParams defines parameters for OsqueryFindLiveQueries. +type OsqueryFindLiveQueriesParams struct { + Kuery *SecurityOsqueryAPIKueryOrUndefined `form:"kuery,omitempty" json:"kuery,omitempty"` + Page *SecurityOsqueryAPIPageOrUndefined `form:"page,omitempty" json:"page,omitempty"` + PageSize *SecurityOsqueryAPIPageSizeOrUndefined `form:"pageSize,omitempty" json:"pageSize,omitempty"` + Sort *SecurityOsqueryAPISortOrUndefined `form:"sort,omitempty" json:"sort,omitempty"` + SortOrder *SecurityOsqueryAPISortOrderOrUndefined `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` +} + +// OsqueryGetLiveQueryResultsParams defines parameters for OsqueryGetLiveQueryResults. +type OsqueryGetLiveQueryResultsParams struct { + Kuery *SecurityOsqueryAPIKueryOrUndefined `form:"kuery,omitempty" json:"kuery,omitempty"` + Page *SecurityOsqueryAPIPageOrUndefined `form:"page,omitempty" json:"page,omitempty"` + PageSize *SecurityOsqueryAPIPageSizeOrUndefined `form:"pageSize,omitempty" json:"pageSize,omitempty"` + Sort *SecurityOsqueryAPISortOrUndefined `form:"sort,omitempty" json:"sort,omitempty"` + SortOrder *SecurityOsqueryAPISortOrderOrUndefined `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` +} + +// OsqueryFindPacksParams defines parameters for OsqueryFindPacks. +type OsqueryFindPacksParams struct { + Page *SecurityOsqueryAPIPageOrUndefined `form:"page,omitempty" json:"page,omitempty"` + PageSize *SecurityOsqueryAPIPageSizeOrUndefined `form:"pageSize,omitempty" json:"pageSize,omitempty"` + Sort *SecurityOsqueryAPISortOrUndefined `form:"sort,omitempty" json:"sort,omitempty"` + SortOrder *SecurityOsqueryAPISortOrderOrUndefined `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` +} + +// OsqueryFindSavedQueriesParams defines parameters for OsqueryFindSavedQueries. 
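// Illustrative usage sketch (not part of the generated code above): how the
// required fields of ObservabilityAiAssistantChatCompleteJSONBody could be set,
// assuming the referenced ObservabilityAIAssistantAPIMessage type is available
// in the same package. The message slice is left empty here because that type's
// fields are not shown in this hunk; values are placeholders only.
func exampleChatCompleteBody(connectorID string) ObservabilityAiAssistantChatCompleteJSONBody {
	return ObservabilityAiAssistantChatCompleteJSONBody{
		ConnectorId: connectorID,
		// Persist=false keeps the exchange out of Kibana's stored conversations.
		Persist:  false,
		Messages: []ObservabilityAIAssistantAPIMessage{},
	}
}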
+type OsqueryFindSavedQueriesParams struct { + Page *SecurityOsqueryAPIPageOrUndefined `form:"page,omitempty" json:"page,omitempty"` + PageSize *SecurityOsqueryAPIPageSizeOrUndefined `form:"pageSize,omitempty" json:"pageSize,omitempty"` + Sort *SecurityOsqueryAPISortOrUndefined `form:"sort,omitempty" json:"sort,omitempty"` + SortOrder *SecurityOsqueryAPISortOrderOrUndefined `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` +} + +// PersistPinnedEventRouteJSONBody defines parameters for PersistPinnedEventRoute. +type PersistPinnedEventRouteJSONBody struct { + // EventId The `_id` of the associated event for this pinned event. + EventId string `json:"eventId"` + + // PinnedEventId The `savedObjectId` of the pinned event you want to unpin. + PinnedEventId *string `json:"pinnedEventId,omitempty"` + + // TimelineId The `savedObjectId` of the timeline that you want this pinned event unpinned from. + TimelineId string `json:"timelineId"` +} + +// ConfigureRiskEngineSavedObjectJSONBody defines parameters for ConfigureRiskEngineSavedObject. +type ConfigureRiskEngineSavedObjectJSONBody struct { + ExcludeAlertStatuses *[]string `json:"exclude_alert_statuses,omitempty"` + ExcludeAlertTags *[]string `json:"exclude_alert_tags,omitempty"` + Range *struct { + End *string `json:"end,omitempty"` + Start *string `json:"start,omitempty"` + } `json:"range,omitempty"` +} + +// ScheduleRiskEngineNowJSONBody defines parameters for ScheduleRiskEngineNow. +type ScheduleRiskEngineNowJSONBody interface{} + +// BulkCreateSavedObjectsJSONBody defines parameters for BulkCreateSavedObjects. +type BulkCreateSavedObjectsJSONBody = []map[string]interface{} + +// BulkCreateSavedObjectsParams defines parameters for BulkCreateSavedObjects. +type BulkCreateSavedObjectsParams struct { + // Overwrite When true, overwrites the document with the same identifier. + Overwrite *bool `form:"overwrite,omitempty" json:"overwrite,omitempty"` +} + +// BulkDeleteSavedObjectsJSONBody defines parameters for BulkDeleteSavedObjects. +type BulkDeleteSavedObjectsJSONBody = []map[string]interface{} + +// BulkDeleteSavedObjectsParams defines parameters for BulkDeleteSavedObjects. +type BulkDeleteSavedObjectsParams struct { + // Force When true, force delete objects that exist in multiple namespaces. Note that the option applies to the whole request. Use the delete object API to specify per-object deletion behavior. TIP: Use this if you attempted to delete objects and received an HTTP 400 error with the following message: "Unable to delete saved object that exists in multiple namespaces, use the force option to delete it anyway". WARNING: When you bulk delete objects that exist in multiple namespaces, the API also deletes legacy url aliases that reference the object. These requests are batched to minimise the impact but they can place a heavy load on Kibana. Make sure you limit the number of objects that exist in multiple namespaces in a single bulk delete operation. + Force *bool `form:"force,omitempty" json:"force,omitempty"` +} + +// BulkGetSavedObjectsJSONBody defines parameters for BulkGetSavedObjects. +type BulkGetSavedObjectsJSONBody = []map[string]interface{} + +// BulkResolveSavedObjectsJSONBody defines parameters for BulkResolveSavedObjects. +type BulkResolveSavedObjectsJSONBody = []map[string]interface{} + +// BulkUpdateSavedObjectsJSONBody defines parameters for BulkUpdateSavedObjects. +type BulkUpdateSavedObjectsJSONBody = []map[string]interface{} + +// PostSavedObjectsExportJSONBody defines parameters for PostSavedObjectsExport. 
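// Illustrative usage sketch (not part of the generated code above) for
// PersistPinnedEventRouteJSONBody, assuming it compiles in the same package.
// Per the field comments, omitting PinnedEventId pins the event, while passing
// an existing pinned event's savedObjectId unpins it; identifiers are placeholders.
func examplePinEventBody(eventID, timelineID string, existingPinnedEventID *string) PersistPinnedEventRouteJSONBody {
	return PersistPinnedEventRouteJSONBody{
		EventId:       eventID,
		TimelineId:    timelineID,
		PinnedEventId: existingPinnedEventID, // nil when pinning for the first time
	}
}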
+type PostSavedObjectsExportJSONBody struct {
+	// ExcludeExportDetails Do not add an export details entry at the end of the stream.
+	ExcludeExportDetails *bool `json:"excludeExportDetails,omitempty"`
+	HasReference *PostSavedObjectsExportJSONBody_HasReference `json:"hasReference,omitempty"`
+
+	// IncludeReferencesDeep Includes all of the referenced objects in the exported objects.
+	IncludeReferencesDeep *bool `json:"includeReferencesDeep,omitempty"`
+
+	// Objects A list of objects to export. NOTE: this option cannot be combined with the `types` option
+	Objects *[]struct {
+		Id string `json:"id"`
+		Type string `json:"type"`
+	} `json:"objects,omitempty"`
+
+	// Search Search for documents to export using the Elasticsearch Simple Query String syntax.
+	Search *string `json:"search,omitempty"`
+
+	// Type The saved object types to include in the export. Use `*` to export all the types.
+	Type *PostSavedObjectsExportJSONBody_Type `json:"type,omitempty"`
+}
+
+// PostSavedObjectsExportJSONBodyHasReference0 defines parameters for PostSavedObjectsExport.
+type PostSavedObjectsExportJSONBodyHasReference0 struct {
+	Id string `json:"id"`
+	Type string `json:"type"`
+}
+
+// PostSavedObjectsExportJSONBodyHasReference1 defines parameters for PostSavedObjectsExport.
+type PostSavedObjectsExportJSONBodyHasReference1 = []struct {
+	Id string `json:"id"`
+	Type string `json:"type"`
+}
+
+// PostSavedObjectsExportJSONBody_HasReference defines parameters for PostSavedObjectsExport.
+type PostSavedObjectsExportJSONBody_HasReference struct {
+	union json.RawMessage
+}
+
+// PostSavedObjectsExportJSONBodyType0 defines parameters for PostSavedObjectsExport.
+type PostSavedObjectsExportJSONBodyType0 = string
+
+// PostSavedObjectsExportJSONBodyType1 defines parameters for PostSavedObjectsExport.
+type PostSavedObjectsExportJSONBodyType1 = []string
+
+// PostSavedObjectsExportJSONBody_Type defines parameters for PostSavedObjectsExport.
+type PostSavedObjectsExportJSONBody_Type struct {
+	union json.RawMessage
+}
+
+// FindSavedObjectsParams defines parameters for FindSavedObjects.
+type FindSavedObjectsParams struct {
+	// Aggs An aggregation structure, serialized as a string. The field format is similar to filter, meaning that to use a saved object type attribute in the aggregation, the `savedObjectType.attributes.title: "myTitle"` format must be used. For root fields, the syntax is `savedObjectType.rootField`. NOTE: As objects change in Kibana, the results on each page of the response also change. Use the find API for traditional paginated results, but avoid using it to export large amounts of data.
+	Aggs *string `form:"aggs,omitempty" json:"aggs,omitempty"`
+
+	// DefaultSearchOperator The default operator to use for the `simple_query_string`.
+	DefaultSearchOperator *string `form:"default_search_operator,omitempty" json:"default_search_operator,omitempty"`
+
+	// Fields The fields to return in the attributes key of the response.
+	Fields *struct {
+		union json.RawMessage
+	} `form:"fields,omitempty" json:"fields,omitempty"`
+
+	// Filter The filter is a KQL string with the caveat that if you filter with an attribute from your saved object type, it should look like this: `savedObjectType.attributes.title: "myTitle"`. However, if you use a root attribute of a saved object such as `updated_at`, you will have to define your filter like this: `savedObjectType.updated_at > 2018-12-22`.
+ Filter *string `form:"filter,omitempty" json:"filter,omitempty"` + + // HasNoReference Filters to objects that do not have a relationship with the type and identifier combination. + HasNoReference *map[string]interface{} `form:"has_no_reference,omitempty" json:"has_no_reference,omitempty"` + + // HasNoReferenceOperator The operator to use for the `has_no_reference` parameter. Either `OR` or `AND`. Defaults to `OR`. + HasNoReferenceOperator *string `form:"has_no_reference_operator,omitempty" json:"has_no_reference_operator,omitempty"` + + // HasReference Filters to objects that have a relationship with the type and ID combination. + HasReference *map[string]interface{} `form:"has_reference,omitempty" json:"has_reference,omitempty"` + + // HasReferenceOperator The operator to use for the `has_reference` parameter. Either `OR` or `AND`. Defaults to `OR`. + HasReferenceOperator *string `form:"has_reference_operator,omitempty" json:"has_reference_operator,omitempty"` + + // Page The page of objects to return. + Page *int `form:"page,omitempty" json:"page,omitempty"` + + // PerPage The number of objects to return per page. + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` + + // Search An Elasticsearch `simple_query_string` query that filters the objects in the response. + Search *string `form:"search,omitempty" json:"search,omitempty"` + + // SearchFields The fields to perform the `simple_query_string` parsed query against. + SearchFields *struct { + union json.RawMessage + } `form:"search_fields,omitempty" json:"search_fields,omitempty"` + + // SortField Sorts the response. Includes "root" and "type" fields. "root" fields exist for all saved objects, such as "updated_at". "type" fields are specific to an object type, such as fields returned in the attributes key of the response. When a single type is defined in the type parameter, the "root" and "type" fields are allowed, and validity checks are made in that order. When multiple types are defined in the type parameter, only "root" fields are allowed. + SortField *string `form:"sort_field,omitempty" json:"sort_field,omitempty"` + + // Type The saved object types to include. + Type struct { + union json.RawMessage + } `form:"type" json:"type"` +} + +// FindSavedObjectsParamsFields0 defines parameters for FindSavedObjects. +type FindSavedObjectsParamsFields0 = string + +// FindSavedObjectsParamsFields1 defines parameters for FindSavedObjects. +type FindSavedObjectsParamsFields1 = []interface{} + +// FindSavedObjectsParamsSearchFields0 defines parameters for FindSavedObjects. +type FindSavedObjectsParamsSearchFields0 = string + +// FindSavedObjectsParamsSearchFields1 defines parameters for FindSavedObjects. +type FindSavedObjectsParamsSearchFields1 = []interface{} + +// FindSavedObjectsParamsType0 defines parameters for FindSavedObjects. +type FindSavedObjectsParamsType0 = string + +// FindSavedObjectsParamsType1 defines parameters for FindSavedObjects. +type FindSavedObjectsParamsType1 = []interface{} + +// PostSavedObjectsImportMultipartBody defines parameters for PostSavedObjectsImport. +type PostSavedObjectsImportMultipartBody struct { + // File A file exported using the export API. Changing the contents of the exported file in any way before importing it can cause errors, crashes or data loss. NOTE: The `savedObjects.maxImportExportSize` configuration setting limits the number of saved objects which may be included in this file. 
Similarly, the `savedObjects.maxImportPayloadBytes` setting limits the overall size of the file that can be imported. + File map[string]interface{} `json:"file"` +} + +// PostSavedObjectsImportParams defines parameters for PostSavedObjectsImport. +type PostSavedObjectsImportParams struct { + // Overwrite Overwrites saved objects when they already exist. When used, potential conflict errors are automatically resolved by overwriting the destination object. NOTE: This option cannot be used with the `createNewCopies` option. + Overwrite *bool `form:"overwrite,omitempty" json:"overwrite,omitempty"` + + // CreateNewCopies Creates copies of saved objects, regenerates each object ID, and resets the origin. When used, potential conflict errors are avoided. NOTE: This option cannot be used with the `overwrite` and `compatibilityMode` options. + CreateNewCopies *bool `form:"createNewCopies,omitempty" json:"createNewCopies,omitempty"` + + // CompatibilityMode Applies various adjustments to the saved objects that are being imported to maintain compatibility between different Kibana versions. Use this option only if you encounter issues with imported saved objects. NOTE: This option cannot be used with the `createNewCopies` option. + CompatibilityMode *bool `form:"compatibilityMode,omitempty" json:"compatibilityMode,omitempty"` +} + +// ResolveImportErrorsMultipartBody defines parameters for ResolveImportErrors. +type ResolveImportErrorsMultipartBody struct { + // File The same file given to the import API. + File *openapi_types.File `json:"file,omitempty"` + + // Retries The retry operations, which can specify how to resolve different types of errors. + Retries []struct { + // DestinationId Specifies the destination ID that the imported object should have, if different from the current ID. + DestinationId *string `json:"destinationId,omitempty"` + + // Id The saved object ID. + Id string `json:"id"` + + // IgnoreMissingReferences When set to `true`, ignores missing reference errors. When set to `false`, does nothing. + IgnoreMissingReferences *bool `json:"ignoreMissingReferences,omitempty"` + + // Overwrite When set to `true`, the source object overwrites the conflicting destination object. When set to `false`, does nothing. + Overwrite *bool `json:"overwrite,omitempty"` + + // ReplaceReferences A list of `type`, `from`, and `to` used to change the object references. + ReplaceReferences *[]struct { + From *string `json:"from,omitempty"` + To *string `json:"to,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"replaceReferences,omitempty"` + + // Type The saved object type. + Type string `json:"type"` + } `json:"retries"` +} + +// ResolveImportErrorsParams defines parameters for ResolveImportErrors. +type ResolveImportErrorsParams struct { + // CompatibilityMode Applies various adjustments to the saved objects that are being imported to maintain compatibility between different Kibana versions. When enabled during the initial import, also enable when resolving import errors. This option cannot be used with the `createNewCopies` option. + CompatibilityMode *bool `form:"compatibilityMode,omitempty" json:"compatibilityMode,omitempty"` + + // CreateNewCopies Creates copies of the saved objects, regenerates each object ID, and resets the origin. When enabled during the initial import, also enable when resolving import errors. + CreateNewCopies *bool `form:"createNewCopies,omitempty" json:"createNewCopies,omitempty"` +} + +// CreateSavedObjectJSONBody defines parameters for CreateSavedObject. 
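// Illustrative usage sketch (not part of the generated code above) for
// PostSavedObjectsImportParams, assuming it compiles in the same package. The
// doc comments state that `createNewCopies` cannot be combined with `overwrite`
// or `compatibilityMode`, so only one of the mutually exclusive flags is set.
func exampleImportParams(regenerateIDs bool) PostSavedObjectsImportParams {
	enabled := true
	if regenerateIDs {
		// Regenerate object IDs and reset origins; conflict errors are avoided entirely.
		return PostSavedObjectsImportParams{CreateNewCopies: &enabled}
	}
	// Otherwise resolve conflicts by overwriting existing destination objects.
	return PostSavedObjectsImportParams{Overwrite: &enabled}
}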
+type CreateSavedObjectJSONBody struct {
+	// Attributes The data that you want to create. WARNING: When you create saved objects, attributes are not validated, which allows you to pass arbitrary and ill-formed data into the API that can break Kibana. Make sure any data that you send to the API is properly formed.
+	Attributes SavedObjectsAttributes `json:"attributes"`
+
+	// InitialNamespaces Identifiers for the spaces in which this object is created. If this is provided, the object is created only in the explicitly defined spaces. If this is not provided, the object is created in the current space (default behavior). For shareable object types (registered with `namespaceType: 'multiple'`), this option can be used to specify one or more spaces, including the "All spaces" identifier ('*'). For isolated object types (registered with `namespaceType: 'single'` or `namespaceType: 'multiple-isolated'`), this option can only be used to specify a single space, and the "All spaces" identifier ('*') is not allowed. For global object types (registered with `namespaceType: 'agnostic'`), this option cannot be used.
+	InitialNamespaces *SavedObjectsInitialNamespaces `json:"initialNamespaces,omitempty"`
+
+	// References Objects with `name`, `id`, and `type` properties that describe the other saved objects that this object references. Use `name` in attributes to refer to the other saved object, but never the `id`, which can update automatically during migrations or import and export.
+	References *SavedObjectsReferences `json:"references,omitempty"`
+}
+
+// CreateSavedObjectParams defines parameters for CreateSavedObject.
+type CreateSavedObjectParams struct {
+	// Overwrite If true, overwrites the document with the same identifier.
+	Overwrite *bool `form:"overwrite,omitempty" json:"overwrite,omitempty"`
+}
+
+// CreateSavedObjectIdJSONBody defines parameters for CreateSavedObjectId.
+type CreateSavedObjectIdJSONBody struct {
+	// Attributes The data that you want to create. WARNING: When you create saved objects, attributes are not validated, which allows you to pass arbitrary and ill-formed data into the API that can break Kibana. Make sure any data that you send to the API is properly formed.
+	Attributes SavedObjectsAttributes `json:"attributes"`
+
+	// InitialNamespaces Identifiers for the spaces in which this object is created. If this is provided, the object is created only in the explicitly defined spaces. If this is not provided, the object is created in the current space (default behavior). For shareable object types (registered with `namespaceType: 'multiple'`), this option can be used to specify one or more spaces, including the "All spaces" identifier ('*'). For isolated object types (registered with `namespaceType: 'single'` or `namespaceType: 'multiple-isolated'`), this option can only be used to specify a single space, and the "All spaces" identifier ('*') is not allowed. For global object types (registered with `namespaceType: 'agnostic'`), this option cannot be used.
+	InitialNamespaces *SavedObjectsInitialNamespaces `json:"initialNamespaces,omitempty"`
+
+	// References Objects with `name`, `id`, and `type` properties that describe the other saved objects that this object references. Use `name` in attributes to refer to the other saved object, but never the `id`, which can update automatically during migrations or import and export.
+	References *SavedObjectsReferences `json:"references,omitempty"`
+}
+
+// CreateSavedObjectIdParams defines parameters for CreateSavedObjectId.
+type CreateSavedObjectIdParams struct {
+	// Overwrite If true, overwrites the document with the same identifier.
+	Overwrite *bool `form:"overwrite,omitempty" json:"overwrite,omitempty"`
+}
+
+// UpdateSavedObjectJSONBody defines parameters for UpdateSavedObject.
+type UpdateSavedObjectJSONBody = map[string]interface{}
+
+// GetSecurityRoleParams defines parameters for GetSecurityRole.
+type GetSecurityRoleParams struct {
+	// ReplaceDeprecatedPrivileges If `true` and the response contains any privileges that are associated with deprecated features, they are omitted in favor of details about the appropriate replacement feature privileges.
+	ReplaceDeprecatedPrivileges *bool `form:"replaceDeprecatedPrivileges,omitempty" json:"replaceDeprecatedPrivileges,omitempty"`
+}
+
+// PostSecurityRoleQueryJSONBody defines parameters for PostSecurityRoleQuery.
+type PostSecurityRoleQueryJSONBody struct {
+	Filters *struct {
+		ShowReservedRoles *bool `json:"showReservedRoles,omitempty"`
+	} `json:"filters,omitempty"`
+	From *float32 `json:"from,omitempty"`
+	Query *string `json:"query,omitempty"`
+	Size *float32 `json:"size,omitempty"`
+	Sort *struct {
+		Direction PostSecurityRoleQueryJSONBodySortDirection `json:"direction"`
+		Field string `json:"field"`
+	} `json:"sort,omitempty"`
+}
+
+// PostSecurityRoleQueryJSONBodySortDirection defines parameters for PostSecurityRoleQuery.
+type PostSecurityRoleQueryJSONBodySortDirection string
+
+// GetSecurityRoleNameParams defines parameters for GetSecurityRoleName.
+type GetSecurityRoleNameParams struct {
+	// ReplaceDeprecatedPrivileges If `true` and the response contains any privileges that are associated with deprecated features, they are omitted in favor of details about the appropriate replacement feature privileges.
+	ReplaceDeprecatedPrivileges *bool `form:"replaceDeprecatedPrivileges,omitempty" json:"replaceDeprecatedPrivileges,omitempty"`
+}
+
+// PutSecurityRoleNameJSONBody defines parameters for PutSecurityRoleName.
+type PutSecurityRoleNameJSONBody struct {
+	// Description A description for the role.
+	Description *string `json:"description,omitempty"`
+	Elasticsearch struct {
+		Cluster *[]string `json:"cluster,omitempty"`
+		Indices *[]struct {
+			// AllowRestrictedIndices Restricted indices are a special category of indices that are used internally to store configuration data and should not be directly accessed. Only internal system roles should normally grant privileges over the restricted indices. Toggling this flag is very strongly discouraged because it could effectively grant unrestricted operations on critical data, making the entire system unstable or leaking sensitive information. If for administrative purposes you need to create a role with privileges covering restricted indices, however, you can set this property to true. In that case, the names field covers the restricted indices too.
+ AllowRestrictedIndices *bool `json:"allow_restricted_indices,omitempty"` + FieldSecurity *map[string][]string `json:"field_security,omitempty"` + Names []string `json:"names"` + Privileges []string `json:"privileges"` + + // Query A search query that defines the documents the role members have read access to. A document within the specified data streams and indices must match this query in order for it to be accessible by the role members. + Query *string `json:"query,omitempty"` + } `json:"indices,omitempty"` + RemoteCluster *[]struct { + Clusters []string `json:"clusters"` + Privileges []string `json:"privileges"` + } `json:"remote_cluster,omitempty"` + RemoteIndices *[]struct { + // AllowRestrictedIndices Restricted indices are a special category of indices that are used internally to store configuration data and should not be directly accessed. Only internal system roles should normally grant privileges over the restricted indices. Toggling this flag is very strongly discouraged because it could effectively grant unrestricted operations on critical data, making the entire system unstable or leaking sensitive information. If for administrative purposes you need to create a role with privileges covering restricted indices, however, you can set this property to true. In that case, the names field will cover the restricted indices too. + AllowRestrictedIndices *bool `json:"allow_restricted_indices,omitempty"` + Clusters []string `json:"clusters"` + FieldSecurity *map[string][]string `json:"field_security,omitempty"` + Names []string `json:"names"` + Privileges []string `json:"privileges"` + + // Query A search query that defines the documents the role members have read access to. A document within the specified data streams and indices must match this query in order for it to be accessible by the role members. + Query *string `json:"query,omitempty"` + } `json:"remote_indices,omitempty"` + RunAs *[]string `json:"run_as,omitempty"` + } `json:"elasticsearch"` + Kibana *[]struct { + Base *PutSecurityRoleNameJSONBody_Kibana_Base `json:"base,omitempty"` + Feature *map[string][]string `json:"feature,omitempty"` + Spaces *[]string `json:"spaces,omitempty"` + } `json:"kibana,omitempty"` + Metadata *map[string]interface{} `json:"metadata,omitempty"` +} + +// PutSecurityRoleNameParams defines parameters for PutSecurityRoleName. +type PutSecurityRoleNameParams struct { + // CreateOnly When true, a role is not overwritten if it already exists. + CreateOnly *bool `form:"createOnly,omitempty" json:"createOnly,omitempty"` +} + +// PutSecurityRoleNameJSONBodyKibanaBase0 defines parameters for PutSecurityRoleName. +type PutSecurityRoleNameJSONBodyKibanaBase0 = []string + +// PutSecurityRoleNameJSONBodyKibanaBase1 defines parameters for PutSecurityRoleName. +type PutSecurityRoleNameJSONBodyKibanaBase1 = []string + +// PutSecurityRoleNameJSONBody_Kibana_Base defines parameters for PutSecurityRoleName. +type PutSecurityRoleNameJSONBody_Kibana_Base struct { + union json.RawMessage +} + +// PostSecurityRolesJSONBody defines parameters for PostSecurityRoles. +type PostSecurityRolesJSONBody struct { + Roles struct { + // Description A description for the role. + Description *string `json:"description,omitempty"` + Elasticsearch struct { + Cluster *[]string `json:"cluster,omitempty"` + Indices *[]struct { + // AllowRestrictedIndices Restricted indices are a special category of indices that are used internally to store configuration data and should not be directly accessed. 
Only internal system roles should normally grant privileges over the restricted indices. Toggling this flag is very strongly discouraged because it could effectively grant unrestricted operations on critical data, making the entire system unstable or leaking sensitive information. If for administrative purposes you need to create a role with privileges covering restricted indices, however, you can set this property to true. In that case, the names field covers the restricted indices too. + AllowRestrictedIndices *bool `json:"allow_restricted_indices,omitempty"` + FieldSecurity *map[string][]string `json:"field_security,omitempty"` + Names []string `json:"names"` + Privileges []string `json:"privileges"` + + // Query A search query that defines the documents the role members have read access to. A document within the specified data streams and indices must match this query in order for it to be accessible by the role members. + Query *string `json:"query,omitempty"` + } `json:"indices,omitempty"` + RemoteCluster *[]struct { + Clusters []string `json:"clusters"` + Privileges []string `json:"privileges"` + } `json:"remote_cluster,omitempty"` + RemoteIndices *[]struct { + // AllowRestrictedIndices Restricted indices are a special category of indices that are used internally to store configuration data and should not be directly accessed. Only internal system roles should normally grant privileges over the restricted indices. Toggling this flag is very strongly discouraged because it could effectively grant unrestricted operations on critical data, making the entire system unstable or leaking sensitive information. If for administrative purposes you need to create a role with privileges covering restricted indices, however, you can set this property to true. In that case, the names field will cover the restricted indices too. + AllowRestrictedIndices *bool `json:"allow_restricted_indices,omitempty"` + Clusters []string `json:"clusters"` + FieldSecurity *map[string][]string `json:"field_security,omitempty"` + Names []string `json:"names"` + Privileges []string `json:"privileges"` + + // Query A search query that defines the documents the role members have read access to. A document within the specified data streams and indices must match this query in order for it to be accessible by the role members. + Query *string `json:"query,omitempty"` + } `json:"remote_indices,omitempty"` + RunAs *[]string `json:"run_as,omitempty"` + } `json:"elasticsearch"` + Kibana *[]struct { + Base *PostSecurityRolesJSONBody_Roles_Kibana_Base `json:"base,omitempty"` + Feature *map[string][]string `json:"feature,omitempty"` + Spaces *[]string `json:"spaces,omitempty"` + } `json:"kibana,omitempty"` + Metadata *map[string]interface{} `json:"metadata,omitempty"` + } `json:"roles"` +} + +// PostSecurityRolesJSONBodyRolesKibanaBase0 defines parameters for PostSecurityRoles. +type PostSecurityRolesJSONBodyRolesKibanaBase0 = []string + +// PostSecurityRolesJSONBodyRolesKibanaBase1 defines parameters for PostSecurityRoles. +type PostSecurityRolesJSONBodyRolesKibanaBase1 = []string + +// PostSecurityRolesJSONBody_Roles_Kibana_Base defines parameters for PostSecurityRoles. +type PostSecurityRolesJSONBody_Roles_Kibana_Base struct { + union json.RawMessage +} + +// PostSecuritySessionInvalidateJSONBody defines parameters for PostSecuritySessionInvalidate. +type PostSecuritySessionInvalidateJSONBody struct { + // Match The method Kibana uses to determine which sessions to invalidate. 
If it is `all`, all existing sessions will be invalidated. If it is `query`, only the sessions that match the query will be invalidated.
+	Match PostSecuritySessionInvalidateJSONBodyMatch `json:"match"`
+
+	// Query The query that Kibana uses to match the sessions to invalidate when the `match` parameter is set to `query`.
+	Query *struct {
+		// Provider The authentication providers that will have their user sessions invalidated.
+		Provider struct {
+			// Name The authentication provider name.
+			Name *string `json:"name,omitempty"`
+
+			// Type The authentication provider type. For example: `basic`, `token`, `saml`, `oidc`, `kerberos`, or `pki`.
+			Type string `json:"type"`
+		} `json:"provider"`
+
+		// Username The username that will have its sessions invalidated.
+		Username *string `json:"username,omitempty"`
+	} `json:"query,omitempty"`
+}
+
+// PostSecuritySessionInvalidateJSONBodyMatch defines parameters for PostSecuritySessionInvalidate.
+type PostSecuritySessionInvalidateJSONBodyMatch string
+
+// PerformAnonymizationFieldsBulkActionJSONBody defines parameters for PerformAnonymizationFieldsBulkAction.
+type PerformAnonymizationFieldsBulkActionJSONBody struct {
+	// Create Array of anonymization fields to create.
+	Create *[]SecurityAIAssistantAPIAnonymizationFieldCreateProps `json:"create,omitempty"`
+
+	// Delete Object containing the query to filter anonymization fields and/or an array of anonymization field IDs to delete.
+	Delete *struct {
+		// Ids Array of IDs to apply the action to.
+		Ids *[]string `json:"ids,omitempty"`
+
+		// Query Query to filter the bulk action.
+		Query *string `json:"query,omitempty"`
+	} `json:"delete,omitempty"`
+
+	// Update Array of anonymization fields to update.
+	Update *[]SecurityAIAssistantAPIAnonymizationFieldUpdateProps `json:"update,omitempty"`
+}
+
+// FindAnonymizationFieldsParams defines parameters for FindAnonymizationFields.
+type FindAnonymizationFieldsParams struct {
+	// Fields Fields to return
+	Fields *[]string `form:"fields,omitempty" json:"fields,omitempty"`
+
+	// Filter Search query
+	Filter *string `form:"filter,omitempty" json:"filter,omitempty"`
+
+	// SortField Field to sort by
+	SortField *SecurityAIAssistantAPIFindAnonymizationFieldsSortField `form:"sort_field,omitempty" json:"sort_field,omitempty"`
+
+	// SortOrder Sort order
+	SortOrder *SecurityAIAssistantAPISortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"`
+
+	// Page Page number
+	Page *int `form:"page,omitempty" json:"page,omitempty"`
+
+	// PerPage AnonymizationFields per page
+	PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"`
+
+	// AllData If true, additionally fetch all anonymization fields, otherwise fetch only the provided page
+	AllData *bool `form:"all_data,omitempty" json:"all_data,omitempty"`
+}
+
+// ChatCompleteParams defines parameters for ChatComplete.
+type ChatCompleteParams struct {
+	// ContentReferencesDisabled If true, the response will not include content references.
+	ContentReferencesDisabled *bool `form:"content_references_disabled,omitempty" json:"content_references_disabled,omitempty"`
+}
+
+// DeleteAllConversationsJSONBody defines parameters for DeleteAllConversations.
+type DeleteAllConversationsJSONBody struct {
+	// ExcludedIds Optional list of conversation IDs to exclude from deletion.
+	ExcludedIds *[]string `json:"excludedIds,omitempty"`
+}
+
+// FindConversationsParams defines parameters for FindConversations.
+type FindConversationsParams struct {
+	// Fields A list of fields to include in the response.
If omitted, all fields are returned. + Fields *[]string `form:"fields,omitempty" json:"fields,omitempty"` + + // Filter A search query to filter the conversations. Can match against titles, messages, or other conversation attributes. + Filter *string `form:"filter,omitempty" json:"filter,omitempty"` + + // SortField The field by which to sort the results. Valid fields are `created_at`, `title`, and `updated_at`. + SortField *SecurityAIAssistantAPIFindConversationsSortField `form:"sort_field,omitempty" json:"sort_field,omitempty"` + + // SortOrder The order in which to sort the results. Can be either `asc` for ascending or `desc` for descending. + SortOrder *SecurityAIAssistantAPISortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"` + + // Page The page number of the results to retrieve. Default is 1. + Page *int `form:"page,omitempty" json:"page,omitempty"` + + // PerPage The number of conversations to return per page. Default is 20. + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` + + // IsOwner Whether to return conversations that the current user owns. If true, only conversations owned by the user are returned. + IsOwner *bool `form:"is_owner,omitempty" json:"is_owner,omitempty"` +} + +// PerformKnowledgeBaseEntryBulkActionJSONBody defines parameters for PerformKnowledgeBaseEntryBulkAction. +type PerformKnowledgeBaseEntryBulkActionJSONBody struct { + // Create List of Knowledge Base Entries to create. + Create *[]SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps `json:"create,omitempty"` + Delete *struct { + // Ids Array of Knowledge Base Entry IDs. + Ids *[]string `json:"ids,omitempty"` + + // Query Query to filter Knowledge Base Entries. + Query *string `json:"query,omitempty"` + } `json:"delete,omitempty"` + + // Update List of Knowledge Base Entries to update. + Update *[]SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps `json:"update,omitempty"` +} + +// FindKnowledgeBaseEntriesParams defines parameters for FindKnowledgeBaseEntries. +type FindKnowledgeBaseEntriesParams struct { + // Fields A list of fields to include in the response. If not provided, all fields will be included. + Fields *[]string `form:"fields,omitempty" json:"fields,omitempty"` + + // Filter Search query to filter Knowledge Base Entries by specific criteria. + Filter *string `form:"filter,omitempty" json:"filter,omitempty"` + + // SortField Field to sort the Knowledge Base Entries by. + SortField *SecurityAIAssistantAPIFindKnowledgeBaseEntriesSortField `form:"sort_field,omitempty" json:"sort_field,omitempty"` + + // SortOrder Sort order for the results, either asc or desc. + SortOrder *SecurityAIAssistantAPISortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"` + + // Page Page number for paginated results. Defaults to 1. + Page *int `form:"page,omitempty" json:"page,omitempty"` + + // PerPage Number of Knowledge Base Entries to return per page. Defaults to 20. + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` +} + +// CreateKnowledgeBaseParams defines parameters for CreateKnowledgeBase. +type CreateKnowledgeBaseParams struct { + // ModelId ELSER modelId to use when setting up the Knowledge Base. If not provided, a default model will be used. + ModelId *string `form:"modelId,omitempty" json:"modelId,omitempty"` + + // IgnoreSecurityLabs Indicates whether we should or should not install Security Labs docs when setting up the Knowledge Base. Defaults to `false`. 
+ IgnoreSecurityLabs *bool `form:"ignoreSecurityLabs,omitempty" json:"ignoreSecurityLabs,omitempty"` +} + +// PerformPromptsBulkActionJSONBody defines parameters for PerformPromptsBulkAction. +type PerformPromptsBulkActionJSONBody struct { + // Create List of prompts to be created. + Create *[]SecurityAIAssistantAPIPromptCreateProps `json:"create,omitempty"` + + // Delete Criteria for deleting prompts in bulk. + Delete *struct { + // Ids Array of IDs to apply the action to. + Ids *[]string `json:"ids,omitempty"` + + // Query Query to filter the bulk action. + Query *string `json:"query,omitempty"` + } `json:"delete,omitempty"` + + // Update List of prompts to be updated. + Update *[]SecurityAIAssistantAPIPromptUpdateProps `json:"update,omitempty"` +} + +// FindPromptsParams defines parameters for FindPrompts. +type FindPromptsParams struct { + // Fields List of specific fields to include in each returned prompt. + Fields *[]string `form:"fields,omitempty" json:"fields,omitempty"` + + // Filter Search query string to filter prompts by matching fields. + Filter *string `form:"filter,omitempty" json:"filter,omitempty"` + + // SortField Field to sort prompts by. + SortField *SecurityAIAssistantAPIFindPromptsSortField `form:"sort_field,omitempty" json:"sort_field,omitempty"` + + // SortOrder Sort order, either asc or desc. + SortOrder *SecurityAIAssistantAPISortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"` + + // Page Page number for pagination. + Page *int `form:"page,omitempty" json:"page,omitempty"` + + // PerPage Number of prompts per page. + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` +} + +// PostUrlJSONBody defines parameters for PostUrl. +type PostUrlJSONBody struct { + // HumanReadableSlug When the `slug` parameter is omitted, the API will generate a random human-readable slug if `humanReadableSlug` is set to true. + HumanReadableSlug *bool `json:"humanReadableSlug,omitempty"` + + // LocatorId The identifier for the locator. + LocatorId string `json:"locatorId"` + + // Params An object which contains all necessary parameters for the given locator to resolve to a Kibana location. + // > warn + // > When you create a short URL, locator params are not validated, which allows you to pass arbitrary and ill-formed data into the API that can break Kibana. Make sure any data that you send to the API is properly formed. + Params map[string]interface{} `json:"params"` + + // Slug A custom short URL slug. The slug is the part of the short URL that identifies it. You can provide a custom slug which consists of latin alphabet letters, numbers, and `-._` characters. The slug must be at least 3 characters long, but no longer than 255 characters. + Slug *string `json:"slug,omitempty"` +} + +// PostSpacesCopySavedObjectsJSONBody defines parameters for PostSpacesCopySavedObjects. +type PostSpacesCopySavedObjectsJSONBody struct { + // CompatibilityMode Apply various adjustments to the saved objects that are being copied to maintain compatibility between different Kibana versions. Use this option only if you encounter issues with copied saved objects. This option cannot be used with the `createNewCopies` option. + CompatibilityMode *bool `json:"compatibilityMode,omitempty"` + + // CreateNewCopies Create new copies of saved objects, regenerate each object identifier, and reset the origin. When used, potential conflict errors are avoided. This option cannot be used with the `overwrite` and `compatibilityMode` options. 
+ CreateNewCopies *bool `json:"createNewCopies,omitempty"` + + // IncludeReferences When set to true, all saved objects related to the specified saved objects will also be copied into the target spaces. + IncludeReferences *bool `json:"includeReferences,omitempty"` + Objects []struct { + // Id The identifier of the saved object to copy. + Id string `json:"id"` + + // Type The type of the saved object to copy. + Type string `json:"type"` + } `json:"objects"` + + // Overwrite When set to true, all conflicts are automatically overridden. When a saved object with a matching type and identifier exists in the target space, that version is replaced with the version from the source space. This option cannot be used with the `createNewCopies` option. + Overwrite *bool `json:"overwrite,omitempty"` + Spaces []string `json:"spaces"` +} + +// PostSpacesDisableLegacyUrlAliasesJSONBody defines parameters for PostSpacesDisableLegacyUrlAliases. +type PostSpacesDisableLegacyUrlAliasesJSONBody struct { + Aliases []struct { + // SourceId The alias source object identifier. This is the legacy object identifier. + SourceId string `json:"sourceId"` + + // TargetSpace The space where the alias target object exists. + TargetSpace string `json:"targetSpace"` + + // TargetType The type of alias target object. + TargetType string `json:"targetType"` + } `json:"aliases"` +} + +// PostSpacesGetShareableReferencesJSONBody defines parameters for PostSpacesGetShareableReferences. +type PostSpacesGetShareableReferencesJSONBody struct { + Objects []struct { + Id string `json:"id"` + Type string `json:"type"` + } `json:"objects"` +} + +// PostSpacesResolveCopySavedObjectsErrorsJSONBody defines parameters for PostSpacesResolveCopySavedObjectsErrors. +type PostSpacesResolveCopySavedObjectsErrorsJSONBody struct { + CompatibilityMode *bool `json:"compatibilityMode,omitempty"` + CreateNewCopies *bool `json:"createNewCopies,omitempty"` + IncludeReferences *bool `json:"includeReferences,omitempty"` + Objects []struct { + Id string `json:"id"` + Type string `json:"type"` + } `json:"objects"` + Retries map[string][]struct { + // CreateNewCopy Creates new copies of the saved objects, regenerates each object ID, and resets the origin. + CreateNewCopy *bool `json:"createNewCopy,omitempty"` + + // DestinationId Specifies the destination identifier that the copied object should have, if different from the current identifier. + DestinationId *string `json:"destinationId,omitempty"` + + // Id The saved object identifier. + Id string `json:"id"` + + // IgnoreMissingReferences When set to true, any missing references errors are ignored. + IgnoreMissingReferences *bool `json:"ignoreMissingReferences,omitempty"` + + // Overwrite When set to true, the saved object from the source space overwrites the conflicting object in the destination space. + Overwrite *bool `json:"overwrite,omitempty"` + + // Type The saved object type. + Type string `json:"type"` + } `json:"retries"` +} + +// PostSpacesUpdateObjectsSpacesJSONBody defines parameters for PostSpacesUpdateObjectsSpaces. +type PostSpacesUpdateObjectsSpacesJSONBody struct { + Objects []struct { + // Id The identifier of the saved object to update. + Id string `json:"id"` + + // Type The type of the saved object to update. + Type string `json:"type"` + } `json:"objects"` + SpacesToAdd []string `json:"spacesToAdd"` + SpacesToRemove []string `json:"spacesToRemove"` +} + +// GetSpacesSpaceParams defines parameters for GetSpacesSpace. 
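// Illustrative usage sketch (not part of the generated code above) for
// PostSpacesCopySavedObjectsJSONBody, assuming it compiles in the same package.
// The object and space identifiers are placeholders; per the doc comments,
// CreateNewCopies is not combinable with Overwrite or CompatibilityMode.
func exampleCopyToSpacesBody() PostSpacesCopySavedObjectsJSONBody {
	includeReferences := true
	createNewCopies := true
	return PostSpacesCopySavedObjectsJSONBody{
		Objects: []struct {
			Id   string `json:"id"`
			Type string `json:"type"`
		}{
			{Id: "my-dashboard-id", Type: "dashboard"},
		},
		Spaces:            []string{"marketing", "engineering"},
		IncludeReferences: &includeReferences,
		CreateNewCopies:   &createNewCopies,
	}
}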
+type GetSpacesSpaceParams struct {
+	// Purpose Specifies which authorization checks are applied to the API call. The default value is `any`.
+	Purpose *GetSpacesSpaceParamsPurpose `form:"purpose,omitempty" json:"purpose,omitempty"`
+
+	// IncludeAuthorizedPurposes When enabled, the API returns any spaces that the user is authorized to access in any capacity and each space will contain the purposes for which the user is authorized. This can be useful to determine which spaces a user can read but not take a specific action in. If the security plugin is not enabled, this parameter has no effect, since no authorization checks take place. This parameter cannot be used in conjunction with the `purpose` parameter.
+	IncludeAuthorizedPurposes struct {
+		union json.RawMessage
+	} `form:"include_authorized_purposes" json:"include_authorized_purposes"`
+}
+
+// GetSpacesSpaceParamsPurpose defines parameters for GetSpacesSpace.
+type GetSpacesSpaceParamsPurpose string
+
+// GetSpacesSpaceParamsIncludeAuthorizedPurposes0 defines parameters for GetSpacesSpace.
+type GetSpacesSpaceParamsIncludeAuthorizedPurposes0 bool
+
+// GetSpacesSpaceParamsIncludeAuthorizedPurposes1 defines parameters for GetSpacesSpace.
+type GetSpacesSpaceParamsIncludeAuthorizedPurposes1 = bool
+
+// PostSpacesSpaceJSONBody defines parameters for PostSpacesSpace.
+type PostSpacesSpaceJSONBody struct {
+	UnderscoreReserved *bool `json:"_reserved,omitempty"`
+
+	// Color The hexadecimal color code used in the space avatar. By default, the color is automatically generated from the space name.
+	Color *string `json:"color,omitempty"`
+
+	// Description A description for the space.
+	Description *string `json:"description,omitempty"`
+	DisabledFeatures *[]string `json:"disabledFeatures,omitempty"`
+
+	// Id The space ID that is part of the Kibana URL when inside the space. Space IDs are limited to lowercase alphanumeric, underscore, and hyphen characters (a-z, 0-9, _, and -). You cannot change the ID with the update operation.
+	Id string `json:"id"`
+
+	// ImageUrl The data-URL encoded image to display in the space avatar. If specified, initials will not be displayed and the color will be visible as the background color for transparent images. For best results, your image should be 64x64. Images will not be optimized by this API call, so care should be taken when using custom images.
+	ImageUrl *string `json:"imageUrl,omitempty"`
+
+	// Initials One or two characters that are shown in the space avatar. By default, the initials are automatically generated from the space name.
+	Initials *string `json:"initials,omitempty"`
+
+	// Name The display name for the space.
+	Name string `json:"name"`
+	Solution *PostSpacesSpaceJSONBodySolution `json:"solution,omitempty"`
+}
+
+// PostSpacesSpaceJSONBodySolution defines parameters for PostSpacesSpace.
+type PostSpacesSpaceJSONBodySolution string
+
+// PutSpacesSpaceIdJSONBody defines parameters for PutSpacesSpaceId.
+type PutSpacesSpaceIdJSONBody struct {
+	UnderscoreReserved *bool `json:"_reserved,omitempty"`
+
+	// Color The hexadecimal color code used in the space avatar. By default, the color is automatically generated from the space name.
+	Color *string `json:"color,omitempty"`
+
+	// Description A description for the space.
+	Description *string `json:"description,omitempty"`
+	DisabledFeatures *[]string `json:"disabledFeatures,omitempty"`
+
+	// Id The space ID that is part of the Kibana URL when inside the space.
Space IDs are limited to lowercase alphanumeric, underscore, and hyphen characters (a-z, 0-9, _, and -). You cannot change the ID with the update operation.
+	Id string `json:"id"`
+
+	// ImageUrl The data-URL encoded image to display in the space avatar. If specified, initials will not be displayed and the color will be visible as the background color for transparent images. For best results, your image should be 64x64. Images will not be optimized by this API call, so care should be taken when using custom images.
+	ImageUrl *string `json:"imageUrl,omitempty"`
+
+	// Initials One or two characters that are shown in the space avatar. By default, the initials are automatically generated from the space name.
+	Initials *string `json:"initials,omitempty"`
+
+	// Name The display name for the space.
+	Name string `json:"name"`
+	Solution *PutSpacesSpaceIdJSONBodySolution `json:"solution,omitempty"`
+}
+
+// PutSpacesSpaceIdJSONBodySolution defines parameters for PutSpacesSpaceId.
+type PutSpacesSpaceIdJSONBodySolution string
+
+// GetStatusParams defines parameters for GetStatus.
+type GetStatusParams struct {
+	// V7format Set to "true" to get the response in v7 format.
+	V7format *bool `form:"v7format,omitempty" json:"v7format,omitempty"`
+
+	// V8format Set to "true" to get the response in v8 format.
+	V8format *bool `form:"v8format,omitempty" json:"v8format,omitempty"`
+}
+
+// GetStreamsJSONBody defines parameters for GetStreams.
+type GetStreamsJSONBody struct {
+	union json.RawMessage
+}
+
+// GetStreamsJSONBody0 defines parameters for GetStreams.
+type GetStreamsJSONBody0 = map[string]interface{}
+
+// GetStreamsJSONBody1 defines parameters for GetStreams.
+type GetStreamsJSONBody1 = interface{}
+
+// GetStreamsJSONBody2 defines parameters for GetStreams.
+type GetStreamsJSONBody2 = interface{}
+
+// PostStreamsDisableJSONBody defines parameters for PostStreamsDisable.
+type PostStreamsDisableJSONBody struct {
+	union json.RawMessage
+}
+
+// PostStreamsDisableJSONBody0 defines parameters for PostStreamsDisable.
+type PostStreamsDisableJSONBody0 = map[string]interface{}
+
+// PostStreamsDisableJSONBody1 defines parameters for PostStreamsDisable.
+type PostStreamsDisableJSONBody1 = interface{}
+
+// PostStreamsDisableJSONBody2 defines parameters for PostStreamsDisable.
+type PostStreamsDisableJSONBody2 = interface{}
+
+// PostStreamsEnableJSONBody defines parameters for PostStreamsEnable.
+type PostStreamsEnableJSONBody struct {
+	union json.RawMessage
+}
+
+// PostStreamsEnableJSONBody0 defines parameters for PostStreamsEnable.
+type PostStreamsEnableJSONBody0 = map[string]interface{}
+
+// PostStreamsEnableJSONBody1 defines parameters for PostStreamsEnable.
+type PostStreamsEnableJSONBody1 = interface{}
+
+// PostStreamsEnableJSONBody2 defines parameters for PostStreamsEnable.
+type PostStreamsEnableJSONBody2 = interface{}
+
+// PostStreamsResyncJSONBody defines parameters for PostStreamsResync.
+type PostStreamsResyncJSONBody struct {
+	union json.RawMessage
+}
+
+// PostStreamsResyncJSONBody0 defines parameters for PostStreamsResync.
+type PostStreamsResyncJSONBody0 = map[string]interface{}
+
+// PostStreamsResyncJSONBody1 defines parameters for PostStreamsResync.
+type PostStreamsResyncJSONBody1 = interface{}
+
+// PostStreamsResyncJSONBody2 defines parameters for PostStreamsResync.
+type PostStreamsResyncJSONBody2 = interface{}
+
+// DeleteStreamsNameJSONBody defines parameters for DeleteStreamsName.
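// Illustrative usage sketch (not part of the generated code above) for
// PutSpacesSpaceIdJSONBody, assuming it compiles in the same package. Values
// are placeholders; Id must match the space being updated and, per the doc
// comment, cannot itself be changed by the update operation.
func exampleUpdateSpaceBody() PutSpacesSpaceIdJSONBody {
	description := "Workspace for the observability team"
	initials := "OB"
	return PutSpacesSpaceIdJSONBody{
		Id:          "observability",
		Name:        "Observability",
		Description: &description,
		Initials:    &initials,
	}
}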
+type DeleteStreamsNameJSONBody struct { + union json.RawMessage +} + +// DeleteStreamsNameJSONBody0 defines parameters for DeleteStreamsName. +type DeleteStreamsNameJSONBody0 = map[string]interface{} + +// DeleteStreamsNameJSONBody1 defines parameters for DeleteStreamsName. +type DeleteStreamsNameJSONBody1 = interface{} + +// DeleteStreamsNameJSONBody2 defines parameters for DeleteStreamsName. +type DeleteStreamsNameJSONBody2 = interface{} + +// GetStreamsNameJSONBody defines parameters for GetStreamsName. +type GetStreamsNameJSONBody struct { + union json.RawMessage +} + +// GetStreamsNameJSONBody0 defines parameters for GetStreamsName. +type GetStreamsNameJSONBody0 = map[string]interface{} + +// GetStreamsNameJSONBody1 defines parameters for GetStreamsName. +type GetStreamsNameJSONBody1 = interface{} + +// GetStreamsNameJSONBody2 defines parameters for GetStreamsName. +type GetStreamsNameJSONBody2 = interface{} + +// PutStreamsNameJSONBody defines parameters for PutStreamsName. +type PutStreamsNameJSONBody struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00 struct { + Dashboards []string `json:"dashboards"` + Queries []struct { + Id string `json:"id"` + Kql struct { + Query string `json:"query"` + } `json:"kql"` + Title string `json:"title"` + } `json:"queries"` + Rules []string `json:"rules"` + Stream struct { + Ingest struct { + Lifecycle PutStreamsNameJSONBody_0_0_Stream_Ingest_Lifecycle `json:"lifecycle"` + Processing struct { + Steps []PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_Item `json:"steps"` + } `json:"processing"` + } `json:"ingest"` + Name interface{} `json:"name,omitempty"` + } `json:"stream"` +} + +// PutStreamsNameJSONBody00StreamIngestLifecycle0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestLifecycle0 struct { + Dsl struct { + DataRetention *string `json:"data_retention,omitempty"` + } `json:"dsl"` +} + +// PutStreamsNameJSONBody00StreamIngestLifecycle1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestLifecycle1 struct { + Ilm struct { + Policy string `json:"policy"` + } `json:"ilm"` +} + +// PutStreamsNameJSONBody00StreamIngestLifecycle2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestLifecycle2 struct { + Inherit map[string]interface{} `json:"inherit"` +} + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Lifecycle defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Lifecycle struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps00 struct { + Action PutStreamsNameJSONBody00StreamIngestProcessingSteps00Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Patterns []string `json:"patterns"` + Where *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Action string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00 struct { + Contains *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Contains0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Contains1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Contains2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Contains defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00EndsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Eq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Eq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Eq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lt0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Neq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Neq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGte1 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00StartsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where1 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps00Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_0_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01 struct { + Action PutStreamsNameJSONBody00StreamIngestProcessingSteps01Action `json:"action"` + AppendSeparator *string `json:"append_separator,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Pattern string `json:"pattern"` + Where *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Action string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00 struct { + Contains *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Contains0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Contains1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Contains2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00EndsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Eq0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Eq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Eq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lte1 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Neq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Neq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLt2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00StartsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps01Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_1_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02 struct { + Action PutStreamsNameJSONBody00StreamIngestProcessingSteps02Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + Formats []string `json:"formats"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + OutputFormat *string `json:"output_format,omitempty"` + To *string `json:"to,omitempty"` + Where *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Action string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00 struct { + Contains *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Contains0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Contains1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Contains2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00EndsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Eq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Eq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Eq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gte defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Neq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Neq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGte0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00StartsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where01 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps02Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_2_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03 struct { + Action PutStreamsNameJSONBody00StreamIngestProcessingSteps03Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Override *bool `json:"override,omitempty"` + To string `json:"to"` + Where *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Action string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00 struct { + Contains *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Contains0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Contains1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Contains2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00EndsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Eq0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Eq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Eq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lte1 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Neq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Neq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLt2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00StartsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps03Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_3_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04 struct { + Action PutStreamsNameJSONBody00StreamIngestProcessingSteps04Action `json:"action"` + CopyFrom *string `json:"copy_from,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + Override *bool `json:"override,omitempty"` + To string `json:"to"` + Value *string `json:"value,omitempty"` + Where *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Action string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00 struct { + Contains *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Contains0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Contains1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Contains2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00EndsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Eq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Eq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Eq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gte defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Neq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Neq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGte0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00StartsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where01 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps04Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_4_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05 struct { + Action PutStreamsNameJSONBody00StreamIngestProcessingSteps05Action `json:"action"` + AllowDuplicates *bool `json:"allow_duplicates,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + To string `json:"to"` + Value []interface{} `json:"value"` + Where *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Action string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00 struct { + Contains *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Contains0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Contains1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Contains2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00EndsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Eq0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Eq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Eq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lte1 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Neq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Neq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLt2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00StartsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps05Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_5_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06 struct { + Action PutStreamsNameJSONBody00StreamIngestProcessingSteps06Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + OnFailure *[]map[string]interface{} `json:"on_failure,omitempty"` + Processors []struct { + Append interface{} `json:"append"` + Attachment interface{} `json:"attachment"` + Bytes interface{} `json:"bytes"` + Circle interface{} `json:"circle"` + CommunityId interface{} `json:"community_id"` + Convert interface{} `json:"convert"` + Csv interface{} `json:"csv"` + Date interface{} `json:"date"` + DateIndexName interface{} `json:"date_index_name"` + Dissect interface{} `json:"dissect"` + DotExpander interface{} `json:"dot_expander"` + Drop interface{} `json:"drop"` + Enrich interface{} `json:"enrich"` + Fail interface{} `json:"fail"` + Fingerprint interface{} `json:"fingerprint"` + Foreach interface{} `json:"foreach"` + GeoGrid interface{} `json:"geo_grid"` + Geoip interface{} `json:"geoip"` + Grok interface{} `json:"grok"` + Gsub interface{} `json:"gsub"` + HtmlStrip interface{} `json:"html_strip"` + Inference interface{} `json:"inference"` + IpLocation interface{} `json:"ip_location"` + Join interface{} `json:"join"` + Json interface{} `json:"json"` + Kv interface{} `json:"kv"` + Lowercase interface{} `json:"lowercase"` + NetworkDirection interface{} `json:"network_direction"` + Pipeline interface{} `json:"pipeline"` + Redact interface{} `json:"redact"` + RegisteredDomain interface{} `json:"registered_domain"` + Remove interface{} `json:"remove"` + Rename interface{} `json:"rename"` + Reroute interface{} `json:"reroute"` + Script interface{} `json:"script"` + Set interface{} `json:"set"` + SetSecurityUser interface{} `json:"set_security_user"` + Sort interface{} `json:"sort"` + Split interface{} `json:"split"` + Terminate interface{} `json:"terminate"` + Trim interface{} `json:"trim"` + Uppercase interface{} `json:"uppercase"` + UriParts interface{} `json:"uri_parts"` + Urldecode interface{} `json:"urldecode"` + UserAgent interface{} `json:"user_agent"` + } `json:"processors"` + Tag *string `json:"tag,omitempty"` + Where *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Action string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00 struct { + Contains *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Contains0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Contains1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Contains2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00EndsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Eq0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Eq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Eq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lte1 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Neq0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Neq1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLt0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLt2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLte0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00StartsWith0 = string + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps06Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_0_6_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody00StreamIngestProcessingSteps1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody00StreamIngestProcessingSteps1 struct { + Where struct { + Steps []interface{} `json:"steps"` + } `json:"where"` +} + +// PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_Item defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_0_Stream_Ingest_Processing_Steps_Item struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01 struct { + Dashboards []string `json:"dashboards"` + Queries []struct { + Id string `json:"id"` + Kql struct { + Query string `json:"query"` + } `json:"kql"` + Title string `json:"title"` + } `json:"queries"` + Rules []string `json:"rules"` + Stream struct { + Ingest struct { + Lifecycle PutStreamsNameJSONBody_0_1_Stream_Ingest_Lifecycle `json:"lifecycle"` + Processing struct { + Steps []PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_Item `json:"steps"` + } `json:"processing"` + } `json:"ingest"` + Name interface{} `json:"name,omitempty"` + } `json:"stream"` +} + +// PutStreamsNameJSONBody01StreamIngestLifecycle0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestLifecycle0 struct { + Dsl struct { + DataRetention *string `json:"data_retention,omitempty"` + } `json:"dsl"` +} + +// PutStreamsNameJSONBody01StreamIngestLifecycle1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestLifecycle1 struct { + Ilm struct { + Policy string `json:"policy"` + } `json:"ilm"` +} + +// PutStreamsNameJSONBody01StreamIngestLifecycle2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestLifecycle2 struct { + Inherit map[string]interface{} `json:"inherit"` +} + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Lifecycle defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Lifecycle struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00 struct { + Action PutStreamsNameJSONBody01StreamIngestProcessingSteps00Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Patterns []string `json:"patterns"` + Where *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Action string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00 struct { + Contains *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Contains0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Contains1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Contains2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00EndsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Eq0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Eq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Eq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lte1 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Neq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Neq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLt2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00StartsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps00Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_0_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01 struct { + Action PutStreamsNameJSONBody01StreamIngestProcessingSteps01Action `json:"action"` + AppendSeparator *string `json:"append_separator,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Pattern string `json:"pattern"` + Where *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Action string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00 struct { + Contains *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Contains0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Contains1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Contains2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00EndsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Eq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Eq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Eq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gte defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Neq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Neq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGte0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00StartsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where01 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps01Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_1_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02 struct { + Action PutStreamsNameJSONBody01StreamIngestProcessingSteps02Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + Formats []string `json:"formats"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + OutputFormat *string `json:"output_format,omitempty"` + To *string `json:"to,omitempty"` + Where *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Action string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00 struct { + Contains *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Contains0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Contains1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Contains2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00EndsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Eq0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Eq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Eq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lte1 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Neq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Neq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLt2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00StartsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps02Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_2_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03 struct { + Action PutStreamsNameJSONBody01StreamIngestProcessingSteps03Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Override *bool `json:"override,omitempty"` + To string `json:"to"` + Where *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Action string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00 struct { + Contains *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Contains0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Contains1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Contains2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00EndsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Eq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Eq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Eq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gte defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Neq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Neq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGte0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00StartsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where01 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps03Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_3_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04 struct { + Action PutStreamsNameJSONBody01StreamIngestProcessingSteps04Action `json:"action"` + CopyFrom *string `json:"copy_from,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + Override *bool `json:"override,omitempty"` + To string `json:"to"` + Value *string `json:"value,omitempty"` + Where *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Action string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00 struct { + Contains *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Contains0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Contains1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Contains2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00EndsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Eq0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Eq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Eq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lte1 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Neq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Neq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLt2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00StartsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps04Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_4_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05 struct { + Action PutStreamsNameJSONBody01StreamIngestProcessingSteps05Action `json:"action"` + AllowDuplicates *bool `json:"allow_duplicates,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + To string `json:"to"` + Value []interface{} `json:"value"` + Where *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where `json:"where,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Action defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Action string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where0 struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00 struct { + Contains *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Contains0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Contains0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Contains1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Contains1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Contains2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Contains2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Contains defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00EndsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00EndsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Eq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Eq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Eq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gte defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Neq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Neq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGte0 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00StartsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where01 defines parameters for PutStreamsName. 
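+// Illustrative note (editor's sketch, not generated code): judging from the
+// Exists and Field members below, a condition of this shape would be expressed
+// in JSON roughly as
+//
+//	{"field": "attributes.host.name", "exists": true}
+//
+// where the field name shown here is only an assumed example value.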
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where01 struct {
+	Exists *bool `json:"exists,omitempty"`
+	Field string `json:"field"`
+}
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where1 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where1 struct {
+	And []interface{} `json:"and"`
+}
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where2 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where2 struct {
+	Or []interface{} `json:"or"`
+}
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where3 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where3 struct {
+	Not interface{} `json:"not"`
+}
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where4 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where4 struct {
+	Never map[string]interface{} `json:"never"`
+}
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where5 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps05Where5 struct {
+	Always map[string]interface{} `json:"always"`
+}
+
+// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_5_Where struct {
+	union json.RawMessage
+}
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps06 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06 struct {
+	Action PutStreamsNameJSONBody01StreamIngestProcessingSteps06Action `json:"action"`
+	CustomIdentifier *string `json:"customIdentifier,omitempty"`
+	Description *string `json:"description,omitempty"`
+	IgnoreFailure *bool `json:"ignore_failure,omitempty"`
+	OnFailure *[]map[string]interface{} `json:"on_failure,omitempty"`
+	Processors []struct {
+		Append interface{} `json:"append"`
+		Attachment interface{} `json:"attachment"`
+		Bytes interface{} `json:"bytes"`
+		Circle interface{} `json:"circle"`
+		CommunityId interface{} `json:"community_id"`
+		Convert interface{} `json:"convert"`
+		Csv interface{} `json:"csv"`
+		Date interface{} `json:"date"`
+		DateIndexName interface{} `json:"date_index_name"`
+		Dissect interface{} `json:"dissect"`
+		DotExpander interface{} `json:"dot_expander"`
+		Drop interface{} `json:"drop"`
+		Enrich interface{} `json:"enrich"`
+		Fail interface{} `json:"fail"`
+		Fingerprint interface{} `json:"fingerprint"`
+		Foreach interface{} `json:"foreach"`
+		GeoGrid interface{} `json:"geo_grid"`
+		Geoip interface{} `json:"geoip"`
+		Grok interface{} `json:"grok"`
+		Gsub interface{} `json:"gsub"`
+		HtmlStrip interface{} `json:"html_strip"`
+		Inference interface{} `json:"inference"`
+		IpLocation interface{} `json:"ip_location"`
+		Join interface{} `json:"join"`
+		Json interface{} `json:"json"`
+		Kv interface{} `json:"kv"`
+		Lowercase interface{} `json:"lowercase"`
+		NetworkDirection interface{} `json:"network_direction"`
+		Pipeline interface{} `json:"pipeline"`
+		Redact interface{} `json:"redact"`
+		RegisteredDomain interface{} `json:"registered_domain"`
+		Remove interface{} `json:"remove"`
+		Rename interface{} `json:"rename"`
+		Reroute interface{} `json:"reroute"`
+		Script interface{} `json:"script"`
+		Set interface{} `json:"set"`
+		SetSecurityUser interface{} `json:"set_security_user"`
+		Sort interface{} `json:"sort"`
+		Split interface{} `json:"split"`
+		Terminate interface{} `json:"terminate"`
+		Trim interface{} `json:"trim"`
+		Uppercase interface{} `json:"uppercase"`
+		UriParts interface{} `json:"uri_parts"`
+		Urldecode interface{} `json:"urldecode"`
+		UserAgent interface{} `json:"user_agent"`
+	} `json:"processors"`
+	Tag *string `json:"tag,omitempty"`
+	Where *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where `json:"where,omitempty"`
+}
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Action defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Action string
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where0 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where0 struct {
+	union json.RawMessage
+}
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00 struct {
+	Contains *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Contains `json:"contains,omitempty"`
+	EndsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_EndsWith `json:"endsWith,omitempty"`
+	Eq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Eq `json:"eq,omitempty"`
+	Field string `json:"field"`
+	Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gt `json:"gt,omitempty"`
+	Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gte `json:"gte,omitempty"`
+	Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lt `json:"lt,omitempty"`
+	Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lte `json:"lte,omitempty"`
+	Neq *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Neq `json:"neq,omitempty"`
+	Range *struct {
+		Gt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gt `json:"gt,omitempty"`
+		Gte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gte `json:"gte,omitempty"`
+		Lt *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lt `json:"lt,omitempty"`
+		Lte *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lte `json:"lte,omitempty"`
+	} `json:"range,omitempty"`
+	StartsWith *PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_StartsWith `json:"startsWith,omitempty"`
+}
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Contains0 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Contains0 = string
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Contains1 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Contains1 = float32
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Contains2 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Contains2 = bool
+
+// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Contains defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Contains struct {
+	union json.RawMessage
+}
+
+// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00EndsWith0 defines parameters for PutStreamsName.
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00EndsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00EndsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00EndsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00EndsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00EndsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_EndsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Eq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Eq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Eq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Eq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Eq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Eq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Eq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Gte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lt1 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Lte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Neq0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Neq0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Neq1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Neq1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Neq2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00Neq2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Neq defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGte2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeGte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLt0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLt0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLt1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLt1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLt2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLt2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lt defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLte0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLte0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLte1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLte1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLte2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00RangeLte2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lte defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00StartsWith0 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00StartsWith0 = string + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00StartsWith1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00StartsWith1 = float32 + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00StartsWith2 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where00StartsWith2 = bool + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_StartsWith defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where01 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where2 defines parameters for PutStreamsName. 
+type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where3 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where4 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where5 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps06Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_0_6_Where struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody01StreamIngestProcessingSteps1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody01StreamIngestProcessingSteps1 struct { + Where struct { + Steps []interface{} `json:"steps"` + } `json:"where"` +} + +// PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_Item defines parameters for PutStreamsName. +type PutStreamsNameJSONBody_0_1_Stream_Ingest_Processing_Steps_Item struct { + union json.RawMessage +} + +// PutStreamsNameJSONBody1 defines parameters for PutStreamsName. +type PutStreamsNameJSONBody1 struct { + Dashboards []string `json:"dashboards"` + Queries []struct { + Id string `json:"id"` + Kql struct { + Query string `json:"query"` + } `json:"kql"` + Title string `json:"title"` + } `json:"queries"` + Rules []string `json:"rules"` + Stream struct { + Group struct { + Members []string `json:"members"` + Metadata map[string]string `json:"metadata"` + Tags []string `json:"tags"` + } `json:"group"` + Name interface{} `json:"name,omitempty"` + } `json:"stream"` +} + +// PostStreamsNameForkJSONBody defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody struct { + Status *PostStreamsNameForkJSONBodyStatus `json:"status,omitempty"` + Stream struct { + Name string `json:"name"` + } `json:"stream"` + Where PostStreamsNameForkJSONBody_Where `json:"where"` +} + +// PostStreamsNameForkJSONBodyStatus defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyStatus string + +// PostStreamsNameForkJSONBodyWhere0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere0 struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00 defines parameters for PostStreamsNameFork. 
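+// Illustrative note (editor's sketch, not part of the generated file): a fork
+// request body assembled from the PostStreamsNameFork types above might look like
+//
+//	{"stream": {"name": "logs.errors"}, "where": {"field": "log.level", "eq": "error"}}
+//
+// with the stream name and condition values here being assumed examples only;
+// the "where" object is one of the Where variants defined below.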
+type PostStreamsNameForkJSONBodyWhere00 struct { + Contains *PostStreamsNameForkJSONBody_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PostStreamsNameForkJSONBody_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PostStreamsNameForkJSONBody_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PostStreamsNameForkJSONBody_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PostStreamsNameForkJSONBody_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PostStreamsNameForkJSONBody_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PostStreamsNameForkJSONBody_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PostStreamsNameForkJSONBody_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PostStreamsNameForkJSONBody_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PostStreamsNameForkJSONBody_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PostStreamsNameForkJSONBody_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PostStreamsNameForkJSONBody_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PostStreamsNameForkJSONBody_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PostStreamsNameForkJSONBodyWhere00Contains0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Contains0 = string + +// PostStreamsNameForkJSONBodyWhere00Contains1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Contains1 = float32 + +// PostStreamsNameForkJSONBodyWhere00Contains2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Contains2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Contains defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_Contains struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00EndsWith0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00EndsWith0 = string + +// PostStreamsNameForkJSONBodyWhere00EndsWith1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00EndsWith1 = float32 + +// PostStreamsNameForkJSONBodyWhere00EndsWith2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00EndsWith2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_EndsWith defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00Eq0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Eq0 = string + +// PostStreamsNameForkJSONBodyWhere00Eq1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Eq1 = float32 + +// PostStreamsNameForkJSONBodyWhere00Eq2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Eq2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Eq defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_Eq struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00Gt0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Gt0 = string + +// PostStreamsNameForkJSONBodyWhere00Gt1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Gt1 = float32 + +// PostStreamsNameForkJSONBodyWhere00Gt2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Gt2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Gt defines parameters for PostStreamsNameFork. 
+type PostStreamsNameForkJSONBody_Where_0_0_Gt struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00Gte0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Gte0 = string + +// PostStreamsNameForkJSONBodyWhere00Gte1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Gte1 = float32 + +// PostStreamsNameForkJSONBodyWhere00Gte2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Gte2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Gte defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_Gte struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00Lt0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Lt0 = string + +// PostStreamsNameForkJSONBodyWhere00Lt1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Lt1 = float32 + +// PostStreamsNameForkJSONBodyWhere00Lt2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Lt2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Lt defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_Lt struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00Lte0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Lte0 = string + +// PostStreamsNameForkJSONBodyWhere00Lte1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Lte1 = float32 + +// PostStreamsNameForkJSONBodyWhere00Lte2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Lte2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Lte defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_Lte struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00Neq0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Neq0 = string + +// PostStreamsNameForkJSONBodyWhere00Neq1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Neq1 = float32 + +// PostStreamsNameForkJSONBodyWhere00Neq2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00Neq2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Neq defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_Neq struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00RangeGt0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeGt0 = string + +// PostStreamsNameForkJSONBodyWhere00RangeGt1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeGt1 = float32 + +// PostStreamsNameForkJSONBodyWhere00RangeGt2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeGt2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Range_Gt defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00RangeGte0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeGte0 = string + +// PostStreamsNameForkJSONBodyWhere00RangeGte1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeGte1 = float32 + +// PostStreamsNameForkJSONBodyWhere00RangeGte2 defines parameters for PostStreamsNameFork. 
+type PostStreamsNameForkJSONBodyWhere00RangeGte2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Range_Gte defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00RangeLt0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeLt0 = string + +// PostStreamsNameForkJSONBodyWhere00RangeLt1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeLt1 = float32 + +// PostStreamsNameForkJSONBodyWhere00RangeLt2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeLt2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Range_Lt defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00RangeLte0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeLte0 = string + +// PostStreamsNameForkJSONBodyWhere00RangeLte1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeLte1 = float32 + +// PostStreamsNameForkJSONBodyWhere00RangeLte2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00RangeLte2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_Range_Lte defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere00StartsWith0 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00StartsWith0 = string + +// PostStreamsNameForkJSONBodyWhere00StartsWith1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00StartsWith1 = float32 + +// PostStreamsNameForkJSONBodyWhere00StartsWith2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere00StartsWith2 = bool + +// PostStreamsNameForkJSONBody_Where_0_0_StartsWith defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PostStreamsNameForkJSONBodyWhere01 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PostStreamsNameForkJSONBodyWhere1 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere1 struct { + And []interface{} `json:"and"` +} + +// PostStreamsNameForkJSONBodyWhere2 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere2 struct { + Or []interface{} `json:"or"` +} + +// PostStreamsNameForkJSONBodyWhere3 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere3 struct { + Not interface{} `json:"not"` +} + +// PostStreamsNameForkJSONBodyWhere4 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere4 struct { + Never map[string]interface{} `json:"never"` +} + +// PostStreamsNameForkJSONBodyWhere5 defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBodyWhere5 struct { + Always map[string]interface{} `json:"always"` +} + +// PostStreamsNameForkJSONBody_Where defines parameters for PostStreamsNameFork. +type PostStreamsNameForkJSONBody_Where struct { + union json.RawMessage +} + +// GetStreamsNameGroupJSONBody defines parameters for GetStreamsNameGroup. 
+type GetStreamsNameGroupJSONBody struct {
+	union json.RawMessage
+}
+
+// GetStreamsNameGroupJSONBody0 defines parameters for GetStreamsNameGroup.
+type GetStreamsNameGroupJSONBody0 = map[string]interface{}
+
+// GetStreamsNameGroupJSONBody1 defines parameters for GetStreamsNameGroup.
+type GetStreamsNameGroupJSONBody1 = interface{}
+
+// GetStreamsNameGroupJSONBody2 defines parameters for GetStreamsNameGroup.
+type GetStreamsNameGroupJSONBody2 = interface{}
+
+// PutStreamsNameGroupJSONBody defines parameters for PutStreamsNameGroup.
+type PutStreamsNameGroupJSONBody struct {
+	Group struct {
+		Members []string `json:"members"`
+		Metadata map[string]string `json:"metadata"`
+		Tags []string `json:"tags"`
+	} `json:"group"`
+}
+
+// GetStreamsNameIngestJSONBody defines parameters for GetStreamsNameIngest.
+type GetStreamsNameIngestJSONBody struct {
+	union json.RawMessage
+}
+
+// GetStreamsNameIngestJSONBody0 defines parameters for GetStreamsNameIngest.
+type GetStreamsNameIngestJSONBody0 = map[string]interface{}
+
+// GetStreamsNameIngestJSONBody1 defines parameters for GetStreamsNameIngest.
+type GetStreamsNameIngestJSONBody1 = interface{}
+
+// GetStreamsNameIngestJSONBody2 defines parameters for GetStreamsNameIngest.
+type GetStreamsNameIngestJSONBody2 = interface{}
+
+// PutStreamsNameIngestJSONBody defines parameters for PutStreamsNameIngest.
+type PutStreamsNameIngestJSONBody struct {
+	Ingest PutStreamsNameIngestJSONBody_Ingest `json:"ingest"`
+}
+
+// PutStreamsNameIngestJSONBodyIngest0 defines parameters for PutStreamsNameIngest.
+type PutStreamsNameIngestJSONBodyIngest0 struct {
+	Lifecycle PutStreamsNameIngestJSONBody_Ingest_0_Lifecycle `json:"lifecycle"`
+	Processing struct {
+		Steps []PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_Item `json:"steps"`
+	} `json:"processing"`
+	Wired struct {
+		Fields map[string]map[string]PutStreamsNameIngestJSONBody_Ingest_0_Wired_Fields_AdditionalProperties `json:"fields"`
+		Routing []struct {
+			Destination string `json:"destination"`
+			Status *PutStreamsNameIngestJSONBodyIngest0WiredRoutingStatus `json:"status,omitempty"`
+			Where PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where `json:"where"`
+		} `json:"routing"`
+	} `json:"wired"`
+}
+
+// PutStreamsNameIngestJSONBodyIngest0Lifecycle0 defines parameters for PutStreamsNameIngest.
+type PutStreamsNameIngestJSONBodyIngest0Lifecycle0 struct {
+	Dsl struct {
+		DataRetention *string `json:"data_retention,omitempty"`
+	} `json:"dsl"`
+}
+
+// PutStreamsNameIngestJSONBodyIngest0Lifecycle1 defines parameters for PutStreamsNameIngest.
+type PutStreamsNameIngestJSONBodyIngest0Lifecycle1 struct {
+	Ilm struct {
+		Policy string `json:"policy"`
+	} `json:"ilm"`
+}
+
+// PutStreamsNameIngestJSONBodyIngest0Lifecycle2 defines parameters for PutStreamsNameIngest.
+type PutStreamsNameIngestJSONBodyIngest0Lifecycle2 struct {
+	Inherit map[string]interface{} `json:"inherit"`
+}
+
+// PutStreamsNameIngestJSONBody_Ingest_0_Lifecycle defines parameters for PutStreamsNameIngest.
+type PutStreamsNameIngestJSONBody_Ingest_0_Lifecycle struct {
+	union json.RawMessage
+}
+
+// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps0 defines parameters for PutStreamsNameIngest.
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps0 struct {
+	union json.RawMessage
+}
+
+// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00 defines parameters for PutStreamsNameIngest.
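+// Illustrative note (editor's sketch, not generated code): the lifecycle union
+// defined above carries exactly one of its variants as raw JSON, for example
+//
+//	{"dsl": {"data_retention": "30d"}}
+//
+// or {"ilm": {"policy": "my-ilm-policy"}} or {"inherit": {}}; the retention and
+// policy values shown here are assumed examples, not values from this change.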
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00 struct { + Action PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Patterns []string `json:"patterns"` + Where *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Action string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Contains1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Contains defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Eq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lt0 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGte1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where5 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps00Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_0_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01 struct { + Action PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Action `json:"action"` + AppendSeparator *string `json:"append_separator,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Pattern string `json:"pattern"` + Where *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Action string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Contains1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Eq0 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lte1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLt1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where5 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps01Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_1_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02 struct { + Action PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + Formats []string `json:"formats"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + OutputFormat *string `json:"output_format,omitempty"` + To *string `json:"to,omitempty"` + Where *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Action string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Contains1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Eq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gte2 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where5 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps02Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_2_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03 struct { + Action PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Override *bool `json:"override,omitempty"` + To string `json:"to"` + Where *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Action string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Contains1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Eq0 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lte1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLt1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where5 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps03Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_3_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04 struct { + Action PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Action `json:"action"` + CopyFrom *string `json:"copy_from,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + Override *bool `json:"override,omitempty"` + To string `json:"to"` + Value *string `json:"value,omitempty"` + Where *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Action string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Contains1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Eq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gte2 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where5 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps04Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_4_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05 struct { + Action PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Action `json:"action"` + AllowDuplicates *bool `json:"allow_duplicates,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + To string `json:"to"` + Value []interface{} `json:"value"` + Where *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Action string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Contains1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Eq0 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lte1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLt1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where5 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps05Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_5_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06 struct { + Action PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + OnFailure *[]map[string]interface{} `json:"on_failure,omitempty"` + Processors []struct { + Append interface{} `json:"append"` + Attachment interface{} `json:"attachment"` + Bytes interface{} `json:"bytes"` + Circle interface{} `json:"circle"` + CommunityId interface{} `json:"community_id"` + Convert interface{} `json:"convert"` + Csv interface{} `json:"csv"` + Date interface{} `json:"date"` + DateIndexName interface{} `json:"date_index_name"` + Dissect interface{} `json:"dissect"` + DotExpander interface{} `json:"dot_expander"` + Drop interface{} `json:"drop"` + Enrich interface{} `json:"enrich"` + Fail interface{} `json:"fail"` + Fingerprint interface{} `json:"fingerprint"` + Foreach interface{} `json:"foreach"` + GeoGrid interface{} `json:"geo_grid"` + Geoip interface{} `json:"geoip"` + Grok interface{} `json:"grok"` + Gsub interface{} `json:"gsub"` + HtmlStrip interface{} `json:"html_strip"` + Inference interface{} `json:"inference"` + IpLocation interface{} `json:"ip_location"` + Join interface{} `json:"join"` + Json interface{} `json:"json"` + Kv interface{} `json:"kv"` + Lowercase interface{} `json:"lowercase"` + NetworkDirection interface{} `json:"network_direction"` + Pipeline interface{} `json:"pipeline"` + Redact interface{} `json:"redact"` + RegisteredDomain interface{} `json:"registered_domain"` + Remove interface{} `json:"remove"` + Rename interface{} `json:"rename"` + Reroute interface{} `json:"reroute"` + Script interface{} `json:"script"` + Set interface{} `json:"set"` + SetSecurityUser interface{} `json:"set_security_user"` + Sort interface{} `json:"sort"` + Split interface{} `json:"split"` + Terminate interface{} `json:"terminate"` + Trim interface{} `json:"trim"` + Uppercase interface{} `json:"uppercase"` + UriParts interface{} `json:"uri_parts"` + Urldecode interface{} `json:"urldecode"` + UserAgent interface{} `json:"user_agent"` + } `json:"processors"` + Tag *string `json:"tag,omitempty"` + Where *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Action string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Contains1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Eq0 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lte1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLt1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where5 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps06Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_0_6_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0ProcessingSteps1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0ProcessingSteps1 struct { + Where struct { + Steps []interface{} `json:"steps"` + } `json:"where"` +} + +// PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_Item defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Processing_Steps_Item struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredFields0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredFields00 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields00 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredFields01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields01 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredFields02 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields02 = bool + +// PutStreamsNameIngestJSONBodyIngest0WiredFields03 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields03 = interface{} + +// PutStreamsNameIngestJSONBodyIngest0WiredFields04 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields04 = interface{} + +// PutStreamsNameIngestJSONBodyIngest0WiredFields1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields1 = []PutStreamsNameIngestJSONBody_Ingest_0_Wired_Fields_1_Item + +// PutStreamsNameIngestJSONBodyIngest0WiredFields10 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields10 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredFields11 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields11 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredFields12 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields12 = bool + +// PutStreamsNameIngestJSONBodyIngest0WiredFields13 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields13 = interface{} + +// PutStreamsNameIngestJSONBodyIngest0WiredFields14 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields14 = interface{} + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Fields_1_Item defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Fields_1_Item struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredFields2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredFields2 = interface{} + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Fields_AdditionalProperties defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Fields_AdditionalProperties struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingStatus defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingStatus string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Contains1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00EndsWith2 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Eq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lte0 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLt1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere5 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest0WiredRoutingWhere5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_0_Wired_Routing_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1 struct { + Classic struct { + FieldOverrides *map[string]map[string]PutStreamsNameIngestJSONBody_Ingest_1_Classic_FieldOverrides_AdditionalProperties `json:"field_overrides,omitempty"` + } `json:"classic"` + Lifecycle PutStreamsNameIngestJSONBody_Ingest_1_Lifecycle `json:"lifecycle"` + Processing struct { + Steps []PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_Item `json:"steps"` + } `json:"processing"` +} + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides00 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides00 = string + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides01 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides02 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides02 = bool + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides03 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides03 = interface{} + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides04 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides04 = interface{} + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides1 = []PutStreamsNameIngestJSONBody_Ingest_1_Classic_FieldOverrides_1_Item + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides10 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides10 = string + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides11 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides11 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides12 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides12 = bool + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides13 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides13 = interface{} + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides14 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides14 = interface{} + +// PutStreamsNameIngestJSONBody_Ingest_1_Classic_FieldOverrides_1_Item defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Classic_FieldOverrides_1_Item struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides2 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ClassicFieldOverrides2 = interface{} + +// PutStreamsNameIngestJSONBody_Ingest_1_Classic_FieldOverrides_AdditionalProperties defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Classic_FieldOverrides_AdditionalProperties struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1Lifecycle0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1Lifecycle0 struct { + Dsl struct { + DataRetention *string `json:"data_retention,omitempty"` + } `json:"dsl"` +} + +// PutStreamsNameIngestJSONBodyIngest1Lifecycle1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1Lifecycle1 struct { + Ilm struct { + Policy string `json:"policy"` + } `json:"ilm"` +} + +// PutStreamsNameIngestJSONBodyIngest1Lifecycle2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1Lifecycle2 struct { + Inherit map[string]interface{} `json:"inherit"` +} + +// PutStreamsNameIngestJSONBody_Ingest_1_Lifecycle defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Lifecycle struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00 struct { + Action PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Patterns []string `json:"patterns"` + Where *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Action string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Contains1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Eq0 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lte1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLt1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where5 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps00Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_0_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01 struct { + Action PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Action `json:"action"` + AppendSeparator *string `json:"append_separator,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Pattern string `json:"pattern"` + Where *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Action string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Contains1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Eq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gte2 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where5 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps01Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_1_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02 struct { + Action PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + Formats []string `json:"formats"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + OutputFormat *string `json:"output_format,omitempty"` + To *string `json:"to,omitempty"` + Where *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Action string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Contains1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Eq0 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lte1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLt1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where5 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps02Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_2_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03 struct { + Action PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + From string `json:"from"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + IgnoreMissing *bool `json:"ignore_missing,omitempty"` + Override *bool `json:"override,omitempty"` + To string `json:"to"` + Where *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Action string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Contains1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Eq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gte2 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where5 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps03Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_3_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04 struct { + Action PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Action `json:"action"` + CopyFrom *string `json:"copy_from,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + Override *bool `json:"override,omitempty"` + To string `json:"to"` + Value *string `json:"value,omitempty"` + Where *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Action string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Contains1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Eq0 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lte1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLt1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where5 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps04Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_4_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05 struct { + Action PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Action `json:"action"` + AllowDuplicates *bool `json:"allow_duplicates,omitempty"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + To string `json:"to"` + Value []interface{} `json:"value"` + Where *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Action string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Contains1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Eq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gte2 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where5 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps05Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_5_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06 struct { + Action PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Action `json:"action"` + CustomIdentifier *string `json:"customIdentifier,omitempty"` + Description *string `json:"description,omitempty"` + IgnoreFailure *bool `json:"ignore_failure,omitempty"` + OnFailure *[]map[string]interface{} `json:"on_failure,omitempty"` + Processors []struct { + Append interface{} `json:"append"` + Attachment interface{} `json:"attachment"` + Bytes interface{} `json:"bytes"` + Circle interface{} `json:"circle"` + CommunityId interface{} `json:"community_id"` + Convert interface{} `json:"convert"` + Csv interface{} `json:"csv"` + Date interface{} `json:"date"` + DateIndexName interface{} `json:"date_index_name"` + Dissect interface{} `json:"dissect"` + DotExpander interface{} `json:"dot_expander"` + Drop interface{} `json:"drop"` + Enrich interface{} `json:"enrich"` + Fail interface{} `json:"fail"` + Fingerprint interface{} `json:"fingerprint"` + Foreach interface{} `json:"foreach"` + GeoGrid interface{} `json:"geo_grid"` + Geoip interface{} `json:"geoip"` + Grok interface{} `json:"grok"` + Gsub interface{} `json:"gsub"` + HtmlStrip interface{} `json:"html_strip"` + Inference interface{} `json:"inference"` + IpLocation interface{} `json:"ip_location"` + Join interface{} `json:"join"` + Json interface{} `json:"json"` + Kv interface{} `json:"kv"` + Lowercase interface{} `json:"lowercase"` + NetworkDirection interface{} `json:"network_direction"` + Pipeline interface{} `json:"pipeline"` + Redact interface{} `json:"redact"` + RegisteredDomain interface{} `json:"registered_domain"` + Remove interface{} `json:"remove"` + Rename interface{} `json:"rename"` + Reroute interface{} `json:"reroute"` + Script interface{} `json:"script"` + Set interface{} `json:"set"` + SetSecurityUser interface{} `json:"set_security_user"` + Sort interface{} `json:"sort"` + Split interface{} `json:"split"` + Terminate interface{} `json:"terminate"` + Trim interface{} `json:"trim"` + Uppercase interface{} `json:"uppercase"` + UriParts interface{} `json:"uri_parts"` + Urldecode interface{} `json:"urldecode"` + UserAgent interface{} `json:"user_agent"` + } `json:"processors"` + Tag *string `json:"tag,omitempty"` + Where *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where `json:"where,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Action defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Action string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where0 struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00 struct { + Contains *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Contains `json:"contains,omitempty"` + EndsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_EndsWith `json:"endsWith,omitempty"` + Eq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Eq `json:"eq,omitempty"` + Field string `json:"field"` + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Lte `json:"lte,omitempty"` + Neq *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Neq `json:"neq,omitempty"` + Range *struct { + Gt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Gt `json:"gt,omitempty"` + Gte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Gte `json:"gte,omitempty"` + Lt *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Lt `json:"lt,omitempty"` + Lte *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Lte `json:"lte,omitempty"` + } `json:"range,omitempty"` + StartsWith *PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_StartsWith `json:"startsWith,omitempty"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Contains0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Contains0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Contains1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Contains1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Contains2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Contains2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Contains defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Contains struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00EndsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00EndsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00EndsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00EndsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00EndsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00EndsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_EndsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_EndsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Eq0 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Eq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Eq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Eq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Eq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Eq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Eq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Eq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Gte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lte1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Lte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Neq0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Neq0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Neq1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Neq1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Neq2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00Neq2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Neq defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Neq struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGt1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Gt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Gt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeGte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Gte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Gte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLt0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLt0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLt1 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLt1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLt2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLt2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Lt defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Lt struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLte0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLte0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLte1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLte1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLte2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00RangeLte2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Lte defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_Range_Lte struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00StartsWith0 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00StartsWith0 = string + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00StartsWith1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00StartsWith1 = float32 + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00StartsWith2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where00StartsWith2 = bool + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_StartsWith defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where_0_0_StartsWith struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where01 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where01 struct { + Exists *bool `json:"exists,omitempty"` + Field string `json:"field"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where1 struct { + And []interface{} `json:"and"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where2 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where2 struct { + Or []interface{} `json:"or"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where3 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where3 struct { + Not interface{} `json:"not"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where4 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where4 struct { + Never map[string]interface{} `json:"never"` +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where5 defines parameters for PutStreamsNameIngest. 
+type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps06Where5 struct { + Always map[string]interface{} `json:"always"` +} + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_0_6_Where struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBodyIngest1ProcessingSteps1 defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBodyIngest1ProcessingSteps1 struct { + Where struct { + Steps []interface{} `json:"steps"` + } `json:"where"` +} + +// PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_Item defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest_1_Processing_Steps_Item struct { + union json.RawMessage +} + +// PutStreamsNameIngestJSONBody_Ingest defines parameters for PutStreamsNameIngest. +type PutStreamsNameIngestJSONBody_Ingest struct { + union json.RawMessage +} + +// PostStreamsNameContentExportJSONBody defines parameters for PostStreamsNameContentExport. +type PostStreamsNameContentExportJSONBody struct { + Description string `json:"description"` + Include PostStreamsNameContentExportJSONBody_Include `json:"include"` + Name string `json:"name"` + Version string `json:"version"` +} + +// PostStreamsNameContentExportJSONBodyInclude0 defines parameters for PostStreamsNameContentExport. +type PostStreamsNameContentExportJSONBodyInclude0 struct { + Objects struct { + All map[string]interface{} `json:"all"` + } `json:"objects"` +} + +// PostStreamsNameContentExportJSONBodyInclude1 defines parameters for PostStreamsNameContentExport. +type PostStreamsNameContentExportJSONBodyInclude1 struct { + Objects struct { + Queries []struct { + Id string `json:"id"` + } `json:"queries"` + Routing []struct { + Destination string `json:"destination"` + } `json:"routing"` + } `json:"objects"` +} + +// PostStreamsNameContentExportJSONBody_Include defines parameters for PostStreamsNameContentExport. +type PostStreamsNameContentExportJSONBody_Include struct { + union json.RawMessage +} + +// PostStreamsNameContentImportMultipartBody defines parameters for PostStreamsNameContentImport. +type PostStreamsNameContentImportMultipartBody struct { + Content interface{} `json:"content"` + Include string `json:"include"` +} + +// GetStreamsNameDashboardsJSONBody defines parameters for GetStreamsNameDashboards. +type GetStreamsNameDashboardsJSONBody struct { + union json.RawMessage +} + +// GetStreamsNameDashboardsJSONBody0 defines parameters for GetStreamsNameDashboards. +type GetStreamsNameDashboardsJSONBody0 = map[string]interface{} + +// GetStreamsNameDashboardsJSONBody1 defines parameters for GetStreamsNameDashboards. +type GetStreamsNameDashboardsJSONBody1 = interface{} + +// GetStreamsNameDashboardsJSONBody2 defines parameters for GetStreamsNameDashboards. +type GetStreamsNameDashboardsJSONBody2 = interface{} + +// PostStreamsNameDashboardsBulkJSONBody defines parameters for PostStreamsNameDashboardsBulk. +type PostStreamsNameDashboardsBulkJSONBody struct { + Operations []PostStreamsNameDashboardsBulkJSONBody_Operations_Item `json:"operations"` +} + +// PostStreamsNameDashboardsBulkJSONBodyOperations0 defines parameters for PostStreamsNameDashboardsBulk. +type PostStreamsNameDashboardsBulkJSONBodyOperations0 struct { + Index struct { + Id string `json:"id"` + } `json:"index"` +} + +// PostStreamsNameDashboardsBulkJSONBodyOperations1 defines parameters for PostStreamsNameDashboardsBulk. 
+type PostStreamsNameDashboardsBulkJSONBodyOperations1 struct { + Delete struct { + Id string `json:"id"` + } `json:"delete"` +} + +// PostStreamsNameDashboardsBulkJSONBody_Operations_Item defines parameters for PostStreamsNameDashboardsBulk. +type PostStreamsNameDashboardsBulkJSONBody_Operations_Item struct { + union json.RawMessage +} + +// DeleteStreamsNameDashboardsDashboardidJSONBody defines parameters for DeleteStreamsNameDashboardsDashboardid. +type DeleteStreamsNameDashboardsDashboardidJSONBody struct { + union json.RawMessage +} + +// DeleteStreamsNameDashboardsDashboardidJSONBody0 defines parameters for DeleteStreamsNameDashboardsDashboardid. +type DeleteStreamsNameDashboardsDashboardidJSONBody0 = map[string]interface{} + +// DeleteStreamsNameDashboardsDashboardidJSONBody1 defines parameters for DeleteStreamsNameDashboardsDashboardid. +type DeleteStreamsNameDashboardsDashboardidJSONBody1 = interface{} + +// DeleteStreamsNameDashboardsDashboardidJSONBody2 defines parameters for DeleteStreamsNameDashboardsDashboardid. +type DeleteStreamsNameDashboardsDashboardidJSONBody2 = interface{} + +// PutStreamsNameDashboardsDashboardidJSONBody defines parameters for PutStreamsNameDashboardsDashboardid. +type PutStreamsNameDashboardsDashboardidJSONBody struct { + union json.RawMessage +} + +// PutStreamsNameDashboardsDashboardidJSONBody0 defines parameters for PutStreamsNameDashboardsDashboardid. +type PutStreamsNameDashboardsDashboardidJSONBody0 = map[string]interface{} + +// PutStreamsNameDashboardsDashboardidJSONBody1 defines parameters for PutStreamsNameDashboardsDashboardid. +type PutStreamsNameDashboardsDashboardidJSONBody1 = interface{} + +// PutStreamsNameDashboardsDashboardidJSONBody2 defines parameters for PutStreamsNameDashboardsDashboardid. +type PutStreamsNameDashboardsDashboardidJSONBody2 = interface{} + +// GetStreamsNameQueriesJSONBody defines parameters for GetStreamsNameQueries. +type GetStreamsNameQueriesJSONBody struct { + union json.RawMessage +} + +// GetStreamsNameQueriesJSONBody0 defines parameters for GetStreamsNameQueries. +type GetStreamsNameQueriesJSONBody0 = map[string]interface{} + +// GetStreamsNameQueriesJSONBody1 defines parameters for GetStreamsNameQueries. +type GetStreamsNameQueriesJSONBody1 = interface{} + +// GetStreamsNameQueriesJSONBody2 defines parameters for GetStreamsNameQueries. +type GetStreamsNameQueriesJSONBody2 = interface{} + +// PostStreamsNameQueriesBulkJSONBody defines parameters for PostStreamsNameQueriesBulk. +type PostStreamsNameQueriesBulkJSONBody struct { + Operations []PostStreamsNameQueriesBulkJSONBody_Operations_Item `json:"operations"` +} + +// PostStreamsNameQueriesBulkJSONBodyOperations0 defines parameters for PostStreamsNameQueriesBulk. +type PostStreamsNameQueriesBulkJSONBodyOperations0 struct { + Index struct { + Id string `json:"id"` + Kql struct { + Query string `json:"query"` + } `json:"kql"` + Title string `json:"title"` + } `json:"index"` +} + +// PostStreamsNameQueriesBulkJSONBodyOperations1 defines parameters for PostStreamsNameQueriesBulk. +type PostStreamsNameQueriesBulkJSONBodyOperations1 struct { + Delete struct { + Id string `json:"id"` + } `json:"delete"` +} + +// PostStreamsNameQueriesBulkJSONBody_Operations_Item defines parameters for PostStreamsNameQueriesBulk. +type PostStreamsNameQueriesBulkJSONBody_Operations_Item struct { + union json.RawMessage +} + +// DeleteStreamsNameQueriesQueryidJSONBody defines parameters for DeleteStreamsNameQueriesQueryid. 
+type DeleteStreamsNameQueriesQueryidJSONBody struct { + union json.RawMessage +} + +// DeleteStreamsNameQueriesQueryidJSONBody0 defines parameters for DeleteStreamsNameQueriesQueryid. +type DeleteStreamsNameQueriesQueryidJSONBody0 = map[string]interface{} + +// DeleteStreamsNameQueriesQueryidJSONBody1 defines parameters for DeleteStreamsNameQueriesQueryid. +type DeleteStreamsNameQueriesQueryidJSONBody1 = interface{} + +// DeleteStreamsNameQueriesQueryidJSONBody2 defines parameters for DeleteStreamsNameQueriesQueryid. +type DeleteStreamsNameQueriesQueryidJSONBody2 = interface{} + +// PutStreamsNameQueriesQueryidJSONBody defines parameters for PutStreamsNameQueriesQueryid. +type PutStreamsNameQueriesQueryidJSONBody struct { + Kql struct { + Query string `json:"query"` + } `json:"kql"` + Title string `json:"title"` +} + +// GetStreamsNameRulesJSONBody defines parameters for GetStreamsNameRules. +type GetStreamsNameRulesJSONBody struct { + union json.RawMessage +} + +// GetStreamsNameRulesJSONBody0 defines parameters for GetStreamsNameRules. +type GetStreamsNameRulesJSONBody0 = map[string]interface{} + +// GetStreamsNameRulesJSONBody1 defines parameters for GetStreamsNameRules. +type GetStreamsNameRulesJSONBody1 = interface{} + +// GetStreamsNameRulesJSONBody2 defines parameters for GetStreamsNameRules. +type GetStreamsNameRulesJSONBody2 = interface{} + +// DeleteStreamsNameRulesRuleidJSONBody defines parameters for DeleteStreamsNameRulesRuleid. +type DeleteStreamsNameRulesRuleidJSONBody struct { + union json.RawMessage +} + +// DeleteStreamsNameRulesRuleidJSONBody0 defines parameters for DeleteStreamsNameRulesRuleid. +type DeleteStreamsNameRulesRuleidJSONBody0 = map[string]interface{} + +// DeleteStreamsNameRulesRuleidJSONBody1 defines parameters for DeleteStreamsNameRulesRuleid. +type DeleteStreamsNameRulesRuleidJSONBody1 = interface{} + +// DeleteStreamsNameRulesRuleidJSONBody2 defines parameters for DeleteStreamsNameRulesRuleid. +type DeleteStreamsNameRulesRuleidJSONBody2 = interface{} + +// PutStreamsNameRulesRuleidJSONBody defines parameters for PutStreamsNameRulesRuleid. +type PutStreamsNameRulesRuleidJSONBody struct { + union json.RawMessage +} + +// PutStreamsNameRulesRuleidJSONBody0 defines parameters for PutStreamsNameRulesRuleid. +type PutStreamsNameRulesRuleidJSONBody0 = map[string]interface{} + +// PutStreamsNameRulesRuleidJSONBody1 defines parameters for PutStreamsNameRulesRuleid. +type PutStreamsNameRulesRuleidJSONBody1 = interface{} + +// PutStreamsNameRulesRuleidJSONBody2 defines parameters for PutStreamsNameRulesRuleid. +type PutStreamsNameRulesRuleidJSONBody2 = interface{} + +// GetStreamsNameSignificantEventsJSONBody defines parameters for GetStreamsNameSignificantEvents. +type GetStreamsNameSignificantEventsJSONBody struct { + union json.RawMessage +} + +// GetStreamsNameSignificantEventsParams defines parameters for GetStreamsNameSignificantEvents. +type GetStreamsNameSignificantEventsParams struct { + From string `form:"from" json:"from"` + To string `form:"to" json:"to"` + BucketSize string `form:"bucketSize" json:"bucketSize"` +} + +// GetStreamsNameSignificantEventsJSONBody0 defines parameters for GetStreamsNameSignificantEvents. +type GetStreamsNameSignificantEventsJSONBody0 = map[string]interface{} + +// GetStreamsNameSignificantEventsJSONBody1 defines parameters for GetStreamsNameSignificantEvents. +type GetStreamsNameSignificantEventsJSONBody1 = interface{} + +// GetStreamsNameSignificantEventsJSONBody2 defines parameters for GetStreamsNameSignificantEvents. 
+type GetStreamsNameSignificantEventsJSONBody2 = interface{} + +// GetStreamsNameSignificantEventsGenerateJSONBody defines parameters for GetStreamsNameSignificantEventsGenerate. +type GetStreamsNameSignificantEventsGenerateJSONBody struct { + union json.RawMessage +} + +// GetStreamsNameSignificantEventsGenerateParams defines parameters for GetStreamsNameSignificantEventsGenerate. +type GetStreamsNameSignificantEventsGenerateParams struct { + ConnectorId string `form:"connectorId" json:"connectorId"` + CurrentDate *string `form:"currentDate,omitempty" json:"currentDate,omitempty"` + ShortLookback *string `form:"shortLookback,omitempty" json:"shortLookback,omitempty"` + LongLookback *string `form:"longLookback,omitempty" json:"longLookback,omitempty"` +} + +// GetStreamsNameSignificantEventsGenerateJSONBody0 defines parameters for GetStreamsNameSignificantEventsGenerate. +type GetStreamsNameSignificantEventsGenerateJSONBody0 = map[string]interface{} + +// GetStreamsNameSignificantEventsGenerateJSONBody1 defines parameters for GetStreamsNameSignificantEventsGenerate. +type GetStreamsNameSignificantEventsGenerateJSONBody1 = interface{} + +// GetStreamsNameSignificantEventsGenerateJSONBody2 defines parameters for GetStreamsNameSignificantEventsGenerate. +type GetStreamsNameSignificantEventsGenerateJSONBody2 = interface{} + +// PostStreamsNameSignificantEventsPreviewJSONBody defines parameters for PostStreamsNameSignificantEventsPreview. +type PostStreamsNameSignificantEventsPreviewJSONBody struct { + Query struct { + Kql struct { + Query string `json:"query"` + } `json:"kql"` + } `json:"query"` +} + +// PostStreamsNameSignificantEventsPreviewParams defines parameters for PostStreamsNameSignificantEventsPreview. +type PostStreamsNameSignificantEventsPreviewParams struct { + From string `form:"from" json:"from"` + To string `form:"to" json:"to"` + BucketSize string `form:"bucketSize" json:"bucketSize"` +} + +// GetSyntheticMonitorsParams defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParams struct { + // Filter Additional filtering criteria. + Filter *string `form:"filter,omitempty" json:"filter,omitempty"` + + // Locations The locations to filter by. + Locations *struct { + union json.RawMessage + } `form:"locations,omitempty" json:"locations,omitempty"` + + // MonitorTypes The monitor types to filter. + MonitorTypes *struct { + union json.RawMessage + } `form:"monitorTypes,omitempty" json:"monitorTypes,omitempty"` + + // Page The page number for paginated results. + Page *int `form:"page,omitempty" json:"page,omitempty"` + + // PerPage The number of items to return per page. + PerPage *int `form:"per_page,omitempty" json:"per_page,omitempty"` + + // Projects The projects to filter by. + Projects *struct { + union json.RawMessage + } `form:"projects,omitempty" json:"projects,omitempty"` + + // Query A free-text query string. + Query *string `form:"query,omitempty" json:"query,omitempty"` + + // Schedules The schedules to filter by. + Schedules *struct { + union json.RawMessage + } `form:"schedules,omitempty" json:"schedules,omitempty"` + + // SortField The field to sort the results by. + SortField *GetSyntheticMonitorsParamsSortField `form:"sortField,omitempty" json:"sortField,omitempty"` + + // SortOrder The sort order. + SortOrder *GetSyntheticMonitorsParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` + + // Status The status to filter by. 
+ Status *struct { + union json.RawMessage + } `form:"status,omitempty" json:"status,omitempty"` + + // Tags Tags to filter monitors. + Tags *struct { + union json.RawMessage + } `form:"tags,omitempty" json:"tags,omitempty"` + + // UseLogicalAndFor Specifies whether to apply logical AND filtering for specific fields. Accepts either a string with values "tags" or "locations" or an array containing both. + UseLogicalAndFor *[]GetSyntheticMonitorsParamsUseLogicalAndFor `form:"useLogicalAndFor,omitempty" json:"useLogicalAndFor,omitempty"` +} + +// GetSyntheticMonitorsParamsLocations0 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsLocations0 = string + +// GetSyntheticMonitorsParamsLocations1 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsLocations1 = []interface{} + +// GetSyntheticMonitorsParamsMonitorTypes0 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsMonitorTypes0 string + +// GetSyntheticMonitorsParamsMonitorTypes1 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsMonitorTypes1 = []interface{} + +// GetSyntheticMonitorsParamsProjects0 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsProjects0 = string + +// GetSyntheticMonitorsParamsProjects1 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsProjects1 = []interface{} + +// GetSyntheticMonitorsParamsSchedules0 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsSchedules0 = []interface{} + +// GetSyntheticMonitorsParamsSchedules1 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsSchedules1 = string + +// GetSyntheticMonitorsParamsSortField defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsSortField string + +// GetSyntheticMonitorsParamsSortOrder defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsSortOrder string + +// GetSyntheticMonitorsParamsStatus0 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsStatus0 = []interface{} + +// GetSyntheticMonitorsParamsStatus1 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsStatus1 = string + +// GetSyntheticMonitorsParamsTags0 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsTags0 = string + +// GetSyntheticMonitorsParamsTags1 defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsTags1 = []interface{} + +// GetSyntheticMonitorsParamsUseLogicalAndFor defines parameters for GetSyntheticMonitors. +type GetSyntheticMonitorsParamsUseLogicalAndFor string + +// PostSyntheticMonitorsJSONBody defines parameters for PostSyntheticMonitors. +type PostSyntheticMonitorsJSONBody struct { + union json.RawMessage +} + +// DeleteSyntheticMonitorsJSONBody defines parameters for DeleteSyntheticMonitors. +type DeleteSyntheticMonitorsJSONBody struct { + // Ids An array of monitor IDs to delete. + Ids []string `json:"ids"` +} + +// PutSyntheticMonitorJSONBody defines parameters for PutSyntheticMonitor. +type PutSyntheticMonitorJSONBody struct { + union json.RawMessage +} + +// PostParametersJSONBody defines parameters for PostParameters. +type PostParametersJSONBody struct { + union json.RawMessage +} + +// PostParametersJSONBody0 defines parameters for PostParameters. +type PostParametersJSONBody0 = []SyntheticsParameterRequest + +// DeleteParametersJSONBody defines parameters for DeleteParameters. 
+type DeleteParametersJSONBody = map[string]interface{} + +// PutParameterJSONBody defines parameters for PutParameter. +type PutParameterJSONBody struct { + // Description The updated description of the parameter. + Description *string `json:"description,omitempty"` + + // Key The key of the parameter. + Key *string `json:"key,omitempty"` + + // Tags An array of updated tags to categorize the parameter. + Tags *[]string `json:"tags,omitempty"` + + // Value The updated value associated with the parameter. + Value *string `json:"value,omitempty"` +} + +// PostPrivateLocationJSONBody defines parameters for PostPrivateLocation. +type PostPrivateLocationJSONBody struct { + // AgentPolicyId The ID of the agent policy associated with the private location. + AgentPolicyId string `json:"agentPolicyId"` + + // Geo Geographic coordinates (WGS84) for the location. + Geo *struct { + // Lat The latitude of the location. + Lat float32 `json:"lat"` + + // Lon The longitude of the location. + Lon float32 `json:"lon"` + } `json:"geo,omitempty"` + + // Label A label for the private location. + Label string `json:"label"` + + // Spaces An array of space IDs where the private location is available. If it is not provided, the private location is available in all spaces. + Spaces *[]string `json:"spaces,omitempty"` + + // Tags An array of tags to categorize the private location. + Tags *[]string `json:"tags,omitempty"` +} + +// PutPrivateLocationJSONBody defines parameters for PutPrivateLocation. +type PutPrivateLocationJSONBody struct { + // Label A new label for the private location. Must be at least 1 character long. + Label string `json:"label"` +} + +// DeleteTimelinesJSONBody defines parameters for DeleteTimelines. +type DeleteTimelinesJSONBody struct { + // SavedObjectIds The list of IDs of the Timelines or Timeline templates to delete + SavedObjectIds []string `json:"savedObjectIds"` + + // SearchIds Saved search IDs that should be deleted alongside the timelines + SearchIds *[]string `json:"searchIds,omitempty"` +} + +// GetTimelineParams defines parameters for GetTimeline. +type GetTimelineParams struct { + // TemplateTimelineId The `savedObjectId` of the template timeline to retrieve + TemplateTimelineId *string `form:"template_timeline_id,omitempty" json:"template_timeline_id,omitempty"` + + // Id The `savedObjectId` of the Timeline to retrieve. + Id *string `form:"id,omitempty" json:"id,omitempty"` +} + +// PatchTimelineJSONBody defines parameters for PatchTimeline. +type PatchTimelineJSONBody struct { + Timeline SecurityTimelineAPISavedTimeline `json:"timeline"` + + // TimelineId The `savedObjectId` of the Timeline or Timeline template that you’re updating. + TimelineId *string `json:"timelineId,omitempty"` + + // Version The version of the Timeline or Timeline template that you’re updating. + Version *string `json:"version,omitempty"` +} + +// CreateTimelinesJSONBody defines parameters for CreateTimelines. +type CreateTimelinesJSONBody struct { + // Status The status of the Timeline. + Status *SecurityTimelineAPITimelineStatus `json:"status,omitempty"` + + // TemplateTimelineId A unique identifier for the Timeline template. + TemplateTimelineId *string `json:"templateTimelineId,omitempty"` + + // TemplateTimelineVersion Timeline template version number. + TemplateTimelineVersion *float32 `json:"templateTimelineVersion,omitempty"` + Timeline SecurityTimelineAPISavedTimeline `json:"timeline"` + + // TimelineId A unique identifier for the Timeline. 
+ TimelineId *string `json:"timelineId,omitempty"` + + // TimelineType The type of Timeline. + TimelineType *SecurityTimelineAPITimelineType `json:"timelineType,omitempty"` + Version *string `json:"version,omitempty"` +} + +// CopyTimelineJSONBody defines parameters for CopyTimeline. +type CopyTimelineJSONBody struct { + Timeline SecurityTimelineAPISavedTimeline `json:"timeline"` + TimelineIdToCopy string `json:"timelineIdToCopy"` +} + +// GetDraftTimelinesParams defines parameters for GetDraftTimelines. +type GetDraftTimelinesParams struct { + TimelineType SecurityTimelineAPITimelineType `form:"timelineType" json:"timelineType"` +} + +// CleanDraftTimelinesJSONBody defines parameters for CleanDraftTimelines. +type CleanDraftTimelinesJSONBody struct { + // TimelineType The type of Timeline. + TimelineType SecurityTimelineAPITimelineType `json:"timelineType"` +} + +// ExportTimelinesJSONBody defines parameters for ExportTimelines. +type ExportTimelinesJSONBody struct { + Ids *[]string `json:"ids,omitempty"` +} + +// ExportTimelinesParams defines parameters for ExportTimelines. +type ExportTimelinesParams struct { + // FileName The name of the file to export + FileName string `form:"file_name" json:"file_name"` +} + +// PersistFavoriteRouteJSONBody defines parameters for PersistFavoriteRoute. +type PersistFavoriteRouteJSONBody struct { + TemplateTimelineId *string `json:"templateTimelineId,omitempty"` + TemplateTimelineVersion *float32 `json:"templateTimelineVersion,omitempty"` + TimelineId *string `json:"timelineId,omitempty"` + + // TimelineType The type of Timeline. + TimelineType SecurityTimelineAPITimelineType `json:"timelineType"` +} + +// ImportTimelinesJSONBody defines parameters for ImportTimelines. +type ImportTimelinesJSONBody struct { + File interface{} `json:"file"` + + // IsImmutable Whether the Timeline should be immutable + IsImmutable *ImportTimelinesJSONBodyIsImmutable `json:"isImmutable,omitempty"` +} + +// ImportTimelinesJSONBodyIsImmutable defines parameters for ImportTimelines. +type ImportTimelinesJSONBodyIsImmutable string + +// InstallPrepackedTimelinesJSONBody defines parameters for InstallPrepackedTimelines. +type InstallPrepackedTimelinesJSONBody struct { + PrepackagedTimelines []SecurityTimelineAPITimelineSavedToReturnObject `json:"prepackagedTimelines"` + TimelinesToInstall []SecurityTimelineAPIImportTimelines `json:"timelinesToInstall"` + TimelinesToUpdate []SecurityTimelineAPIImportTimelines `json:"timelinesToUpdate"` +} + +// ResolveTimelineParams defines parameters for ResolveTimeline. +type ResolveTimelineParams struct { + // TemplateTimelineId The ID of the template timeline to resolve + TemplateTimelineId *string `form:"template_timeline_id,omitempty" json:"template_timeline_id,omitempty"` + + // Id The ID of the timeline to resolve + Id *string `form:"id,omitempty" json:"id,omitempty"` +} + +// GetTimelinesParams defines parameters for GetTimelines. +type GetTimelinesParams struct { + // OnlyUserFavorite If true, only timelines that are marked as favorites by the user are returned. 
+ OnlyUserFavorite *GetTimelinesParamsOnlyUserFavorite `form:"only_user_favorite,omitempty" json:"only_user_favorite,omitempty"`
+ TimelineType *SecurityTimelineAPITimelineType `form:"timeline_type,omitempty" json:"timeline_type,omitempty"`
+ SortField *SecurityTimelineAPISortFieldTimeline `form:"sort_field,omitempty" json:"sort_field,omitempty"`
+
+ // SortOrder Whether to sort the results `ascending` or `descending`
+ SortOrder *GetTimelinesParamsSortOrder `form:"sort_order,omitempty" json:"sort_order,omitempty"`
+
+ // PageSize How many results should be returned at once
+ PageSize *string `form:"page_size,omitempty" json:"page_size,omitempty"`
+
+ // PageIndex How many pages should be skipped
+ PageIndex *string `form:"page_index,omitempty" json:"page_index,omitempty"`
+
+ // Search Allows searching for timelines by their title
+ Search *string `form:"search,omitempty" json:"search,omitempty"`
+ Status *SecurityTimelineAPITimelineStatus `form:"status,omitempty" json:"status,omitempty"`
+}
+
+// GetTimelinesParamsOnlyUserFavorite defines parameters for GetTimelines.
+type GetTimelinesParamsOnlyUserFavorite string
+
+// GetTimelinesParamsSortOrder defines parameters for GetTimelines.
+type GetTimelinesParamsSortOrder string
+
+// PutUptimeSettingsJSONBody defines parameters for PutUptimeSettings.
+type PutUptimeSettingsJSONBody struct {
+ // CertAgeThreshold The number of days after a certificate is created to trigger an alert.
+ CertAgeThreshold *float32 `json:"certAgeThreshold,omitempty"`
+
+ // CertExpirationThreshold The number of days before a certificate expires to trigger an alert.
+ CertExpirationThreshold *float32 `json:"certExpirationThreshold,omitempty"`
+
+ // DefaultConnectors A list of connector IDs to be used as default connectors for new alerts.
+ DefaultConnectors *[]interface{} `json:"defaultConnectors,omitempty"`
+
+ // DefaultEmail The default email configuration for new alerts.
+ DefaultEmail *struct {
+ Bcc *[]string `json:"bcc,omitempty"`
+ Cc *[]string `json:"cc,omitempty"`
+ To *[]string `json:"to,omitempty"`
+ } `json:"defaultEmail,omitempty"`
+
+ // HeartbeatIndices An index pattern string to be used within the Uptime app and alerts to query Heartbeat data.
+ HeartbeatIndices *string `json:"heartbeatIndices,omitempty"`
+}
+
+// PostActionsConnectorIdJSONBody defines parameters for PostActionsConnectorId.
+type PostActionsConnectorIdJSONBody struct {
+ // Config The connector configuration details.
+ Config *CreateConnectorConfig `json:"config,omitempty"`
+
+ // ConnectorTypeId The type of connector.
+ ConnectorTypeId string `json:"connector_type_id"`
+
+ // Name The display name for the connector.
+ Name string `json:"name"`
+ Secrets *CreateConnectorSecrets `json:"secrets,omitempty"`
+}
+
+// PutActionsConnectorIdJSONBody defines parameters for PutActionsConnectorId.
+type PutActionsConnectorIdJSONBody struct {
+ // Config The connector configuration details.
+ Config *UpdateConnectorConfig `json:"config,omitempty"`
+
+ // Name The display name for the connector.
+ Name string `json:"name"`
+ Secrets *UpdateConnectorSecrets `json:"secrets,omitempty"`
+}
+
+// PostMaintenanceWindowJSONBody defines parameters for PostMaintenanceWindow.
+type PostMaintenanceWindowJSONBody struct {
+ // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications.
+ Enabled *bool `json:"enabled,omitempty"`
+ Schedule struct {
+ Custom struct {
+ // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for days, hours, minutes, or seconds. For example: `1d`, `5h`, `30m`, `5000s`.
+ Duration string `json:"duration"`
+ Recurring *struct {
+ // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`.
+ End *string `json:"end,omitempty"`
+
+ // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`.
+ Every *string `json:"every,omitempty"`
+
+ // Occurrences The total number of recurrences of the schedule.
+ Occurrences *float32 `json:"occurrences,omitempty"`
+
+ // OnMonth The specific months for a recurring schedule. Valid values are 1-12.
+ OnMonth *[]float32 `json:"onMonth,omitempty"`
+
+ // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31.
+ OnMonthDay *[]float32 `json:"onMonthDay,omitempty"`
+
+ // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule.
+ OnWeekDay *[]string `json:"onWeekDay,omitempty"`
+ } `json:"recurring,omitempty"`
+
+ // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`.
+ Start string `json:"start"`
+
+ // Timezone The timezone of the schedule. The default timezone is UTC.
+ Timezone *string `json:"timezone,omitempty"`
+ } `json:"custom"`
+ } `json:"schedule"`
+ Scope *struct {
+ Alerting struct {
+ Query struct {
+ // Kql A filter written in Kibana Query Language (KQL). Only alerts matching this query will be suppressed by the maintenance window.
+ Kql string `json:"kql"`
+ } `json:"query"`
+ } `json:"alerting"`
+ } `json:"scope,omitempty"`
+
+ // Title The name of the maintenance window. While this name does not have to be unique, a distinctive name can help you identify a specific maintenance window.
+ Title string `json:"title"`
+}
+
+// PatchMaintenanceWindowIdJSONBody defines parameters for PatchMaintenanceWindowId.
+type PatchMaintenanceWindowIdJSONBody struct {
+ // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications.
+ Enabled *bool `json:"enabled,omitempty"`
+ Schedule *struct {
+ Custom struct {
+ // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for days, hours, minutes, or seconds. For example: `1d`, `5h`, `30m`, `5000s`.
+ Duration string `json:"duration"`
+ Recurring *struct {
+ // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`.
+ End *string `json:"end,omitempty"`
+
+ // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`.
+ Every *string `json:"every,omitempty"`
+
+ // Occurrences The total number of recurrences of the schedule.
+ Occurrences *float32 `json:"occurrences,omitempty"`
+
+ // OnMonth The specific months for a recurring schedule. Valid values are 1-12.
+ OnMonth *[]float32 `json:"onMonth,omitempty"`
+
+ // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31.
+ OnMonthDay *[]float32 `json:"onMonthDay,omitempty"`
+
+ // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule.
+ OnWeekDay *[]string `json:"onWeekDay,omitempty"`
+ } `json:"recurring,omitempty"`
+
+ // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`.
+ Start string `json:"start"`
+
+ // Timezone The timezone of the schedule. The default timezone is UTC.
+ Timezone *string `json:"timezone,omitempty"`
+ } `json:"custom"`
+ } `json:"schedule,omitempty"`
+ Scope *struct {
+ Alerting struct {
+ Query struct {
+ // Kql A filter written in Kibana Query Language (KQL). Only alerts matching this query will be suppressed by the maintenance window.
+ Kql string `json:"kql"`
+ } `json:"query"`
+ } `json:"alerting"`
+ } `json:"scope,omitempty"`
+
+ // Title The name of the maintenance window. While this name does not have to be unique, a distinctive name can help you identify a specific maintenance window.
+ Title *string `json:"title,omitempty"`
+}
+
+// FindSlosOpParams defines parameters for FindSlosOp.
+type FindSlosOpParams struct {
+ // KqlQuery A valid KQL query to filter the SLOs with
+ KqlQuery *string `form:"kqlQuery,omitempty" json:"kqlQuery,omitempty"`
+
+ // Size The page size to use for cursor-based pagination, must be greater than or equal to 1
+ Size *int `form:"size,omitempty" json:"size,omitempty"`
+
+ // SearchAfter The cursor to use for fetching the results from, when using cursor-based pagination.
+ SearchAfter *[]string `form:"searchAfter,omitempty" json:"searchAfter,omitempty"`
+
+ // Page The page to use for pagination, must be greater than or equal to 1
+ Page *int `form:"page,omitempty" json:"page,omitempty"`
+
+ // PerPage Number of SLOs returned per page
+ PerPage *int `form:"perPage,omitempty" json:"perPage,omitempty"`
+
+ // SortBy Sort by field
+ SortBy *FindSlosOpParamsSortBy `form:"sortBy,omitempty" json:"sortBy,omitempty"`
+
+ // SortDirection Sort order
+ SortDirection *FindSlosOpParamsSortDirection `form:"sortDirection,omitempty" json:"sortDirection,omitempty"`
+
+ // HideStale Hide stale SLOs from the list as defined by the stale SLO threshold in SLO settings
+ HideStale *bool `form:"hideStale,omitempty" json:"hideStale,omitempty"`
+}
+
+// FindSlosOpParamsSortBy defines parameters for FindSlosOp.
+type FindSlosOpParamsSortBy string
+
+// FindSlosOpParamsSortDirection defines parameters for FindSlosOp.
+type FindSlosOpParamsSortDirection string
+
+// GetSloOpParams defines parameters for GetSloOp.
+type GetSloOpParams struct {
+ // InstanceId The specific instanceId used by the summary calculation
+ InstanceId *string `form:"instanceId,omitempty" json:"instanceId,omitempty"`
+}
+
+// GetDefinitionsOpParams defines parameters for GetDefinitionsOp.
+type GetDefinitionsOpParams struct {
+ // IncludeOutdatedOnly Indicates if the API returns only outdated SLOs or all SLO definitions
+ IncludeOutdatedOnly *bool `form:"includeOutdatedOnly,omitempty" json:"includeOutdatedOnly,omitempty"`
+
+ // Tags Filters the SLOs by tag
+ Tags *string `form:"tags,omitempty" json:"tags,omitempty"`
+
+ // Search Filters the SLOs by name
+ Search *string `form:"search,omitempty" json:"search,omitempty"`
+
+ // Page The page to use for pagination, must be greater than or equal to 1
+ Page *float32 `form:"page,omitempty" json:"page,omitempty"`
+
+ // PerPage Number of SLOs returned per page
+ PerPage *int `form:"perPage,omitempty" json:"perPage,omitempty"`
+}
+
+// PostActionsConnectorIdExecuteJSONRequestBody defines body for PostActionsConnectorIdExecute for application/json ContentType.
+type PostActionsConnectorIdExecuteJSONRequestBody PostActionsConnectorIdExecuteJSONBody
+
+// PostAlertingRuleIdJSONRequestBody defines body for PostAlertingRuleId for application/json ContentType.
+type PostAlertingRuleIdJSONRequestBody PostAlertingRuleIdJSONBody
+
+// PutAlertingRuleIdJSONRequestBody defines body for PutAlertingRuleId for application/json ContentType.
+type PutAlertingRuleIdJSONRequestBody PutAlertingRuleIdJSONBody
+
+// PostAlertingRuleIdDisableJSONRequestBody defines body for PostAlertingRuleIdDisable for application/json ContentType.
+type PostAlertingRuleIdDisableJSONRequestBody PostAlertingRuleIdDisableJSONBody
+
+// PostAlertingRuleIdSnoozeScheduleJSONRequestBody defines body for PostAlertingRuleIdSnoozeSchedule for application/json ContentType.
+type PostAlertingRuleIdSnoozeScheduleJSONRequestBody PostAlertingRuleIdSnoozeScheduleJSONBody
+
+// CreateAgentKeyJSONRequestBody defines body for CreateAgentKey for application/json ContentType.
+type CreateAgentKeyJSONRequestBody = APMUIAgentKeysObject
+
+// SaveApmServerSchemaJSONRequestBody defines body for SaveApmServerSchema for application/json ContentType.
+type SaveApmServerSchemaJSONRequestBody SaveApmServerSchemaJSONBody
+
+// CreateAnnotationJSONRequestBody defines body for CreateAnnotation for application/json ContentType.
+type CreateAnnotationJSONRequestBody = APMUICreateAnnotationObject
+
+// DeleteAgentConfigurationJSONRequestBody defines body for DeleteAgentConfiguration for application/json ContentType.
+type DeleteAgentConfigurationJSONRequestBody = APMUIDeleteServiceObject
+
+// CreateUpdateAgentConfigurationJSONRequestBody defines body for CreateUpdateAgentConfiguration for application/json ContentType.
+type CreateUpdateAgentConfigurationJSONRequestBody = APMUIAgentConfigurationIntakeObject
+
+// SearchSingleConfigurationJSONRequestBody defines body for SearchSingleConfiguration for application/json ContentType.
+type SearchSingleConfigurationJSONRequestBody = APMUISearchAgentConfigurationObject
+
+// UploadSourceMapMultipartRequestBody defines body for UploadSourceMap for multipart/form-data ContentType.
+type UploadSourceMapMultipartRequestBody = APMUIUploadSourceMapObject
+
+// CreateAssetCriticalityRecordJSONRequestBody defines body for CreateAssetCriticalityRecord for application/json ContentType.
+type CreateAssetCriticalityRecordJSONRequestBody CreateAssetCriticalityRecordJSONBody
+
+// BulkUpsertAssetCriticalityRecordsJSONRequestBody defines body for BulkUpsertAssetCriticalityRecords for application/json ContentType.
+type BulkUpsertAssetCriticalityRecordsJSONRequestBody BulkUpsertAssetCriticalityRecordsJSONBody + +// UpdateCaseDefaultSpaceJSONRequestBody defines body for UpdateCaseDefaultSpace for application/json ContentType. +type UpdateCaseDefaultSpaceJSONRequestBody = CasesUpdateCaseRequest + +// CreateCaseDefaultSpaceJSONRequestBody defines body for CreateCaseDefaultSpace for application/json ContentType. +type CreateCaseDefaultSpaceJSONRequestBody = CasesCreateCaseRequest + +// SetCaseConfigurationDefaultSpaceJSONRequestBody defines body for SetCaseConfigurationDefaultSpace for application/json ContentType. +type SetCaseConfigurationDefaultSpaceJSONRequestBody = CasesSetCaseConfigurationRequest + +// UpdateCaseConfigurationDefaultSpaceJSONRequestBody defines body for UpdateCaseConfigurationDefaultSpace for application/json ContentType. +type UpdateCaseConfigurationDefaultSpaceJSONRequestBody = CasesUpdateCaseConfigurationRequest + +// UpdateCaseCommentDefaultSpaceJSONRequestBody defines body for UpdateCaseCommentDefaultSpace for application/json ContentType. +type UpdateCaseCommentDefaultSpaceJSONRequestBody = CasesUpdateCaseCommentRequest + +// AddCaseCommentDefaultSpaceJSONRequestBody defines body for AddCaseCommentDefaultSpace for application/json ContentType. +type AddCaseCommentDefaultSpaceJSONRequestBody = CasesAddCaseCommentRequest + +// PushCaseDefaultSpaceJSONRequestBody defines body for PushCaseDefaultSpace for application/json ContentType. +type PushCaseDefaultSpaceJSONRequestBody = PushCaseDefaultSpaceJSONBody + +// AddCaseFileDefaultSpaceMultipartRequestBody defines body for AddCaseFileDefaultSpace for multipart/form-data ContentType. +type AddCaseFileDefaultSpaceMultipartRequestBody = CasesAddCaseFileRequest + +// UpdateFieldsMetadataDefaultJSONRequestBody defines body for UpdateFieldsMetadataDefault for application/json ContentType. +type UpdateFieldsMetadataDefaultJSONRequestBody UpdateFieldsMetadataDefaultJSONBody + +// CreateRuntimeFieldDefaultJSONRequestBody defines body for CreateRuntimeFieldDefault for application/json ContentType. +type CreateRuntimeFieldDefaultJSONRequestBody CreateRuntimeFieldDefaultJSONBody + +// CreateUpdateRuntimeFieldDefaultJSONRequestBody defines body for CreateUpdateRuntimeFieldDefault for application/json ContentType. +type CreateUpdateRuntimeFieldDefaultJSONRequestBody CreateUpdateRuntimeFieldDefaultJSONBody + +// UpdateRuntimeFieldDefaultJSONRequestBody defines body for UpdateRuntimeFieldDefault for application/json ContentType. +type UpdateRuntimeFieldDefaultJSONRequestBody UpdateRuntimeFieldDefaultJSONBody + +// SetDefaultDatailViewDefaultJSONRequestBody defines body for SetDefaultDatailViewDefault for application/json ContentType. +type SetDefaultDatailViewDefaultJSONRequestBody SetDefaultDatailViewDefaultJSONBody + +// SwapDataViewsDefaultJSONRequestBody defines body for SwapDataViewsDefault for application/json ContentType. +type SwapDataViewsDefaultJSONRequestBody = DataViewsSwapDataViewRequestObject + +// PreviewSwapDataViewsDefaultJSONRequestBody defines body for PreviewSwapDataViewsDefault for application/json ContentType. +type PreviewSwapDataViewsDefaultJSONRequestBody = DataViewsSwapDataViewRequestObject + +// PatchRuleJSONRequestBody defines body for PatchRule for application/json ContentType. +type PatchRuleJSONRequestBody = SecurityDetectionsAPIRulePatchProps + +// CreateRuleJSONRequestBody defines body for CreateRule for application/json ContentType. 
+type CreateRuleJSONRequestBody = SecurityDetectionsAPIRuleCreateProps + +// UpdateRuleJSONRequestBody defines body for UpdateRule for application/json ContentType. +type UpdateRuleJSONRequestBody = SecurityDetectionsAPIRuleUpdateProps + +// PerformRulesBulkActionJSONRequestBody defines body for PerformRulesBulkAction for application/json ContentType. +type PerformRulesBulkActionJSONRequestBody PerformRulesBulkActionJSONBody + +// ExportRulesJSONRequestBody defines body for ExportRules for application/json ContentType. +type ExportRulesJSONRequestBody ExportRulesJSONBody + +// ImportRulesMultipartRequestBody defines body for ImportRules for multipart/form-data ContentType. +type ImportRulesMultipartRequestBody ImportRulesMultipartBody + +// RulePreviewJSONRequestBody defines body for RulePreview for application/json ContentType. +type RulePreviewJSONRequestBody RulePreviewJSONBody + +// CreateRuleExceptionListItemsJSONRequestBody defines body for CreateRuleExceptionListItems for application/json ContentType. +type CreateRuleExceptionListItemsJSONRequestBody CreateRuleExceptionListItemsJSONBody + +// SetAlertAssigneesJSONRequestBody defines body for SetAlertAssignees for application/json ContentType. +type SetAlertAssigneesJSONRequestBody SetAlertAssigneesJSONBody + +// FinalizeAlertsMigrationJSONRequestBody defines body for FinalizeAlertsMigration for application/json ContentType. +type FinalizeAlertsMigrationJSONRequestBody FinalizeAlertsMigrationJSONBody + +// AlertsMigrationCleanupJSONRequestBody defines body for AlertsMigrationCleanup for application/json ContentType. +type AlertsMigrationCleanupJSONRequestBody AlertsMigrationCleanupJSONBody + +// CreateAlertsMigrationJSONRequestBody defines body for CreateAlertsMigration for application/json ContentType. +type CreateAlertsMigrationJSONRequestBody CreateAlertsMigrationJSONBody + +// SearchAlertsJSONRequestBody defines body for SearchAlerts for application/json ContentType. +type SearchAlertsJSONRequestBody SearchAlertsJSONBody + +// SetAlertsStatusJSONRequestBody defines body for SetAlertsStatus for application/json ContentType. +type SetAlertsStatusJSONRequestBody SetAlertsStatusJSONBody + +// SetAlertTagsJSONRequestBody defines body for SetAlertTags for application/json ContentType. +type SetAlertTagsJSONRequestBody SetAlertTagsJSONBody + +// EndpointExecuteActionJSONRequestBody defines body for EndpointExecuteAction for application/json ContentType. +type EndpointExecuteActionJSONRequestBody = SecurityEndpointManagementAPIExecuteRouteRequestBody + +// EndpointGetFileActionJSONRequestBody defines body for EndpointGetFileAction for application/json ContentType. +type EndpointGetFileActionJSONRequestBody = SecurityEndpointManagementAPIGetFileRouteRequestBody + +// EndpointIsolateActionJSONRequestBody defines body for EndpointIsolateAction for application/json ContentType. +type EndpointIsolateActionJSONRequestBody EndpointIsolateActionJSONBody + +// EndpointKillProcessActionJSONRequestBody defines body for EndpointKillProcessAction for application/json ContentType. +type EndpointKillProcessActionJSONRequestBody = SecurityEndpointManagementAPIKillProcessRouteRequestBody + +// EndpointGetProcessesActionJSONRequestBody defines body for EndpointGetProcessesAction for application/json ContentType. +type EndpointGetProcessesActionJSONRequestBody = SecurityEndpointManagementAPIGetProcessesRouteRequestBody + +// RunScriptActionJSONRequestBody defines body for RunScriptAction for application/json ContentType. 
+type RunScriptActionJSONRequestBody = SecurityEndpointManagementAPIRunScriptRouteRequestBody + +// EndpointScanActionJSONRequestBody defines body for EndpointScanAction for application/json ContentType. +type EndpointScanActionJSONRequestBody = SecurityEndpointManagementAPIScanRouteRequestBody + +// EndpointSuspendProcessActionJSONRequestBody defines body for EndpointSuspendProcessAction for application/json ContentType. +type EndpointSuspendProcessActionJSONRequestBody = SecurityEndpointManagementAPISuspendProcessRouteRequestBody + +// EndpointUnisolateActionJSONRequestBody defines body for EndpointUnisolateAction for application/json ContentType. +type EndpointUnisolateActionJSONRequestBody EndpointUnisolateActionJSONBody + +// EndpointUploadActionMultipartRequestBody defines body for EndpointUploadAction for multipart/form-data ContentType. +type EndpointUploadActionMultipartRequestBody = SecurityEndpointManagementAPIUploadRouteRequestBody + +// CreateUpdateProtectionUpdatesNoteJSONRequestBody defines body for CreateUpdateProtectionUpdatesNote for application/json ContentType. +type CreateUpdateProtectionUpdatesNoteJSONRequestBody CreateUpdateProtectionUpdatesNoteJSONBody + +// CreateEndpointListItemJSONRequestBody defines body for CreateEndpointListItem for application/json ContentType. +type CreateEndpointListItemJSONRequestBody CreateEndpointListItemJSONBody + +// UpdateEndpointListItemJSONRequestBody defines body for UpdateEndpointListItem for application/json ContentType. +type UpdateEndpointListItemJSONRequestBody UpdateEndpointListItemJSONBody + +// CreatePrivMonUserJSONRequestBody defines body for CreatePrivMonUser for application/json ContentType. +type CreatePrivMonUserJSONRequestBody = SecurityEntityAnalyticsAPIUserName + +// PrivmonBulkUploadUsersCSVMultipartRequestBody defines body for PrivmonBulkUploadUsersCSV for multipart/form-data ContentType. +type PrivmonBulkUploadUsersCSVMultipartRequestBody PrivmonBulkUploadUsersCSVMultipartBody + +// UpdatePrivMonUserJSONRequestBody defines body for UpdatePrivMonUser for application/json ContentType. +type UpdatePrivMonUserJSONRequestBody = SecurityEntityAnalyticsAPIMonitoredUserDoc + +// InitEntityStoreJSONRequestBody defines body for InitEntityStore for application/json ContentType. +type InitEntityStoreJSONRequestBody InitEntityStoreJSONBody + +// InitEntityEngineJSONRequestBody defines body for InitEntityEngine for application/json ContentType. +type InitEntityEngineJSONRequestBody InitEntityEngineJSONBody + +// CreateExceptionListJSONRequestBody defines body for CreateExceptionList for application/json ContentType. +type CreateExceptionListJSONRequestBody CreateExceptionListJSONBody + +// UpdateExceptionListJSONRequestBody defines body for UpdateExceptionList for application/json ContentType. +type UpdateExceptionListJSONRequestBody UpdateExceptionListJSONBody + +// ImportExceptionListMultipartRequestBody defines body for ImportExceptionList for multipart/form-data ContentType. +type ImportExceptionListMultipartRequestBody ImportExceptionListMultipartBody + +// CreateExceptionListItemJSONRequestBody defines body for CreateExceptionListItem for application/json ContentType. +type CreateExceptionListItemJSONRequestBody CreateExceptionListItemJSONBody + +// UpdateExceptionListItemJSONRequestBody defines body for UpdateExceptionListItem for application/json ContentType. 
+type UpdateExceptionListItemJSONRequestBody UpdateExceptionListItemJSONBody + +// CreateSharedExceptionListJSONRequestBody defines body for CreateSharedExceptionList for application/json ContentType. +type CreateSharedExceptionListJSONRequestBody CreateSharedExceptionListJSONBody + +// PostFleetAgentDownloadSourcesJSONRequestBody defines body for PostFleetAgentDownloadSources for application/json ContentType. +type PostFleetAgentDownloadSourcesJSONRequestBody PostFleetAgentDownloadSourcesJSONBody + +// PutFleetAgentDownloadSourcesSourceidJSONRequestBody defines body for PutFleetAgentDownloadSourcesSourceid for application/json ContentType. +type PutFleetAgentDownloadSourcesSourceidJSONRequestBody PutFleetAgentDownloadSourcesSourceidJSONBody + +// PostFleetAgentPoliciesJSONRequestBody defines body for PostFleetAgentPolicies for application/json ContentType. +type PostFleetAgentPoliciesJSONRequestBody PostFleetAgentPoliciesJSONBody + +// PostFleetAgentPoliciesBulkGetJSONRequestBody defines body for PostFleetAgentPoliciesBulkGet for application/json ContentType. +type PostFleetAgentPoliciesBulkGetJSONRequestBody PostFleetAgentPoliciesBulkGetJSONBody + +// PostFleetAgentPoliciesDeleteJSONRequestBody defines body for PostFleetAgentPoliciesDelete for application/json ContentType. +type PostFleetAgentPoliciesDeleteJSONRequestBody PostFleetAgentPoliciesDeleteJSONBody + +// PostFleetAgentPoliciesOutputsJSONRequestBody defines body for PostFleetAgentPoliciesOutputs for application/json ContentType. +type PostFleetAgentPoliciesOutputsJSONRequestBody PostFleetAgentPoliciesOutputsJSONBody + +// PutFleetAgentPoliciesAgentpolicyidJSONRequestBody defines body for PutFleetAgentPoliciesAgentpolicyid for application/json ContentType. +type PutFleetAgentPoliciesAgentpolicyidJSONRequestBody PutFleetAgentPoliciesAgentpolicyidJSONBody + +// PostFleetAgentPoliciesAgentpolicyidCopyJSONRequestBody defines body for PostFleetAgentPoliciesAgentpolicyidCopy for application/json ContentType. +type PostFleetAgentPoliciesAgentpolicyidCopyJSONRequestBody PostFleetAgentPoliciesAgentpolicyidCopyJSONBody + +// PostFleetAgentsJSONRequestBody defines body for PostFleetAgents for application/json ContentType. +type PostFleetAgentsJSONRequestBody PostFleetAgentsJSONBody + +// PostFleetAgentsBulkReassignJSONRequestBody defines body for PostFleetAgentsBulkReassign for application/json ContentType. +type PostFleetAgentsBulkReassignJSONRequestBody PostFleetAgentsBulkReassignJSONBody + +// PostFleetAgentsBulkRequestDiagnosticsJSONRequestBody defines body for PostFleetAgentsBulkRequestDiagnostics for application/json ContentType. +type PostFleetAgentsBulkRequestDiagnosticsJSONRequestBody PostFleetAgentsBulkRequestDiagnosticsJSONBody + +// PostFleetAgentsBulkUnenrollJSONRequestBody defines body for PostFleetAgentsBulkUnenroll for application/json ContentType. +type PostFleetAgentsBulkUnenrollJSONRequestBody PostFleetAgentsBulkUnenrollJSONBody + +// PostFleetAgentsBulkUpdateAgentTagsJSONRequestBody defines body for PostFleetAgentsBulkUpdateAgentTags for application/json ContentType. +type PostFleetAgentsBulkUpdateAgentTagsJSONRequestBody PostFleetAgentsBulkUpdateAgentTagsJSONBody + +// PostFleetAgentsBulkUpgradeJSONRequestBody defines body for PostFleetAgentsBulkUpgrade for application/json ContentType. +type PostFleetAgentsBulkUpgradeJSONRequestBody PostFleetAgentsBulkUpgradeJSONBody + +// PutFleetAgentsAgentidJSONRequestBody defines body for PutFleetAgentsAgentid for application/json ContentType. 
+type PutFleetAgentsAgentidJSONRequestBody PutFleetAgentsAgentidJSONBody + +// PostFleetAgentsAgentidActionsJSONRequestBody defines body for PostFleetAgentsAgentidActions for application/json ContentType. +type PostFleetAgentsAgentidActionsJSONRequestBody PostFleetAgentsAgentidActionsJSONBody + +// PostFleetAgentsAgentidReassignJSONRequestBody defines body for PostFleetAgentsAgentidReassign for application/json ContentType. +type PostFleetAgentsAgentidReassignJSONRequestBody PostFleetAgentsAgentidReassignJSONBody + +// PostFleetAgentsAgentidRequestDiagnosticsJSONRequestBody defines body for PostFleetAgentsAgentidRequestDiagnostics for application/json ContentType. +type PostFleetAgentsAgentidRequestDiagnosticsJSONRequestBody PostFleetAgentsAgentidRequestDiagnosticsJSONBody + +// PostFleetAgentsAgentidUnenrollJSONRequestBody defines body for PostFleetAgentsAgentidUnenroll for application/json ContentType. +type PostFleetAgentsAgentidUnenrollJSONRequestBody PostFleetAgentsAgentidUnenrollJSONBody + +// PostFleetAgentsAgentidUpgradeJSONRequestBody defines body for PostFleetAgentsAgentidUpgrade for application/json ContentType. +type PostFleetAgentsAgentidUpgradeJSONRequestBody PostFleetAgentsAgentidUpgradeJSONBody + +// PostFleetEnrollmentApiKeysJSONRequestBody defines body for PostFleetEnrollmentApiKeys for application/json ContentType. +type PostFleetEnrollmentApiKeysJSONRequestBody PostFleetEnrollmentApiKeysJSONBody + +// PostFleetEpmBulkAssetsJSONRequestBody defines body for PostFleetEpmBulkAssets for application/json ContentType. +type PostFleetEpmBulkAssetsJSONRequestBody PostFleetEpmBulkAssetsJSONBody + +// PostFleetEpmCustomIntegrationsJSONRequestBody defines body for PostFleetEpmCustomIntegrations for application/json ContentType. +type PostFleetEpmCustomIntegrationsJSONRequestBody PostFleetEpmCustomIntegrationsJSONBody + +// PutFleetEpmCustomIntegrationsPkgnameJSONRequestBody defines body for PutFleetEpmCustomIntegrationsPkgname for application/json ContentType. +type PutFleetEpmCustomIntegrationsPkgnameJSONRequestBody PutFleetEpmCustomIntegrationsPkgnameJSONBody + +// PostFleetEpmPackagesBulkJSONRequestBody defines body for PostFleetEpmPackagesBulk for application/json ContentType. +type PostFleetEpmPackagesBulkJSONRequestBody PostFleetEpmPackagesBulkJSONBody + +// PostFleetEpmPackagesBulkUninstallJSONRequestBody defines body for PostFleetEpmPackagesBulkUninstall for application/json ContentType. +type PostFleetEpmPackagesBulkUninstallJSONRequestBody PostFleetEpmPackagesBulkUninstallJSONBody + +// PostFleetEpmPackagesBulkUpgradeJSONRequestBody defines body for PostFleetEpmPackagesBulkUpgrade for application/json ContentType. +type PostFleetEpmPackagesBulkUpgradeJSONRequestBody PostFleetEpmPackagesBulkUpgradeJSONBody + +// PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody defines body for PostFleetEpmPackagesPkgnamePkgversion for application/json ContentType. +type PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody PostFleetEpmPackagesPkgnamePkgversionJSONBody + +// PutFleetEpmPackagesPkgnamePkgversionJSONRequestBody defines body for PutFleetEpmPackagesPkgnamePkgversion for application/json ContentType. +type PutFleetEpmPackagesPkgnamePkgversionJSONRequestBody PutFleetEpmPackagesPkgnamePkgversionJSONBody + +// PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsJSONRequestBody defines body for PostFleetEpmPackagesPkgnamePkgversionKibanaAssets for application/json ContentType. 
+type PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsJSONRequestBody PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsJSONBody + +// PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeJSONRequestBody defines body for PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorize for application/json ContentType. +type PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeJSONRequestBody PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeJSONBody + +// PostFleetFleetServerHostsJSONRequestBody defines body for PostFleetFleetServerHosts for application/json ContentType. +type PostFleetFleetServerHostsJSONRequestBody PostFleetFleetServerHostsJSONBody + +// PutFleetFleetServerHostsItemidJSONRequestBody defines body for PutFleetFleetServerHostsItemid for application/json ContentType. +type PutFleetFleetServerHostsItemidJSONRequestBody PutFleetFleetServerHostsItemidJSONBody + +// PostFleetHealthCheckJSONRequestBody defines body for PostFleetHealthCheck for application/json ContentType. +type PostFleetHealthCheckJSONRequestBody PostFleetHealthCheckJSONBody + +// PostFleetOutputsJSONRequestBody defines body for PostFleetOutputs for application/json ContentType. +type PostFleetOutputsJSONRequestBody = NewOutputUnion + +// PutFleetOutputsOutputidJSONRequestBody defines body for PutFleetOutputsOutputid for application/json ContentType. +type PutFleetOutputsOutputidJSONRequestBody = UpdateOutputUnion + +// PostFleetPackagePoliciesJSONRequestBody defines body for PostFleetPackagePolicies for application/json ContentType. +type PostFleetPackagePoliciesJSONRequestBody = PackagePolicyRequest + +// PostFleetPackagePoliciesBulkGetJSONRequestBody defines body for PostFleetPackagePoliciesBulkGet for application/json ContentType. +type PostFleetPackagePoliciesBulkGetJSONRequestBody PostFleetPackagePoliciesBulkGetJSONBody + +// PostFleetPackagePoliciesDeleteJSONRequestBody defines body for PostFleetPackagePoliciesDelete for application/json ContentType. +type PostFleetPackagePoliciesDeleteJSONRequestBody PostFleetPackagePoliciesDeleteJSONBody + +// PostFleetPackagePoliciesUpgradeJSONRequestBody defines body for PostFleetPackagePoliciesUpgrade for application/json ContentType. +type PostFleetPackagePoliciesUpgradeJSONRequestBody PostFleetPackagePoliciesUpgradeJSONBody + +// PostFleetPackagePoliciesUpgradeDryrunJSONRequestBody defines body for PostFleetPackagePoliciesUpgradeDryrun for application/json ContentType. +type PostFleetPackagePoliciesUpgradeDryrunJSONRequestBody PostFleetPackagePoliciesUpgradeDryrunJSONBody + +// PutFleetPackagePoliciesPackagepolicyidJSONRequestBody defines body for PutFleetPackagePoliciesPackagepolicyid for application/json ContentType. +type PutFleetPackagePoliciesPackagepolicyidJSONRequestBody = PackagePolicyRequest + +// PostFleetProxiesJSONRequestBody defines body for PostFleetProxies for application/json ContentType. +type PostFleetProxiesJSONRequestBody PostFleetProxiesJSONBody + +// PutFleetProxiesItemidJSONRequestBody defines body for PutFleetProxiesItemid for application/json ContentType. +type PutFleetProxiesItemidJSONRequestBody PutFleetProxiesItemidJSONBody + +// PostFleetServiceTokensJSONRequestBody defines body for PostFleetServiceTokens for application/json ContentType. +type PostFleetServiceTokensJSONRequestBody PostFleetServiceTokensJSONBody + +// PutFleetSettingsJSONRequestBody defines body for PutFleetSettings for application/json ContentType. 
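Two declaration shapes recur in the generated request-body types above. A declaration written with "=" (for example PostFleetOutputsJSONRequestBody = NewOutputUnion) is a type alias, so a value of the underlying type can be used directly; a declaration without "=" (for example PostFleetHealthCheckJSONRequestBody PostFleetHealthCheckJSONBody) introduces a new defined type and requires an explicit conversion. A minimal sketch, assuming it sits in the same generated package; the function and variable names are illustrative only:

func buildRequestBodies(out NewOutputUnion, check PostFleetHealthCheckJSONBody) {
	// Alias ("="): the two names are interchangeable, no conversion needed.
	var outputBody PostFleetOutputsJSONRequestBody = out

	// Defined type (no "="): convert explicitly from the underlying body type.
	healthBody := PostFleetHealthCheckJSONRequestBody(check)

	_, _ = outputBody, healthBody
}
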
+type PutFleetSettingsJSONRequestBody PutFleetSettingsJSONBody + +// PutFleetSpaceSettingsJSONRequestBody defines body for PutFleetSpaceSettings for application/json ContentType. +type PutFleetSpaceSettingsJSONRequestBody PutFleetSpaceSettingsJSONBody + +// PatchListJSONRequestBody defines body for PatchList for application/json ContentType. +type PatchListJSONRequestBody PatchListJSONBody + +// CreateListJSONRequestBody defines body for CreateList for application/json ContentType. +type CreateListJSONRequestBody CreateListJSONBody + +// UpdateListJSONRequestBody defines body for UpdateList for application/json ContentType. +type UpdateListJSONRequestBody UpdateListJSONBody + +// PatchListItemJSONRequestBody defines body for PatchListItem for application/json ContentType. +type PatchListItemJSONRequestBody PatchListItemJSONBody + +// CreateListItemJSONRequestBody defines body for CreateListItem for application/json ContentType. +type CreateListItemJSONRequestBody CreateListItemJSONBody + +// UpdateListItemJSONRequestBody defines body for UpdateListItem for application/json ContentType. +type UpdateListItemJSONRequestBody UpdateListItemJSONBody + +// ImportListItemsMultipartRequestBody defines body for ImportListItems for multipart/form-data ContentType. +type ImportListItemsMultipartRequestBody ImportListItemsMultipartBody + +// PutLogstashPipelineJSONRequestBody defines body for PutLogstashPipeline for application/json ContentType. +type PutLogstashPipelineJSONRequestBody PutLogstashPipelineJSONBody + +// DeleteNoteJSONRequestBody defines body for DeleteNote for application/json ContentType. +type DeleteNoteJSONRequestBody DeleteNoteJSONBody + +// PersistNoteRouteJSONRequestBody defines body for PersistNoteRoute for application/json ContentType. +type PersistNoteRouteJSONRequestBody PersistNoteRouteJSONBody + +// ObservabilityAiAssistantChatCompleteJSONRequestBody defines body for ObservabilityAiAssistantChatComplete for application/json ContentType. +type ObservabilityAiAssistantChatCompleteJSONRequestBody ObservabilityAiAssistantChatCompleteJSONBody + +// OsqueryCreateLiveQueryJSONRequestBody defines body for OsqueryCreateLiveQuery for application/json ContentType. +type OsqueryCreateLiveQueryJSONRequestBody = SecurityOsqueryAPICreateLiveQueryRequestBody + +// OsqueryCreatePacksJSONRequestBody defines body for OsqueryCreatePacks for application/json ContentType. +type OsqueryCreatePacksJSONRequestBody = SecurityOsqueryAPICreatePacksRequestBody + +// OsqueryUpdatePacksJSONRequestBody defines body for OsqueryUpdatePacks for application/json ContentType. +type OsqueryUpdatePacksJSONRequestBody = SecurityOsqueryAPIUpdatePacksRequestBody + +// OsqueryCreateSavedQueryJSONRequestBody defines body for OsqueryCreateSavedQuery for application/json ContentType. +type OsqueryCreateSavedQueryJSONRequestBody = SecurityOsqueryAPICreateSavedQueryRequestBody + +// OsqueryUpdateSavedQueryJSONRequestBody defines body for OsqueryUpdateSavedQuery for application/json ContentType. +type OsqueryUpdateSavedQueryJSONRequestBody = SecurityOsqueryAPIUpdateSavedQueryRequestBody + +// PersistPinnedEventRouteJSONRequestBody defines body for PersistPinnedEventRoute for application/json ContentType. +type PersistPinnedEventRouteJSONRequestBody PersistPinnedEventRouteJSONBody + +// ConfigureRiskEngineSavedObjectJSONRequestBody defines body for ConfigureRiskEngineSavedObject for application/json ContentType. 
+type ConfigureRiskEngineSavedObjectJSONRequestBody ConfigureRiskEngineSavedObjectJSONBody + +// ScheduleRiskEngineNowJSONRequestBody defines body for ScheduleRiskEngineNow for application/json ContentType. +type ScheduleRiskEngineNowJSONRequestBody ScheduleRiskEngineNowJSONBody + +// BulkCreateSavedObjectsJSONRequestBody defines body for BulkCreateSavedObjects for application/json ContentType. +type BulkCreateSavedObjectsJSONRequestBody = BulkCreateSavedObjectsJSONBody + +// BulkDeleteSavedObjectsJSONRequestBody defines body for BulkDeleteSavedObjects for application/json ContentType. +type BulkDeleteSavedObjectsJSONRequestBody = BulkDeleteSavedObjectsJSONBody + +// BulkGetSavedObjectsJSONRequestBody defines body for BulkGetSavedObjects for application/json ContentType. +type BulkGetSavedObjectsJSONRequestBody = BulkGetSavedObjectsJSONBody + +// BulkResolveSavedObjectsJSONRequestBody defines body for BulkResolveSavedObjects for application/json ContentType. +type BulkResolveSavedObjectsJSONRequestBody = BulkResolveSavedObjectsJSONBody + +// BulkUpdateSavedObjectsJSONRequestBody defines body for BulkUpdateSavedObjects for application/json ContentType. +type BulkUpdateSavedObjectsJSONRequestBody = BulkUpdateSavedObjectsJSONBody + +// PostSavedObjectsExportJSONRequestBody defines body for PostSavedObjectsExport for application/json ContentType. +type PostSavedObjectsExportJSONRequestBody PostSavedObjectsExportJSONBody + +// PostSavedObjectsImportMultipartRequestBody defines body for PostSavedObjectsImport for multipart/form-data ContentType. +type PostSavedObjectsImportMultipartRequestBody PostSavedObjectsImportMultipartBody + +// ResolveImportErrorsMultipartRequestBody defines body for ResolveImportErrors for multipart/form-data ContentType. +type ResolveImportErrorsMultipartRequestBody ResolveImportErrorsMultipartBody + +// CreateSavedObjectJSONRequestBody defines body for CreateSavedObject for application/json ContentType. +type CreateSavedObjectJSONRequestBody CreateSavedObjectJSONBody + +// CreateSavedObjectIdJSONRequestBody defines body for CreateSavedObjectId for application/json ContentType. +type CreateSavedObjectIdJSONRequestBody CreateSavedObjectIdJSONBody + +// UpdateSavedObjectJSONRequestBody defines body for UpdateSavedObject for application/json ContentType. +type UpdateSavedObjectJSONRequestBody = UpdateSavedObjectJSONBody + +// PostSecurityRoleQueryJSONRequestBody defines body for PostSecurityRoleQuery for application/json ContentType. +type PostSecurityRoleQueryJSONRequestBody PostSecurityRoleQueryJSONBody + +// PutSecurityRoleNameJSONRequestBody defines body for PutSecurityRoleName for application/json ContentType. +type PutSecurityRoleNameJSONRequestBody PutSecurityRoleNameJSONBody + +// PostSecurityRolesJSONRequestBody defines body for PostSecurityRoles for application/json ContentType. +type PostSecurityRolesJSONRequestBody PostSecurityRolesJSONBody + +// PostSecuritySessionInvalidateJSONRequestBody defines body for PostSecuritySessionInvalidate for application/json ContentType. +type PostSecuritySessionInvalidateJSONRequestBody PostSecuritySessionInvalidateJSONBody + +// PerformAnonymizationFieldsBulkActionJSONRequestBody defines body for PerformAnonymizationFieldsBulkAction for application/json ContentType. +type PerformAnonymizationFieldsBulkActionJSONRequestBody PerformAnonymizationFieldsBulkActionJSONBody + +// ChatCompleteJSONRequestBody defines body for ChatComplete for application/json ContentType. 
+type ChatCompleteJSONRequestBody = SecurityAIAssistantAPIChatCompleteProps + +// DeleteAllConversationsJSONRequestBody defines body for DeleteAllConversations for application/json ContentType. +type DeleteAllConversationsJSONRequestBody DeleteAllConversationsJSONBody + +// CreateConversationJSONRequestBody defines body for CreateConversation for application/json ContentType. +type CreateConversationJSONRequestBody = SecurityAIAssistantAPIConversationCreateProps + +// UpdateConversationJSONRequestBody defines body for UpdateConversation for application/json ContentType. +type UpdateConversationJSONRequestBody = SecurityAIAssistantAPIConversationUpdateProps + +// CreateKnowledgeBaseEntryJSONRequestBody defines body for CreateKnowledgeBaseEntry for application/json ContentType. +type CreateKnowledgeBaseEntryJSONRequestBody = SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps + +// PerformKnowledgeBaseEntryBulkActionJSONRequestBody defines body for PerformKnowledgeBaseEntryBulkAction for application/json ContentType. +type PerformKnowledgeBaseEntryBulkActionJSONRequestBody PerformKnowledgeBaseEntryBulkActionJSONBody + +// UpdateKnowledgeBaseEntryJSONRequestBody defines body for UpdateKnowledgeBaseEntry for application/json ContentType. +type UpdateKnowledgeBaseEntryJSONRequestBody = SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps + +// PerformPromptsBulkActionJSONRequestBody defines body for PerformPromptsBulkAction for application/json ContentType. +type PerformPromptsBulkActionJSONRequestBody PerformPromptsBulkActionJSONBody + +// PostUrlJSONRequestBody defines body for PostUrl for application/json ContentType. +type PostUrlJSONRequestBody PostUrlJSONBody + +// PostSpacesCopySavedObjectsJSONRequestBody defines body for PostSpacesCopySavedObjects for application/json ContentType. +type PostSpacesCopySavedObjectsJSONRequestBody PostSpacesCopySavedObjectsJSONBody + +// PostSpacesDisableLegacyUrlAliasesJSONRequestBody defines body for PostSpacesDisableLegacyUrlAliases for application/json ContentType. +type PostSpacesDisableLegacyUrlAliasesJSONRequestBody PostSpacesDisableLegacyUrlAliasesJSONBody + +// PostSpacesGetShareableReferencesJSONRequestBody defines body for PostSpacesGetShareableReferences for application/json ContentType. +type PostSpacesGetShareableReferencesJSONRequestBody PostSpacesGetShareableReferencesJSONBody + +// PostSpacesResolveCopySavedObjectsErrorsJSONRequestBody defines body for PostSpacesResolveCopySavedObjectsErrors for application/json ContentType. +type PostSpacesResolveCopySavedObjectsErrorsJSONRequestBody PostSpacesResolveCopySavedObjectsErrorsJSONBody + +// PostSpacesUpdateObjectsSpacesJSONRequestBody defines body for PostSpacesUpdateObjectsSpaces for application/json ContentType. +type PostSpacesUpdateObjectsSpacesJSONRequestBody PostSpacesUpdateObjectsSpacesJSONBody + +// PostSpacesSpaceJSONRequestBody defines body for PostSpacesSpace for application/json ContentType. +type PostSpacesSpaceJSONRequestBody PostSpacesSpaceJSONBody + +// PutSpacesSpaceIdJSONRequestBody defines body for PutSpacesSpaceId for application/json ContentType. +type PutSpacesSpaceIdJSONRequestBody PutSpacesSpaceIdJSONBody + +// GetStreamsJSONRequestBody defines body for GetStreams for application/json ContentType. +type GetStreamsJSONRequestBody GetStreamsJSONBody + +// PostStreamsDisableJSONRequestBody defines body for PostStreamsDisable for application/json ContentType. 
+type PostStreamsDisableJSONRequestBody PostStreamsDisableJSONBody + +// PostStreamsEnableJSONRequestBody defines body for PostStreamsEnable for application/json ContentType. +type PostStreamsEnableJSONRequestBody PostStreamsEnableJSONBody + +// PostStreamsResyncJSONRequestBody defines body for PostStreamsResync for application/json ContentType. +type PostStreamsResyncJSONRequestBody PostStreamsResyncJSONBody + +// DeleteStreamsNameJSONRequestBody defines body for DeleteStreamsName for application/json ContentType. +type DeleteStreamsNameJSONRequestBody DeleteStreamsNameJSONBody + +// GetStreamsNameJSONRequestBody defines body for GetStreamsName for application/json ContentType. +type GetStreamsNameJSONRequestBody GetStreamsNameJSONBody + +// PutStreamsNameJSONRequestBody defines body for PutStreamsName for application/json ContentType. +type PutStreamsNameJSONRequestBody PutStreamsNameJSONBody + +// PostStreamsNameForkJSONRequestBody defines body for PostStreamsNameFork for application/json ContentType. +type PostStreamsNameForkJSONRequestBody PostStreamsNameForkJSONBody + +// GetStreamsNameGroupJSONRequestBody defines body for GetStreamsNameGroup for application/json ContentType. +type GetStreamsNameGroupJSONRequestBody GetStreamsNameGroupJSONBody + +// PutStreamsNameGroupJSONRequestBody defines body for PutStreamsNameGroup for application/json ContentType. +type PutStreamsNameGroupJSONRequestBody PutStreamsNameGroupJSONBody + +// GetStreamsNameIngestJSONRequestBody defines body for GetStreamsNameIngest for application/json ContentType. +type GetStreamsNameIngestJSONRequestBody GetStreamsNameIngestJSONBody + +// PutStreamsNameIngestJSONRequestBody defines body for PutStreamsNameIngest for application/json ContentType. +type PutStreamsNameIngestJSONRequestBody PutStreamsNameIngestJSONBody + +// PostStreamsNameContentExportJSONRequestBody defines body for PostStreamsNameContentExport for application/json ContentType. +type PostStreamsNameContentExportJSONRequestBody PostStreamsNameContentExportJSONBody + +// PostStreamsNameContentImportMultipartRequestBody defines body for PostStreamsNameContentImport for multipart/form-data ContentType. +type PostStreamsNameContentImportMultipartRequestBody PostStreamsNameContentImportMultipartBody + +// GetStreamsNameDashboardsJSONRequestBody defines body for GetStreamsNameDashboards for application/json ContentType. +type GetStreamsNameDashboardsJSONRequestBody GetStreamsNameDashboardsJSONBody + +// PostStreamsNameDashboardsBulkJSONRequestBody defines body for PostStreamsNameDashboardsBulk for application/json ContentType. +type PostStreamsNameDashboardsBulkJSONRequestBody PostStreamsNameDashboardsBulkJSONBody + +// DeleteStreamsNameDashboardsDashboardidJSONRequestBody defines body for DeleteStreamsNameDashboardsDashboardid for application/json ContentType. +type DeleteStreamsNameDashboardsDashboardidJSONRequestBody DeleteStreamsNameDashboardsDashboardidJSONBody + +// PutStreamsNameDashboardsDashboardidJSONRequestBody defines body for PutStreamsNameDashboardsDashboardid for application/json ContentType. +type PutStreamsNameDashboardsDashboardidJSONRequestBody PutStreamsNameDashboardsDashboardidJSONBody + +// GetStreamsNameQueriesJSONRequestBody defines body for GetStreamsNameQueries for application/json ContentType. +type GetStreamsNameQueriesJSONRequestBody GetStreamsNameQueriesJSONBody + +// PostStreamsNameQueriesBulkJSONRequestBody defines body for PostStreamsNameQueriesBulk for application/json ContentType. 
+type PostStreamsNameQueriesBulkJSONRequestBody PostStreamsNameQueriesBulkJSONBody + +// DeleteStreamsNameQueriesQueryidJSONRequestBody defines body for DeleteStreamsNameQueriesQueryid for application/json ContentType. +type DeleteStreamsNameQueriesQueryidJSONRequestBody DeleteStreamsNameQueriesQueryidJSONBody + +// PutStreamsNameQueriesQueryidJSONRequestBody defines body for PutStreamsNameQueriesQueryid for application/json ContentType. +type PutStreamsNameQueriesQueryidJSONRequestBody PutStreamsNameQueriesQueryidJSONBody + +// GetStreamsNameRulesJSONRequestBody defines body for GetStreamsNameRules for application/json ContentType. +type GetStreamsNameRulesJSONRequestBody GetStreamsNameRulesJSONBody + +// DeleteStreamsNameRulesRuleidJSONRequestBody defines body for DeleteStreamsNameRulesRuleid for application/json ContentType. +type DeleteStreamsNameRulesRuleidJSONRequestBody DeleteStreamsNameRulesRuleidJSONBody + +// PutStreamsNameRulesRuleidJSONRequestBody defines body for PutStreamsNameRulesRuleid for application/json ContentType. +type PutStreamsNameRulesRuleidJSONRequestBody PutStreamsNameRulesRuleidJSONBody + +// GetStreamsNameSignificantEventsJSONRequestBody defines body for GetStreamsNameSignificantEvents for application/json ContentType. +type GetStreamsNameSignificantEventsJSONRequestBody GetStreamsNameSignificantEventsJSONBody + +// GetStreamsNameSignificantEventsGenerateJSONRequestBody defines body for GetStreamsNameSignificantEventsGenerate for application/json ContentType. +type GetStreamsNameSignificantEventsGenerateJSONRequestBody GetStreamsNameSignificantEventsGenerateJSONBody + +// PostStreamsNameSignificantEventsPreviewJSONRequestBody defines body for PostStreamsNameSignificantEventsPreview for application/json ContentType. +type PostStreamsNameSignificantEventsPreviewJSONRequestBody PostStreamsNameSignificantEventsPreviewJSONBody + +// PostSyntheticMonitorsJSONRequestBody defines body for PostSyntheticMonitors for application/json ContentType. +type PostSyntheticMonitorsJSONRequestBody PostSyntheticMonitorsJSONBody + +// DeleteSyntheticMonitorsJSONRequestBody defines body for DeleteSyntheticMonitors for application/json ContentType. +type DeleteSyntheticMonitorsJSONRequestBody DeleteSyntheticMonitorsJSONBody + +// PutSyntheticMonitorJSONRequestBody defines body for PutSyntheticMonitor for application/json ContentType. +type PutSyntheticMonitorJSONRequestBody PutSyntheticMonitorJSONBody + +// PostParametersJSONRequestBody defines body for PostParameters for application/json ContentType. +type PostParametersJSONRequestBody PostParametersJSONBody + +// DeleteParametersJSONRequestBody defines body for DeleteParameters for application/json ContentType. +type DeleteParametersJSONRequestBody = DeleteParametersJSONBody + +// PutParameterJSONRequestBody defines body for PutParameter for application/json ContentType. +type PutParameterJSONRequestBody PutParameterJSONBody + +// PostPrivateLocationJSONRequestBody defines body for PostPrivateLocation for application/json ContentType. +type PostPrivateLocationJSONRequestBody PostPrivateLocationJSONBody + +// PutPrivateLocationJSONRequestBody defines body for PutPrivateLocation for application/json ContentType. +type PutPrivateLocationJSONRequestBody PutPrivateLocationJSONBody + +// DeleteTimelinesJSONRequestBody defines body for DeleteTimelines for application/json ContentType. 
+type DeleteTimelinesJSONRequestBody DeleteTimelinesJSONBody + +// PatchTimelineJSONRequestBody defines body for PatchTimeline for application/json ContentType. +type PatchTimelineJSONRequestBody PatchTimelineJSONBody + +// CreateTimelinesJSONRequestBody defines body for CreateTimelines for application/json ContentType. +type CreateTimelinesJSONRequestBody CreateTimelinesJSONBody + +// CopyTimelineJSONRequestBody defines body for CopyTimeline for application/json ContentType. +type CopyTimelineJSONRequestBody CopyTimelineJSONBody + +// CleanDraftTimelinesJSONRequestBody defines body for CleanDraftTimelines for application/json ContentType. +type CleanDraftTimelinesJSONRequestBody CleanDraftTimelinesJSONBody + +// ExportTimelinesJSONRequestBody defines body for ExportTimelines for application/json ContentType. +type ExportTimelinesJSONRequestBody ExportTimelinesJSONBody + +// PersistFavoriteRouteJSONRequestBody defines body for PersistFavoriteRoute for application/json ContentType. +type PersistFavoriteRouteJSONRequestBody PersistFavoriteRouteJSONBody + +// ImportTimelinesJSONRequestBody defines body for ImportTimelines for application/json ContentType. +type ImportTimelinesJSONRequestBody ImportTimelinesJSONBody + +// InstallPrepackedTimelinesJSONRequestBody defines body for InstallPrepackedTimelines for application/json ContentType. +type InstallPrepackedTimelinesJSONRequestBody InstallPrepackedTimelinesJSONBody + +// PutUptimeSettingsJSONRequestBody defines body for PutUptimeSettings for application/json ContentType. +type PutUptimeSettingsJSONRequestBody PutUptimeSettingsJSONBody + +// PostActionsConnectorIdJSONRequestBody defines body for PostActionsConnectorId for application/json ContentType. +type PostActionsConnectorIdJSONRequestBody PostActionsConnectorIdJSONBody + +// PutActionsConnectorIdJSONRequestBody defines body for PutActionsConnectorId for application/json ContentType. +type PutActionsConnectorIdJSONRequestBody PutActionsConnectorIdJSONBody + +// CreateDataViewDefaultwJSONRequestBody defines body for CreateDataViewDefaultw for application/json ContentType. +type CreateDataViewDefaultwJSONRequestBody = DataViewsCreateDataViewRequestObject + +// UpdateDataViewDefaultJSONRequestBody defines body for UpdateDataViewDefault for application/json ContentType. +type UpdateDataViewDefaultJSONRequestBody = DataViewsUpdateDataViewRequestObject + +// PostMaintenanceWindowJSONRequestBody defines body for PostMaintenanceWindow for application/json ContentType. +type PostMaintenanceWindowJSONRequestBody PostMaintenanceWindowJSONBody + +// PatchMaintenanceWindowIdJSONRequestBody defines body for PatchMaintenanceWindowId for application/json ContentType. +type PatchMaintenanceWindowIdJSONRequestBody PatchMaintenanceWindowIdJSONBody + +// CreateSloOpJSONRequestBody defines body for CreateSloOp for application/json ContentType. +type CreateSloOpJSONRequestBody = SLOsCreateSloRequest + +// BulkDeleteOpJSONRequestBody defines body for BulkDeleteOp for application/json ContentType. +type BulkDeleteOpJSONRequestBody = SLOsBulkDeleteRequest + +// DeleteRollupDataOpJSONRequestBody defines body for DeleteRollupDataOp for application/json ContentType. +type DeleteRollupDataOpJSONRequestBody = SLOsBulkPurgeRollupRequest + +// DeleteSloInstancesOpJSONRequestBody defines body for DeleteSloInstancesOp for application/json ContentType. +type DeleteSloInstancesOpJSONRequestBody = SLOsDeleteSloInstancesRequest + +// UpdateSloOpJSONRequestBody defines body for UpdateSloOp for application/json ContentType. 
+type UpdateSloOpJSONRequestBody = SLOsUpdateSloRequest + +// Getter for additional properties for PostActionsConnectorIdExecuteJSONBody_Params. Returns the specified +// element and whether it was found +func (a PostActionsConnectorIdExecuteJSONBody_Params) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PostActionsConnectorIdExecuteJSONBody_Params +func (a *PostActionsConnectorIdExecuteJSONBody_Params) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Getter for additional properties for PostAlertingRuleIdJSONBody_Params. Returns the specified +// element and whether it was found +func (a PostAlertingRuleIdJSONBody_Params) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PostAlertingRuleIdJSONBody_Params +func (a *PostAlertingRuleIdJSONBody_Params) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Getter for additional properties for SyntheticsBrowserMonitorFields. Returns the specified +// element and whether it was found +func (a SyntheticsBrowserMonitorFields) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for SyntheticsBrowserMonitorFields +func (a *SyntheticsBrowserMonitorFields) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for SyntheticsBrowserMonitorFields to handle AdditionalProperties +func (a *SyntheticsBrowserMonitorFields) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["alert"]; found { + err = json.Unmarshal(raw, &a.Alert) + if err != nil { + return fmt.Errorf("error reading 'alert': %w", err) + } + delete(object, "alert") + } + + if raw, found := object["enabled"]; found { + err = json.Unmarshal(raw, &a.Enabled) + if err != nil { + return fmt.Errorf("error reading 'enabled': %w", err) + } + delete(object, "enabled") + } + + if raw, found := object["ignore_https_errors"]; found { + err = json.Unmarshal(raw, &a.IgnoreHttpsErrors) + if err != nil { + return fmt.Errorf("error reading 'ignore_https_errors': %w", err) + } + delete(object, "ignore_https_errors") + } + + if raw, found := object["inline_script"]; found { + err = json.Unmarshal(raw, &a.InlineScript) + if err != nil { + return fmt.Errorf("error reading 'inline_script': %w", err) + } + delete(object, "inline_script") + } + + if raw, found := object["labels"]; found { + err = json.Unmarshal(raw, &a.Labels) + if err != nil { + return fmt.Errorf("error reading 'labels': %w", err) + } + delete(object, "labels") + } + + if raw, found := object["locations"]; found { + err = json.Unmarshal(raw, &a.Locations) + if err != nil { + return fmt.Errorf("error reading 'locations': %w", err) + } + 
delete(object, "locations") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["namespace"]; found { + err = json.Unmarshal(raw, &a.Namespace) + if err != nil { + return fmt.Errorf("error reading 'namespace': %w", err) + } + delete(object, "namespace") + } + + if raw, found := object["params"]; found { + err = json.Unmarshal(raw, &a.Params) + if err != nil { + return fmt.Errorf("error reading 'params': %w", err) + } + delete(object, "params") + } + + if raw, found := object["playwright_options"]; found { + err = json.Unmarshal(raw, &a.PlaywrightOptions) + if err != nil { + return fmt.Errorf("error reading 'playwright_options': %w", err) + } + delete(object, "playwright_options") + } + + if raw, found := object["private_locations"]; found { + err = json.Unmarshal(raw, &a.PrivateLocations) + if err != nil { + return fmt.Errorf("error reading 'private_locations': %w", err) + } + delete(object, "private_locations") + } + + if raw, found := object["retest_on_failure"]; found { + err = json.Unmarshal(raw, &a.RetestOnFailure) + if err != nil { + return fmt.Errorf("error reading 'retest_on_failure': %w", err) + } + delete(object, "retest_on_failure") + } + + if raw, found := object["schedule"]; found { + err = json.Unmarshal(raw, &a.Schedule) + if err != nil { + return fmt.Errorf("error reading 'schedule': %w", err) + } + delete(object, "schedule") + } + + if raw, found := object["screenshots"]; found { + err = json.Unmarshal(raw, &a.Screenshots) + if err != nil { + return fmt.Errorf("error reading 'screenshots': %w", err) + } + delete(object, "screenshots") + } + + if raw, found := object["service.name"]; found { + err = json.Unmarshal(raw, &a.ServiceName) + if err != nil { + return fmt.Errorf("error reading 'service.name': %w", err) + } + delete(object, "service.name") + } + + if raw, found := object["synthetics_args"]; found { + err = json.Unmarshal(raw, &a.SyntheticsArgs) + if err != nil { + return fmt.Errorf("error reading 'synthetics_args': %w", err) + } + delete(object, "synthetics_args") + } + + if raw, found := object["tags"]; found { + err = json.Unmarshal(raw, &a.Tags) + if err != nil { + return fmt.Errorf("error reading 'tags': %w", err) + } + delete(object, "tags") + } + + if raw, found := object["timeout"]; found { + err = json.Unmarshal(raw, &a.Timeout) + if err != nil { + return fmt.Errorf("error reading 'timeout': %w", err) + } + delete(object, "timeout") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for SyntheticsBrowserMonitorFields to handle AdditionalProperties +func (a SyntheticsBrowserMonitorFields) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Alert != nil { + object["alert"], err = json.Marshal(a.Alert) + if err != nil { + return nil, fmt.Errorf("error marshaling 'alert': %w", err) + } + } + + if a.Enabled != nil { + 
object["enabled"], err = json.Marshal(a.Enabled) + if err != nil { + return nil, fmt.Errorf("error marshaling 'enabled': %w", err) + } + } + + if a.IgnoreHttpsErrors != nil { + object["ignore_https_errors"], err = json.Marshal(a.IgnoreHttpsErrors) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ignore_https_errors': %w", err) + } + } + + object["inline_script"], err = json.Marshal(a.InlineScript) + if err != nil { + return nil, fmt.Errorf("error marshaling 'inline_script': %w", err) + } + + if a.Labels != nil { + object["labels"], err = json.Marshal(a.Labels) + if err != nil { + return nil, fmt.Errorf("error marshaling 'labels': %w", err) + } + } + + if a.Locations != nil { + object["locations"], err = json.Marshal(a.Locations) + if err != nil { + return nil, fmt.Errorf("error marshaling 'locations': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Namespace != nil { + object["namespace"], err = json.Marshal(a.Namespace) + if err != nil { + return nil, fmt.Errorf("error marshaling 'namespace': %w", err) + } + } + + if a.Params != nil { + object["params"], err = json.Marshal(a.Params) + if err != nil { + return nil, fmt.Errorf("error marshaling 'params': %w", err) + } + } + + if a.PlaywrightOptions != nil { + object["playwright_options"], err = json.Marshal(a.PlaywrightOptions) + if err != nil { + return nil, fmt.Errorf("error marshaling 'playwright_options': %w", err) + } + } + + if a.PrivateLocations != nil { + object["private_locations"], err = json.Marshal(a.PrivateLocations) + if err != nil { + return nil, fmt.Errorf("error marshaling 'private_locations': %w", err) + } + } + + if a.RetestOnFailure != nil { + object["retest_on_failure"], err = json.Marshal(a.RetestOnFailure) + if err != nil { + return nil, fmt.Errorf("error marshaling 'retest_on_failure': %w", err) + } + } + + if a.Schedule != nil { + object["schedule"], err = json.Marshal(a.Schedule) + if err != nil { + return nil, fmt.Errorf("error marshaling 'schedule': %w", err) + } + } + + if a.Screenshots != nil { + object["screenshots"], err = json.Marshal(a.Screenshots) + if err != nil { + return nil, fmt.Errorf("error marshaling 'screenshots': %w", err) + } + } + + if a.ServiceName != nil { + object["service.name"], err = json.Marshal(a.ServiceName) + if err != nil { + return nil, fmt.Errorf("error marshaling 'service.name': %w", err) + } + } + + if a.SyntheticsArgs != nil { + object["synthetics_args"], err = json.Marshal(a.SyntheticsArgs) + if err != nil { + return nil, fmt.Errorf("error marshaling 'synthetics_args': %w", err) + } + } + + if a.Tags != nil { + object["tags"], err = json.Marshal(a.Tags) + if err != nil { + return nil, fmt.Errorf("error marshaling 'tags': %w", err) + } + } + + if a.Timeout != nil { + object["timeout"], err = json.Marshal(a.Timeout) + if err != nil { + return nil, fmt.Errorf("error marshaling 'timeout': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for SyntheticsGetPrivateLocation. 
Returns the specified +// element and whether it was found +func (a SyntheticsGetPrivateLocation) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for SyntheticsGetPrivateLocation +func (a *SyntheticsGetPrivateLocation) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for SyntheticsGetPrivateLocation to handle AdditionalProperties +func (a *SyntheticsGetPrivateLocation) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["agentPolicyId"]; found { + err = json.Unmarshal(raw, &a.AgentPolicyId) + if err != nil { + return fmt.Errorf("error reading 'agentPolicyId': %w", err) + } + delete(object, "agentPolicyId") + } + + if raw, found := object["geo"]; found { + err = json.Unmarshal(raw, &a.Geo) + if err != nil { + return fmt.Errorf("error reading 'geo': %w", err) + } + delete(object, "geo") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["isInvalid"]; found { + err = json.Unmarshal(raw, &a.IsInvalid) + if err != nil { + return fmt.Errorf("error reading 'isInvalid': %w", err) + } + delete(object, "isInvalid") + } + + if raw, found := object["label"]; found { + err = json.Unmarshal(raw, &a.Label) + if err != nil { + return fmt.Errorf("error reading 'label': %w", err) + } + delete(object, "label") + } + + if raw, found := object["namespace"]; found { + err = json.Unmarshal(raw, &a.Namespace) + if err != nil { + return fmt.Errorf("error reading 'namespace': %w", err) + } + delete(object, "namespace") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for SyntheticsGetPrivateLocation to handle AdditionalProperties +func (a SyntheticsGetPrivateLocation) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AgentPolicyId != nil { + object["agentPolicyId"], err = json.Marshal(a.AgentPolicyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'agentPolicyId': %w", err) + } + } + + if a.Geo != nil { + object["geo"], err = json.Marshal(a.Geo) + if err != nil { + return nil, fmt.Errorf("error marshaling 'geo': %w", err) + } + } + + if a.Id != nil { + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + } + + if a.IsInvalid != nil { + object["isInvalid"], err = json.Marshal(a.IsInvalid) + if err != nil { + return nil, fmt.Errorf("error marshaling 'isInvalid': %w", err) + } + } + + if a.Label != nil { + object["label"], err = json.Marshal(a.Label) + if err != nil { + return nil, fmt.Errorf("error marshaling 'label': %w", err) + } + } + + if a.Namespace != nil { + object["namespace"], err = json.Marshal(a.Namespace) + if err != nil { + 
return nil, fmt.Errorf("error marshaling 'namespace': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for SyntheticsHttpMonitorFields. Returns the specified +// element and whether it was found +func (a SyntheticsHttpMonitorFields) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for SyntheticsHttpMonitorFields +func (a *SyntheticsHttpMonitorFields) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for SyntheticsHttpMonitorFields to handle AdditionalProperties +func (a *SyntheticsHttpMonitorFields) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["alert"]; found { + err = json.Unmarshal(raw, &a.Alert) + if err != nil { + return fmt.Errorf("error reading 'alert': %w", err) + } + delete(object, "alert") + } + + if raw, found := object["check"]; found { + err = json.Unmarshal(raw, &a.Check) + if err != nil { + return fmt.Errorf("error reading 'check': %w", err) + } + delete(object, "check") + } + + if raw, found := object["enabled"]; found { + err = json.Unmarshal(raw, &a.Enabled) + if err != nil { + return fmt.Errorf("error reading 'enabled': %w", err) + } + delete(object, "enabled") + } + + if raw, found := object["ipv4"]; found { + err = json.Unmarshal(raw, &a.Ipv4) + if err != nil { + return fmt.Errorf("error reading 'ipv4': %w", err) + } + delete(object, "ipv4") + } + + if raw, found := object["ipv6"]; found { + err = json.Unmarshal(raw, &a.Ipv6) + if err != nil { + return fmt.Errorf("error reading 'ipv6': %w", err) + } + delete(object, "ipv6") + } + + if raw, found := object["labels"]; found { + err = json.Unmarshal(raw, &a.Labels) + if err != nil { + return fmt.Errorf("error reading 'labels': %w", err) + } + delete(object, "labels") + } + + if raw, found := object["locations"]; found { + err = json.Unmarshal(raw, &a.Locations) + if err != nil { + return fmt.Errorf("error reading 'locations': %w", err) + } + delete(object, "locations") + } + + if raw, found := object["max_redirects"]; found { + err = json.Unmarshal(raw, &a.MaxRedirects) + if err != nil { + return fmt.Errorf("error reading 'max_redirects': %w", err) + } + delete(object, "max_redirects") + } + + if raw, found := object["mode"]; found { + err = json.Unmarshal(raw, &a.Mode) + if err != nil { + return fmt.Errorf("error reading 'mode': %w", err) + } + delete(object, "mode") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["namespace"]; found { + err = json.Unmarshal(raw, &a.Namespace) + if err != nil { + return fmt.Errorf("error reading 'namespace': %w", err) + } + delete(object, "namespace") + } + + if raw, found := object["params"]; found { + err = json.Unmarshal(raw, &a.Params) + if err != nil { + return fmt.Errorf("error reading 'params': %w", err) + } + delete(object, 
"params") + } + + if raw, found := object["password"]; found { + err = json.Unmarshal(raw, &a.Password) + if err != nil { + return fmt.Errorf("error reading 'password': %w", err) + } + delete(object, "password") + } + + if raw, found := object["private_locations"]; found { + err = json.Unmarshal(raw, &a.PrivateLocations) + if err != nil { + return fmt.Errorf("error reading 'private_locations': %w", err) + } + delete(object, "private_locations") + } + + if raw, found := object["proxy_headers"]; found { + err = json.Unmarshal(raw, &a.ProxyHeaders) + if err != nil { + return fmt.Errorf("error reading 'proxy_headers': %w", err) + } + delete(object, "proxy_headers") + } + + if raw, found := object["proxy_url"]; found { + err = json.Unmarshal(raw, &a.ProxyUrl) + if err != nil { + return fmt.Errorf("error reading 'proxy_url': %w", err) + } + delete(object, "proxy_url") + } + + if raw, found := object["response"]; found { + err = json.Unmarshal(raw, &a.Response) + if err != nil { + return fmt.Errorf("error reading 'response': %w", err) + } + delete(object, "response") + } + + if raw, found := object["retest_on_failure"]; found { + err = json.Unmarshal(raw, &a.RetestOnFailure) + if err != nil { + return fmt.Errorf("error reading 'retest_on_failure': %w", err) + } + delete(object, "retest_on_failure") + } + + if raw, found := object["schedule"]; found { + err = json.Unmarshal(raw, &a.Schedule) + if err != nil { + return fmt.Errorf("error reading 'schedule': %w", err) + } + delete(object, "schedule") + } + + if raw, found := object["service.name"]; found { + err = json.Unmarshal(raw, &a.ServiceName) + if err != nil { + return fmt.Errorf("error reading 'service.name': %w", err) + } + delete(object, "service.name") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if raw, found := object["tags"]; found { + err = json.Unmarshal(raw, &a.Tags) + if err != nil { + return fmt.Errorf("error reading 'tags': %w", err) + } + delete(object, "tags") + } + + if raw, found := object["timeout"]; found { + err = json.Unmarshal(raw, &a.Timeout) + if err != nil { + return fmt.Errorf("error reading 'timeout': %w", err) + } + delete(object, "timeout") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["url"]; found { + err = json.Unmarshal(raw, &a.Url) + if err != nil { + return fmt.Errorf("error reading 'url': %w", err) + } + delete(object, "url") + } + + if raw, found := object["username"]; found { + err = json.Unmarshal(raw, &a.Username) + if err != nil { + return fmt.Errorf("error reading 'username': %w", err) + } + delete(object, "username") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for SyntheticsHttpMonitorFields to handle AdditionalProperties +func (a SyntheticsHttpMonitorFields) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Alert != nil { + object["alert"], err = json.Marshal(a.Alert) + if err != nil { 
+ return nil, fmt.Errorf("error marshaling 'alert': %w", err) + } + } + + if a.Check != nil { + object["check"], err = json.Marshal(a.Check) + if err != nil { + return nil, fmt.Errorf("error marshaling 'check': %w", err) + } + } + + if a.Enabled != nil { + object["enabled"], err = json.Marshal(a.Enabled) + if err != nil { + return nil, fmt.Errorf("error marshaling 'enabled': %w", err) + } + } + + if a.Ipv4 != nil { + object["ipv4"], err = json.Marshal(a.Ipv4) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ipv4': %w", err) + } + } + + if a.Ipv6 != nil { + object["ipv6"], err = json.Marshal(a.Ipv6) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ipv6': %w", err) + } + } + + if a.Labels != nil { + object["labels"], err = json.Marshal(a.Labels) + if err != nil { + return nil, fmt.Errorf("error marshaling 'labels': %w", err) + } + } + + if a.Locations != nil { + object["locations"], err = json.Marshal(a.Locations) + if err != nil { + return nil, fmt.Errorf("error marshaling 'locations': %w", err) + } + } + + if a.MaxRedirects != nil { + object["max_redirects"], err = json.Marshal(a.MaxRedirects) + if err != nil { + return nil, fmt.Errorf("error marshaling 'max_redirects': %w", err) + } + } + + if a.Mode != nil { + object["mode"], err = json.Marshal(a.Mode) + if err != nil { + return nil, fmt.Errorf("error marshaling 'mode': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Namespace != nil { + object["namespace"], err = json.Marshal(a.Namespace) + if err != nil { + return nil, fmt.Errorf("error marshaling 'namespace': %w", err) + } + } + + if a.Params != nil { + object["params"], err = json.Marshal(a.Params) + if err != nil { + return nil, fmt.Errorf("error marshaling 'params': %w", err) + } + } + + if a.Password != nil { + object["password"], err = json.Marshal(a.Password) + if err != nil { + return nil, fmt.Errorf("error marshaling 'password': %w", err) + } + } + + if a.PrivateLocations != nil { + object["private_locations"], err = json.Marshal(a.PrivateLocations) + if err != nil { + return nil, fmt.Errorf("error marshaling 'private_locations': %w", err) + } + } + + if a.ProxyHeaders != nil { + object["proxy_headers"], err = json.Marshal(a.ProxyHeaders) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_headers': %w", err) + } + } + + if a.ProxyUrl != nil { + object["proxy_url"], err = json.Marshal(a.ProxyUrl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_url': %w", err) + } + } + + if a.Response != nil { + object["response"], err = json.Marshal(a.Response) + if err != nil { + return nil, fmt.Errorf("error marshaling 'response': %w", err) + } + } + + if a.RetestOnFailure != nil { + object["retest_on_failure"], err = json.Marshal(a.RetestOnFailure) + if err != nil { + return nil, fmt.Errorf("error marshaling 'retest_on_failure': %w", err) + } + } + + if a.Schedule != nil { + object["schedule"], err = json.Marshal(a.Schedule) + if err != nil { + return nil, fmt.Errorf("error marshaling 'schedule': %w", err) + } + } + + if a.ServiceName != nil { + object["service.name"], err = json.Marshal(a.ServiceName) + if err != nil { + return nil, fmt.Errorf("error marshaling 'service.name': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + if a.Tags != nil { + object["tags"], err = json.Marshal(a.Tags) + if err != 
nil { + return nil, fmt.Errorf("error marshaling 'tags': %w", err) + } + } + + if a.Timeout != nil { + object["timeout"], err = json.Marshal(a.Timeout) + if err != nil { + return nil, fmt.Errorf("error marshaling 'timeout': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + object["url"], err = json.Marshal(a.Url) + if err != nil { + return nil, fmt.Errorf("error marshaling 'url': %w", err) + } + + if a.Username != nil { + object["username"], err = json.Marshal(a.Username) + if err != nil { + return nil, fmt.Errorf("error marshaling 'username': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for SyntheticsHttpMonitorFields_Check_Response. Returns the specified +// element and whether it was found +func (a SyntheticsHttpMonitorFields_Check_Response) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for SyntheticsHttpMonitorFields_Check_Response +func (a *SyntheticsHttpMonitorFields_Check_Response) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for SyntheticsHttpMonitorFields_Check_Response to handle AdditionalProperties +func (a *SyntheticsHttpMonitorFields_Check_Response) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["body"]; found { + err = json.Unmarshal(raw, &a.Body) + if err != nil { + return fmt.Errorf("error reading 'body': %w", err) + } + delete(object, "body") + } + + if raw, found := object["headers"]; found { + err = json.Unmarshal(raw, &a.Headers) + if err != nil { + return fmt.Errorf("error reading 'headers': %w", err) + } + delete(object, "headers") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for SyntheticsHttpMonitorFields_Check_Response to handle AdditionalProperties +func (a SyntheticsHttpMonitorFields_Check_Response) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Body != nil { + object["body"], err = json.Marshal(a.Body) + if err != nil { + return nil, fmt.Errorf("error marshaling 'body': %w", err) + } + } + + if a.Headers != nil { + object["headers"], err = json.Marshal(a.Headers) + if err != nil { + return nil, fmt.Errorf("error marshaling 'headers': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for SyntheticsIcmpMonitorFields. 
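The monitor-field types above carry an AdditionalProperties map: the generated UnmarshalJSON routes any JSON key that is not a declared field into that map, MarshalJSON writes the map back out alongside the declared fields, and Get/Set expose it directly. A minimal round-trip sketch, assuming it lives next to the generated types; the JSON payload and the x_custom/x_other keys are illustrative only:

import (
	"encoding/json"
	"fmt"
	"log"
)

func additionalPropertiesRoundTrip() {
	raw := []byte(`{"type":"http","name":"home","url":"https://example.com","x_custom":"kept"}`)

	var fields SyntheticsHttpMonitorFields
	if err := json.Unmarshal(raw, &fields); err != nil {
		log.Fatal(err)
	}

	// "x_custom" is not a declared field, so UnmarshalJSON stored it in AdditionalProperties.
	if v, ok := fields.Get("x_custom"); ok {
		fmt.Println(v) // kept
	}

	// Set records another undeclared key; MarshalJSON emits it next to the declared fields.
	fields.Set("x_other", 42)

	out, err := json.Marshal(fields)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(out))
}
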
Returns the specified +// element and whether it was found +func (a SyntheticsIcmpMonitorFields) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for SyntheticsIcmpMonitorFields +func (a *SyntheticsIcmpMonitorFields) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for SyntheticsIcmpMonitorFields to handle AdditionalProperties +func (a *SyntheticsIcmpMonitorFields) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["alert"]; found { + err = json.Unmarshal(raw, &a.Alert) + if err != nil { + return fmt.Errorf("error reading 'alert': %w", err) + } + delete(object, "alert") + } + + if raw, found := object["enabled"]; found { + err = json.Unmarshal(raw, &a.Enabled) + if err != nil { + return fmt.Errorf("error reading 'enabled': %w", err) + } + delete(object, "enabled") + } + + if raw, found := object["host"]; found { + err = json.Unmarshal(raw, &a.Host) + if err != nil { + return fmt.Errorf("error reading 'host': %w", err) + } + delete(object, "host") + } + + if raw, found := object["labels"]; found { + err = json.Unmarshal(raw, &a.Labels) + if err != nil { + return fmt.Errorf("error reading 'labels': %w", err) + } + delete(object, "labels") + } + + if raw, found := object["locations"]; found { + err = json.Unmarshal(raw, &a.Locations) + if err != nil { + return fmt.Errorf("error reading 'locations': %w", err) + } + delete(object, "locations") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["namespace"]; found { + err = json.Unmarshal(raw, &a.Namespace) + if err != nil { + return fmt.Errorf("error reading 'namespace': %w", err) + } + delete(object, "namespace") + } + + if raw, found := object["params"]; found { + err = json.Unmarshal(raw, &a.Params) + if err != nil { + return fmt.Errorf("error reading 'params': %w", err) + } + delete(object, "params") + } + + if raw, found := object["private_locations"]; found { + err = json.Unmarshal(raw, &a.PrivateLocations) + if err != nil { + return fmt.Errorf("error reading 'private_locations': %w", err) + } + delete(object, "private_locations") + } + + if raw, found := object["retest_on_failure"]; found { + err = json.Unmarshal(raw, &a.RetestOnFailure) + if err != nil { + return fmt.Errorf("error reading 'retest_on_failure': %w", err) + } + delete(object, "retest_on_failure") + } + + if raw, found := object["schedule"]; found { + err = json.Unmarshal(raw, &a.Schedule) + if err != nil { + return fmt.Errorf("error reading 'schedule': %w", err) + } + delete(object, "schedule") + } + + if raw, found := object["service.name"]; found { + err = json.Unmarshal(raw, &a.ServiceName) + if err != nil { + return fmt.Errorf("error reading 'service.name': %w", err) + } + delete(object, "service.name") + } + + if raw, found := object["tags"]; found { + err = json.Unmarshal(raw, &a.Tags) + if err != nil { + return fmt.Errorf("error reading 'tags': %w", err) + } + delete(object, "tags") + } + + if raw, found := object["timeout"]; found { + err = json.Unmarshal(raw, 
&a.Timeout) + if err != nil { + return fmt.Errorf("error reading 'timeout': %w", err) + } + delete(object, "timeout") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["wait"]; found { + err = json.Unmarshal(raw, &a.Wait) + if err != nil { + return fmt.Errorf("error reading 'wait': %w", err) + } + delete(object, "wait") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for SyntheticsIcmpMonitorFields to handle AdditionalProperties +func (a SyntheticsIcmpMonitorFields) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Alert != nil { + object["alert"], err = json.Marshal(a.Alert) + if err != nil { + return nil, fmt.Errorf("error marshaling 'alert': %w", err) + } + } + + if a.Enabled != nil { + object["enabled"], err = json.Marshal(a.Enabled) + if err != nil { + return nil, fmt.Errorf("error marshaling 'enabled': %w", err) + } + } + + object["host"], err = json.Marshal(a.Host) + if err != nil { + return nil, fmt.Errorf("error marshaling 'host': %w", err) + } + + if a.Labels != nil { + object["labels"], err = json.Marshal(a.Labels) + if err != nil { + return nil, fmt.Errorf("error marshaling 'labels': %w", err) + } + } + + if a.Locations != nil { + object["locations"], err = json.Marshal(a.Locations) + if err != nil { + return nil, fmt.Errorf("error marshaling 'locations': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Namespace != nil { + object["namespace"], err = json.Marshal(a.Namespace) + if err != nil { + return nil, fmt.Errorf("error marshaling 'namespace': %w", err) + } + } + + if a.Params != nil { + object["params"], err = json.Marshal(a.Params) + if err != nil { + return nil, fmt.Errorf("error marshaling 'params': %w", err) + } + } + + if a.PrivateLocations != nil { + object["private_locations"], err = json.Marshal(a.PrivateLocations) + if err != nil { + return nil, fmt.Errorf("error marshaling 'private_locations': %w", err) + } + } + + if a.RetestOnFailure != nil { + object["retest_on_failure"], err = json.Marshal(a.RetestOnFailure) + if err != nil { + return nil, fmt.Errorf("error marshaling 'retest_on_failure': %w", err) + } + } + + if a.Schedule != nil { + object["schedule"], err = json.Marshal(a.Schedule) + if err != nil { + return nil, fmt.Errorf("error marshaling 'schedule': %w", err) + } + } + + if a.ServiceName != nil { + object["service.name"], err = json.Marshal(a.ServiceName) + if err != nil { + return nil, fmt.Errorf("error marshaling 'service.name': %w", err) + } + } + + if a.Tags != nil { + object["tags"], err = json.Marshal(a.Tags) + if err != nil { + return nil, fmt.Errorf("error marshaling 'tags': %w", err) + } + } + + if a.Timeout != nil { + object["timeout"], err = json.Marshal(a.Timeout) + if err != nil { + return nil, fmt.Errorf("error marshaling 'timeout': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': 
%w", err) + } + + if a.Wait != nil { + object["wait"], err = json.Marshal(a.Wait) + if err != nil { + return nil, fmt.Errorf("error marshaling 'wait': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for SyntheticsTcpMonitorFields. Returns the specified +// element and whether it was found +func (a SyntheticsTcpMonitorFields) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for SyntheticsTcpMonitorFields +func (a *SyntheticsTcpMonitorFields) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for SyntheticsTcpMonitorFields to handle AdditionalProperties +func (a *SyntheticsTcpMonitorFields) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["alert"]; found { + err = json.Unmarshal(raw, &a.Alert) + if err != nil { + return fmt.Errorf("error reading 'alert': %w", err) + } + delete(object, "alert") + } + + if raw, found := object["enabled"]; found { + err = json.Unmarshal(raw, &a.Enabled) + if err != nil { + return fmt.Errorf("error reading 'enabled': %w", err) + } + delete(object, "enabled") + } + + if raw, found := object["host"]; found { + err = json.Unmarshal(raw, &a.Host) + if err != nil { + return fmt.Errorf("error reading 'host': %w", err) + } + delete(object, "host") + } + + if raw, found := object["labels"]; found { + err = json.Unmarshal(raw, &a.Labels) + if err != nil { + return fmt.Errorf("error reading 'labels': %w", err) + } + delete(object, "labels") + } + + if raw, found := object["locations"]; found { + err = json.Unmarshal(raw, &a.Locations) + if err != nil { + return fmt.Errorf("error reading 'locations': %w", err) + } + delete(object, "locations") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["namespace"]; found { + err = json.Unmarshal(raw, &a.Namespace) + if err != nil { + return fmt.Errorf("error reading 'namespace': %w", err) + } + delete(object, "namespace") + } + + if raw, found := object["params"]; found { + err = json.Unmarshal(raw, &a.Params) + if err != nil { + return fmt.Errorf("error reading 'params': %w", err) + } + delete(object, "params") + } + + if raw, found := object["private_locations"]; found { + err = json.Unmarshal(raw, &a.PrivateLocations) + if err != nil { + return fmt.Errorf("error reading 'private_locations': %w", err) + } + delete(object, "private_locations") + } + + if raw, found := object["proxy_url"]; found { + err = json.Unmarshal(raw, &a.ProxyUrl) + if err != nil { + return fmt.Errorf("error reading 'proxy_url': %w", err) + } + delete(object, "proxy_url") + } + + if raw, found := object["proxy_use_local_resolver"]; found { + err = json.Unmarshal(raw, &a.ProxyUseLocalResolver) + if err != nil { + return fmt.Errorf("error reading 'proxy_use_local_resolver': %w", err) + } + delete(object, 
"proxy_use_local_resolver") + } + + if raw, found := object["retest_on_failure"]; found { + err = json.Unmarshal(raw, &a.RetestOnFailure) + if err != nil { + return fmt.Errorf("error reading 'retest_on_failure': %w", err) + } + delete(object, "retest_on_failure") + } + + if raw, found := object["schedule"]; found { + err = json.Unmarshal(raw, &a.Schedule) + if err != nil { + return fmt.Errorf("error reading 'schedule': %w", err) + } + delete(object, "schedule") + } + + if raw, found := object["service.name"]; found { + err = json.Unmarshal(raw, &a.ServiceName) + if err != nil { + return fmt.Errorf("error reading 'service.name': %w", err) + } + delete(object, "service.name") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if raw, found := object["tags"]; found { + err = json.Unmarshal(raw, &a.Tags) + if err != nil { + return fmt.Errorf("error reading 'tags': %w", err) + } + delete(object, "tags") + } + + if raw, found := object["timeout"]; found { + err = json.Unmarshal(raw, &a.Timeout) + if err != nil { + return fmt.Errorf("error reading 'timeout': %w", err) + } + delete(object, "timeout") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for SyntheticsTcpMonitorFields to handle AdditionalProperties +func (a SyntheticsTcpMonitorFields) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Alert != nil { + object["alert"], err = json.Marshal(a.Alert) + if err != nil { + return nil, fmt.Errorf("error marshaling 'alert': %w", err) + } + } + + if a.Enabled != nil { + object["enabled"], err = json.Marshal(a.Enabled) + if err != nil { + return nil, fmt.Errorf("error marshaling 'enabled': %w", err) + } + } + + object["host"], err = json.Marshal(a.Host) + if err != nil { + return nil, fmt.Errorf("error marshaling 'host': %w", err) + } + + if a.Labels != nil { + object["labels"], err = json.Marshal(a.Labels) + if err != nil { + return nil, fmt.Errorf("error marshaling 'labels': %w", err) + } + } + + if a.Locations != nil { + object["locations"], err = json.Marshal(a.Locations) + if err != nil { + return nil, fmt.Errorf("error marshaling 'locations': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Namespace != nil { + object["namespace"], err = json.Marshal(a.Namespace) + if err != nil { + return nil, fmt.Errorf("error marshaling 'namespace': %w", err) + } + } + + if a.Params != nil { + object["params"], err = json.Marshal(a.Params) + if err != nil { + return nil, fmt.Errorf("error marshaling 'params': %w", err) + } + } + + if a.PrivateLocations != nil { + object["private_locations"], err = json.Marshal(a.PrivateLocations) + if err != nil { + return nil, fmt.Errorf("error marshaling 'private_locations': %w", err) + } + } + + if a.ProxyUrl != nil { + object["proxy_url"], err = 
json.Marshal(a.ProxyUrl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_url': %w", err) + } + } + + if a.ProxyUseLocalResolver != nil { + object["proxy_use_local_resolver"], err = json.Marshal(a.ProxyUseLocalResolver) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_use_local_resolver': %w", err) + } + } + + if a.RetestOnFailure != nil { + object["retest_on_failure"], err = json.Marshal(a.RetestOnFailure) + if err != nil { + return nil, fmt.Errorf("error marshaling 'retest_on_failure': %w", err) + } + } + + if a.Schedule != nil { + object["schedule"], err = json.Marshal(a.Schedule) + if err != nil { + return nil, fmt.Errorf("error marshaling 'schedule': %w", err) + } + } + + if a.ServiceName != nil { + object["service.name"], err = json.Marshal(a.ServiceName) + if err != nil { + return nil, fmt.Errorf("error marshaling 'service.name': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + if a.Tags != nil { + object["tags"], err = json.Marshal(a.Tags) + if err != nil { + return nil, fmt.Errorf("error marshaling 'tags': %w", err) + } + } + + if a.Timeout != nil { + object["timeout"], err = json.Marshal(a.Timeout) + if err != nil { + return nil, fmt.Errorf("error marshaling 'timeout': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges. 
Returns the specified +// element and whether it was found +func (a AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges +func (a *AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges to handle AdditionalProperties +func (a *AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["cluster"]; found { + err = json.Unmarshal(raw, &a.Cluster) + if err != nil { + return fmt.Errorf("error reading 'cluster': %w", err) + } + delete(object, "cluster") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges to handle AdditionalProperties +func (a AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Cluster != nil { + object["cluster"], err = json.Marshal(a.Cluster) + if err != nil { + return nil, fmt.Errorf("error marshaling 'cluster': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch. 
Returns the specified +// element and whether it was found +func (a AgentPolicy_PackagePolicies_1_Elasticsearch) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch +func (a *AgentPolicy_PackagePolicies_1_Elasticsearch) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch to handle AdditionalProperties +func (a *AgentPolicy_PackagePolicies_1_Elasticsearch) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["privileges"]; found { + err = json.Unmarshal(raw, &a.Privileges) + if err != nil { + return fmt.Errorf("error reading 'privileges': %w", err) + } + delete(object, "privileges") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch to handle AdditionalProperties +func (a AgentPolicy_PackagePolicies_1_Elasticsearch) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Privileges != nil { + object["privileges"], err = json.Marshal(a.Privileges) + if err != nil { + return nil, fmt.Errorf("error marshaling 'privileges': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for CreateConnectorConfig. Returns the specified +// element and whether it was found +func (a CreateConnectorConfig) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for CreateConnectorConfig +func (a *CreateConnectorConfig) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Getter for additional properties for CreateConnectorSecrets. Returns the specified +// element and whether it was found +func (a CreateConnectorSecrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for CreateConnectorSecrets +func (a *CreateConnectorSecrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Getter for additional properties for OutputElasticsearch. 
Returns the specified +// element and whether it was found +func (a OutputElasticsearch) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputElasticsearch +func (a *OutputElasticsearch) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputElasticsearch to handle AdditionalProperties +func (a *OutputElasticsearch) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["allow_edit"]; found { + err = json.Unmarshal(raw, &a.AllowEdit) + if err != nil { + return fmt.Errorf("error reading 'allow_edit': %w", err) + } + delete(object, "allow_edit") + } + + if raw, found := object["ca_sha256"]; found { + err = json.Unmarshal(raw, &a.CaSha256) + if err != nil { + return fmt.Errorf("error reading 'ca_sha256': %w", err) + } + delete(object, "ca_sha256") + } + + if raw, found := object["ca_trusted_fingerprint"]; found { + err = json.Unmarshal(raw, &a.CaTrustedFingerprint) + if err != nil { + return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) + } + delete(object, "ca_trusted_fingerprint") + } + + if raw, found := object["config_yaml"]; found { + err = json.Unmarshal(raw, &a.ConfigYaml) + if err != nil { + return fmt.Errorf("error reading 'config_yaml': %w", err) + } + delete(object, "config_yaml") + } + + if raw, found := object["hosts"]; found { + err = json.Unmarshal(raw, &a.Hosts) + if err != nil { + return fmt.Errorf("error reading 'hosts': %w", err) + } + delete(object, "hosts") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["is_default"]; found { + err = json.Unmarshal(raw, &a.IsDefault) + if err != nil { + return fmt.Errorf("error reading 'is_default': %w", err) + } + delete(object, "is_default") + } + + if raw, found := object["is_default_monitoring"]; found { + err = json.Unmarshal(raw, &a.IsDefaultMonitoring) + if err != nil { + return fmt.Errorf("error reading 'is_default_monitoring': %w", err) + } + delete(object, "is_default_monitoring") + } + + if raw, found := object["is_internal"]; found { + err = json.Unmarshal(raw, &a.IsInternal) + if err != nil { + return fmt.Errorf("error reading 'is_internal': %w", err) + } + delete(object, "is_internal") + } + + if raw, found := object["is_preconfigured"]; found { + err = json.Unmarshal(raw, &a.IsPreconfigured) + if err != nil { + return fmt.Errorf("error reading 'is_preconfigured': %w", err) + } + delete(object, "is_preconfigured") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["preset"]; found { + err = json.Unmarshal(raw, &a.Preset) + if err != nil { + return fmt.Errorf("error reading 'preset': %w", err) + } + delete(object, "preset") + } + + if raw, found := object["proxy_id"]; found { + err = json.Unmarshal(raw, &a.ProxyId) + if err != nil { + return fmt.Errorf("error reading 'proxy_id': %w", err) + } + delete(object, "proxy_id") + } + + if raw, 
found := object["secrets"]; found { + err = json.Unmarshal(raw, &a.Secrets) + if err != nil { + return fmt.Errorf("error reading 'secrets': %w", err) + } + delete(object, "secrets") + } + + if raw, found := object["shipper"]; found { + err = json.Unmarshal(raw, &a.Shipper) + if err != nil { + return fmt.Errorf("error reading 'shipper': %w", err) + } + delete(object, "shipper") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["write_to_logs_streams"]; found { + err = json.Unmarshal(raw, &a.WriteToLogsStreams) + if err != nil { + return fmt.Errorf("error reading 'write_to_logs_streams': %w", err) + } + delete(object, "write_to_logs_streams") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputElasticsearch to handle AdditionalProperties +func (a OutputElasticsearch) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AllowEdit != nil { + object["allow_edit"], err = json.Marshal(a.AllowEdit) + if err != nil { + return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) + } + } + + if a.CaSha256 != nil { + object["ca_sha256"], err = json.Marshal(a.CaSha256) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) + } + } + + if a.CaTrustedFingerprint != nil { + object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) + } + } + + if a.ConfigYaml != nil { + object["config_yaml"], err = json.Marshal(a.ConfigYaml) + if err != nil { + return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) + } + } + + object["hosts"], err = json.Marshal(a.Hosts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hosts': %w", err) + } + + if a.Id != nil { + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + } + + if a.IsDefault != nil { + object["is_default"], err = json.Marshal(a.IsDefault) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default': %w", err) + } + } + + if a.IsDefaultMonitoring != nil { + object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) + } + } + + if a.IsInternal != nil { + object["is_internal"], err = json.Marshal(a.IsInternal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) + } + } + + if a.IsPreconfigured != nil { + object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Preset != nil { + 
object["preset"], err = json.Marshal(a.Preset) + if err != nil { + return nil, fmt.Errorf("error marshaling 'preset': %w", err) + } + } + + if a.ProxyId != nil { + object["proxy_id"], err = json.Marshal(a.ProxyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) + } + } + + if a.Secrets != nil { + object["secrets"], err = json.Marshal(a.Secrets) + if err != nil { + return nil, fmt.Errorf("error marshaling 'secrets': %w", err) + } + } + + if a.Shipper != nil { + object["shipper"], err = json.Marshal(a.Shipper) + if err != nil { + return nil, fmt.Errorf("error marshaling 'shipper': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.WriteToLogsStreams != nil { + object["write_to_logs_streams"], err = json.Marshal(a.WriteToLogsStreams) + if err != nil { + return nil, fmt.Errorf("error marshaling 'write_to_logs_streams': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputElasticsearchSecretsSslKey0. Returns the specified +// element and whether it was found +func (a OutputElasticsearchSecretsSslKey0) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputElasticsearchSecretsSslKey0 +func (a *OutputElasticsearchSecretsSslKey0) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputElasticsearchSecretsSslKey0 to handle AdditionalProperties +func (a *OutputElasticsearchSecretsSslKey0) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputElasticsearchSecretsSslKey0 to handle AdditionalProperties +func (a OutputElasticsearchSecretsSslKey0) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputElasticsearch_Secrets_Ssl. 
Returns the specified +// element and whether it was found +func (a OutputElasticsearch_Secrets_Ssl) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputElasticsearch_Secrets_Ssl +func (a *OutputElasticsearch_Secrets_Ssl) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputElasticsearch_Secrets_Ssl to handle AdditionalProperties +func (a *OutputElasticsearch_Secrets_Ssl) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputElasticsearch_Secrets_Ssl to handle AdditionalProperties +func (a OutputElasticsearch_Secrets_Ssl) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Key != nil { + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputElasticsearch_Secrets. 
Returns the specified +// element and whether it was found +func (a OutputElasticsearch_Secrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputElasticsearch_Secrets +func (a *OutputElasticsearch_Secrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputElasticsearch_Secrets to handle AdditionalProperties +func (a *OutputElasticsearch_Secrets) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputElasticsearch_Secrets to handle AdditionalProperties +func (a OutputElasticsearch_Secrets) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka. 
Returns the specified +// element and whether it was found +func (a OutputKafka) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka +func (a *OutputKafka) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka to handle AdditionalProperties +func (a *OutputKafka) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["allow_edit"]; found { + err = json.Unmarshal(raw, &a.AllowEdit) + if err != nil { + return fmt.Errorf("error reading 'allow_edit': %w", err) + } + delete(object, "allow_edit") + } + + if raw, found := object["auth_type"]; found { + err = json.Unmarshal(raw, &a.AuthType) + if err != nil { + return fmt.Errorf("error reading 'auth_type': %w", err) + } + delete(object, "auth_type") + } + + if raw, found := object["broker_timeout"]; found { + err = json.Unmarshal(raw, &a.BrokerTimeout) + if err != nil { + return fmt.Errorf("error reading 'broker_timeout': %w", err) + } + delete(object, "broker_timeout") + } + + if raw, found := object["ca_sha256"]; found { + err = json.Unmarshal(raw, &a.CaSha256) + if err != nil { + return fmt.Errorf("error reading 'ca_sha256': %w", err) + } + delete(object, "ca_sha256") + } + + if raw, found := object["ca_trusted_fingerprint"]; found { + err = json.Unmarshal(raw, &a.CaTrustedFingerprint) + if err != nil { + return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) + } + delete(object, "ca_trusted_fingerprint") + } + + if raw, found := object["client_id"]; found { + err = json.Unmarshal(raw, &a.ClientId) + if err != nil { + return fmt.Errorf("error reading 'client_id': %w", err) + } + delete(object, "client_id") + } + + if raw, found := object["compression"]; found { + err = json.Unmarshal(raw, &a.Compression) + if err != nil { + return fmt.Errorf("error reading 'compression': %w", err) + } + delete(object, "compression") + } + + if raw, found := object["compression_level"]; found { + err = json.Unmarshal(raw, &a.CompressionLevel) + if err != nil { + return fmt.Errorf("error reading 'compression_level': %w", err) + } + delete(object, "compression_level") + } + + if raw, found := object["config_yaml"]; found { + err = json.Unmarshal(raw, &a.ConfigYaml) + if err != nil { + return fmt.Errorf("error reading 'config_yaml': %w", err) + } + delete(object, "config_yaml") + } + + if raw, found := object["connection_type"]; found { + err = json.Unmarshal(raw, &a.ConnectionType) + if err != nil { + return fmt.Errorf("error reading 'connection_type': %w", err) + } + delete(object, "connection_type") + } + + if raw, found := object["hash"]; found { + err = json.Unmarshal(raw, &a.Hash) + if err != nil { + return fmt.Errorf("error reading 'hash': %w", err) + } + delete(object, "hash") + } + + if raw, found := object["headers"]; found { + err = json.Unmarshal(raw, &a.Headers) + if err != nil { + return fmt.Errorf("error reading 'headers': %w", err) + } + delete(object, "headers") + } + + if raw, found := object["hosts"]; found { + err = json.Unmarshal(raw, &a.Hosts) + if err != nil { + return fmt.Errorf("error reading 'hosts': %w", err) + } + delete(object, "hosts") + } + + if raw, found := 
object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["is_default"]; found { + err = json.Unmarshal(raw, &a.IsDefault) + if err != nil { + return fmt.Errorf("error reading 'is_default': %w", err) + } + delete(object, "is_default") + } + + if raw, found := object["is_default_monitoring"]; found { + err = json.Unmarshal(raw, &a.IsDefaultMonitoring) + if err != nil { + return fmt.Errorf("error reading 'is_default_monitoring': %w", err) + } + delete(object, "is_default_monitoring") + } + + if raw, found := object["is_internal"]; found { + err = json.Unmarshal(raw, &a.IsInternal) + if err != nil { + return fmt.Errorf("error reading 'is_internal': %w", err) + } + delete(object, "is_internal") + } + + if raw, found := object["is_preconfigured"]; found { + err = json.Unmarshal(raw, &a.IsPreconfigured) + if err != nil { + return fmt.Errorf("error reading 'is_preconfigured': %w", err) + } + delete(object, "is_preconfigured") + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["partition"]; found { + err = json.Unmarshal(raw, &a.Partition) + if err != nil { + return fmt.Errorf("error reading 'partition': %w", err) + } + delete(object, "partition") + } + + if raw, found := object["password"]; found { + err = json.Unmarshal(raw, &a.Password) + if err != nil { + return fmt.Errorf("error reading 'password': %w", err) + } + delete(object, "password") + } + + if raw, found := object["proxy_id"]; found { + err = json.Unmarshal(raw, &a.ProxyId) + if err != nil { + return fmt.Errorf("error reading 'proxy_id': %w", err) + } + delete(object, "proxy_id") + } + + if raw, found := object["random"]; found { + err = json.Unmarshal(raw, &a.Random) + if err != nil { + return fmt.Errorf("error reading 'random': %w", err) + } + delete(object, "random") + } + + if raw, found := object["required_acks"]; found { + err = json.Unmarshal(raw, &a.RequiredAcks) + if err != nil { + return fmt.Errorf("error reading 'required_acks': %w", err) + } + delete(object, "required_acks") + } + + if raw, found := object["round_robin"]; found { + err = json.Unmarshal(raw, &a.RoundRobin) + if err != nil { + return fmt.Errorf("error reading 'round_robin': %w", err) + } + delete(object, "round_robin") + } + + if raw, found := object["sasl"]; found { + err = json.Unmarshal(raw, &a.Sasl) + if err != nil { + return fmt.Errorf("error reading 'sasl': %w", err) + } + delete(object, "sasl") + } + + if raw, found := object["secrets"]; found { + err = json.Unmarshal(raw, &a.Secrets) + if err != nil { + return fmt.Errorf("error reading 'secrets': %w", err) + } + delete(object, "secrets") + } + + if raw, found := object["shipper"]; found { + err = json.Unmarshal(raw, &a.Shipper) + if err != nil { + return fmt.Errorf("error reading 'shipper': %w", err) + } + delete(object, "shipper") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if raw, found := object["timeout"]; found { + err = json.Unmarshal(raw, &a.Timeout) + if err != nil { + return 
fmt.Errorf("error reading 'timeout': %w", err) + } + delete(object, "timeout") + } + + if raw, found := object["topic"]; found { + err = json.Unmarshal(raw, &a.Topic) + if err != nil { + return fmt.Errorf("error reading 'topic': %w", err) + } + delete(object, "topic") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["username"]; found { + err = json.Unmarshal(raw, &a.Username) + if err != nil { + return fmt.Errorf("error reading 'username': %w", err) + } + delete(object, "username") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if raw, found := object["write_to_logs_streams"]; found { + err = json.Unmarshal(raw, &a.WriteToLogsStreams) + if err != nil { + return fmt.Errorf("error reading 'write_to_logs_streams': %w", err) + } + delete(object, "write_to_logs_streams") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka to handle AdditionalProperties +func (a OutputKafka) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AllowEdit != nil { + object["allow_edit"], err = json.Marshal(a.AllowEdit) + if err != nil { + return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) + } + } + + object["auth_type"], err = json.Marshal(a.AuthType) + if err != nil { + return nil, fmt.Errorf("error marshaling 'auth_type': %w", err) + } + + if a.BrokerTimeout != nil { + object["broker_timeout"], err = json.Marshal(a.BrokerTimeout) + if err != nil { + return nil, fmt.Errorf("error marshaling 'broker_timeout': %w", err) + } + } + + if a.CaSha256 != nil { + object["ca_sha256"], err = json.Marshal(a.CaSha256) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) + } + } + + if a.CaTrustedFingerprint != nil { + object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) + } + } + + if a.ClientId != nil { + object["client_id"], err = json.Marshal(a.ClientId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'client_id': %w", err) + } + } + + if a.Compression != nil { + object["compression"], err = json.Marshal(a.Compression) + if err != nil { + return nil, fmt.Errorf("error marshaling 'compression': %w", err) + } + } + + object["compression_level"], err = json.Marshal(a.CompressionLevel) + if err != nil { + return nil, fmt.Errorf("error marshaling 'compression_level': %w", err) + } + + if a.ConfigYaml != nil { + object["config_yaml"], err = json.Marshal(a.ConfigYaml) + if err != nil { + return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) + } + } + + object["connection_type"], err = json.Marshal(a.ConnectionType) + if err != nil { + return nil, fmt.Errorf("error marshaling 'connection_type': %w", err) + } + + if a.Hash != nil { + object["hash"], err = json.Marshal(a.Hash) + if err != nil { + return nil, 
fmt.Errorf("error marshaling 'hash': %w", err) + } + } + + if a.Headers != nil { + object["headers"], err = json.Marshal(a.Headers) + if err != nil { + return nil, fmt.Errorf("error marshaling 'headers': %w", err) + } + } + + object["hosts"], err = json.Marshal(a.Hosts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hosts': %w", err) + } + + if a.Id != nil { + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + } + + if a.IsDefault != nil { + object["is_default"], err = json.Marshal(a.IsDefault) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default': %w", err) + } + } + + if a.IsDefaultMonitoring != nil { + object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) + } + } + + if a.IsInternal != nil { + object["is_internal"], err = json.Marshal(a.IsInternal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) + } + } + + if a.IsPreconfigured != nil { + object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) + } + } + + if a.Key != nil { + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Partition != nil { + object["partition"], err = json.Marshal(a.Partition) + if err != nil { + return nil, fmt.Errorf("error marshaling 'partition': %w", err) + } + } + + object["password"], err = json.Marshal(a.Password) + if err != nil { + return nil, fmt.Errorf("error marshaling 'password': %w", err) + } + + if a.ProxyId != nil { + object["proxy_id"], err = json.Marshal(a.ProxyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) + } + } + + if a.Random != nil { + object["random"], err = json.Marshal(a.Random) + if err != nil { + return nil, fmt.Errorf("error marshaling 'random': %w", err) + } + } + + if a.RequiredAcks != nil { + object["required_acks"], err = json.Marshal(a.RequiredAcks) + if err != nil { + return nil, fmt.Errorf("error marshaling 'required_acks': %w", err) + } + } + + if a.RoundRobin != nil { + object["round_robin"], err = json.Marshal(a.RoundRobin) + if err != nil { + return nil, fmt.Errorf("error marshaling 'round_robin': %w", err) + } + } + + if a.Sasl != nil { + object["sasl"], err = json.Marshal(a.Sasl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'sasl': %w", err) + } + } + + if a.Secrets != nil { + object["secrets"], err = json.Marshal(a.Secrets) + if err != nil { + return nil, fmt.Errorf("error marshaling 'secrets': %w", err) + } + } + + if a.Shipper != nil { + object["shipper"], err = json.Marshal(a.Shipper) + if err != nil { + return nil, fmt.Errorf("error marshaling 'shipper': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + if a.Timeout != nil { + object["timeout"], err = json.Marshal(a.Timeout) + if err != nil { + return nil, fmt.Errorf("error marshaling 'timeout': %w", err) + } + } + + if a.Topic != nil { + object["topic"], err = json.Marshal(a.Topic) + if err != nil { + return nil, fmt.Errorf("error marshaling 'topic': %w", err) + } + } + + 
object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + object["username"], err = json.Marshal(a.Username) + if err != nil { + return nil, fmt.Errorf("error marshaling 'username': %w", err) + } + + if a.Version != nil { + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + } + + if a.WriteToLogsStreams != nil { + object["write_to_logs_streams"], err = json.Marshal(a.WriteToLogsStreams) + if err != nil { + return nil, fmt.Errorf("error marshaling 'write_to_logs_streams': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Hash. Returns the specified +// element and whether it was found +func (a OutputKafka_Hash) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Hash +func (a *OutputKafka_Hash) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Hash to handle AdditionalProperties +func (a *OutputKafka_Hash) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["hash"]; found { + err = json.Unmarshal(raw, &a.Hash) + if err != nil { + return fmt.Errorf("error reading 'hash': %w", err) + } + delete(object, "hash") + } + + if raw, found := object["random"]; found { + err = json.Unmarshal(raw, &a.Random) + if err != nil { + return fmt.Errorf("error reading 'random': %w", err) + } + delete(object, "random") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Hash to handle AdditionalProperties +func (a OutputKafka_Hash) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Hash != nil { + object["hash"], err = json.Marshal(a.Hash) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hash': %w", err) + } + } + + if a.Random != nil { + object["random"], err = json.Marshal(a.Random) + if err != nil { + return nil, fmt.Errorf("error marshaling 'random': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Headers_Item. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Headers_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Headers_Item +func (a *OutputKafka_Headers_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Headers_Item to handle AdditionalProperties +func (a *OutputKafka_Headers_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if raw, found := object["value"]; found { + err = json.Unmarshal(raw, &a.Value) + if err != nil { + return fmt.Errorf("error reading 'value': %w", err) + } + delete(object, "value") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Headers_Item to handle AdditionalProperties +func (a OutputKafka_Headers_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + + object["value"], err = json.Marshal(a.Value) + if err != nil { + return nil, fmt.Errorf("error marshaling 'value': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Random. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Random) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Random +func (a *OutputKafka_Random) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Random to handle AdditionalProperties +func (a *OutputKafka_Random) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["group_events"]; found { + err = json.Unmarshal(raw, &a.GroupEvents) + if err != nil { + return fmt.Errorf("error reading 'group_events': %w", err) + } + delete(object, "group_events") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Random to handle AdditionalProperties +func (a OutputKafka_Random) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.GroupEvents != nil { + object["group_events"], err = json.Marshal(a.GroupEvents) + if err != nil { + return nil, fmt.Errorf("error marshaling 'group_events': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_RoundRobin. 
Returns the specified +// element and whether it was found +func (a OutputKafka_RoundRobin) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_RoundRobin +func (a *OutputKafka_RoundRobin) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_RoundRobin to handle AdditionalProperties +func (a *OutputKafka_RoundRobin) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["group_events"]; found { + err = json.Unmarshal(raw, &a.GroupEvents) + if err != nil { + return fmt.Errorf("error reading 'group_events': %w", err) + } + delete(object, "group_events") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_RoundRobin to handle AdditionalProperties +func (a OutputKafka_RoundRobin) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.GroupEvents != nil { + object["group_events"], err = json.Marshal(a.GroupEvents) + if err != nil { + return nil, fmt.Errorf("error marshaling 'group_events': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Sasl. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Sasl) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Sasl +func (a *OutputKafka_Sasl) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Sasl to handle AdditionalProperties +func (a *OutputKafka_Sasl) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["mechanism"]; found { + err = json.Unmarshal(raw, &a.Mechanism) + if err != nil { + return fmt.Errorf("error reading 'mechanism': %w", err) + } + delete(object, "mechanism") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Sasl to handle AdditionalProperties +func (a OutputKafka_Sasl) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Mechanism != nil { + object["mechanism"], err = json.Marshal(a.Mechanism) + if err != nil { + return nil, fmt.Errorf("error marshaling 'mechanism': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafkaSecretsPassword0. 
Returns the specified +// element and whether it was found +func (a OutputKafkaSecretsPassword0) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafkaSecretsPassword0 +func (a *OutputKafkaSecretsPassword0) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafkaSecretsPassword0 to handle AdditionalProperties +func (a *OutputKafkaSecretsPassword0) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafkaSecretsPassword0 to handle AdditionalProperties +func (a OutputKafkaSecretsPassword0) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafkaSecretsSslKey0. 
Returns the specified +// element and whether it was found +func (a OutputKafkaSecretsSslKey0) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafkaSecretsSslKey0 +func (a *OutputKafkaSecretsSslKey0) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafkaSecretsSslKey0 to handle AdditionalProperties +func (a *OutputKafkaSecretsSslKey0) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafkaSecretsSslKey0 to handle AdditionalProperties +func (a OutputKafkaSecretsSslKey0) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Secrets_Ssl. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Secrets_Ssl) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Secrets_Ssl +func (a *OutputKafka_Secrets_Ssl) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Secrets_Ssl to handle AdditionalProperties +func (a *OutputKafka_Secrets_Ssl) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Secrets_Ssl to handle AdditionalProperties +func (a OutputKafka_Secrets_Ssl) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputKafka_Secrets. 
Returns the specified +// element and whether it was found +func (a OutputKafka_Secrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputKafka_Secrets +func (a *OutputKafka_Secrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputKafka_Secrets to handle AdditionalProperties +func (a *OutputKafka_Secrets) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["password"]; found { + err = json.Unmarshal(raw, &a.Password) + if err != nil { + return fmt.Errorf("error reading 'password': %w", err) + } + delete(object, "password") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputKafka_Secrets to handle AdditionalProperties +func (a OutputKafka_Secrets) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Password != nil { + object["password"], err = json.Marshal(a.Password) + if err != nil { + return nil, fmt.Errorf("error marshaling 'password': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputLogstash. 
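The generated UnmarshalJSON methods above all follow one pattern: each known key is decoded into its typed field and removed from the working map, and whatever remains is kept in AdditionalProperties, which the matching MarshalJSON writes back out. A minimal sketch of that behaviour, using a hypothetical stand-in struct rather than the generated types:

package main

import (
	"encoding/json"
	"fmt"
)

// secrets is a stand-in with one known key ("password") and a catch-all map.
type secrets struct {
	Password             *string
	AdditionalProperties map[string]interface{}
}

func (a *secrets) UnmarshalJSON(b []byte) error {
	object := make(map[string]json.RawMessage)
	if err := json.Unmarshal(b, &object); err != nil {
		return err
	}
	// Known key: decode into the typed field, then drop it from the map.
	if raw, found := object["password"]; found {
		if err := json.Unmarshal(raw, &a.Password); err != nil {
			return fmt.Errorf("error reading 'password': %w", err)
		}
		delete(object, "password")
	}
	// Whatever is left over is preserved as additional properties.
	if len(object) != 0 {
		a.AdditionalProperties = make(map[string]interface{})
		for fieldName, fieldBuf := range object {
			var fieldVal interface{}
			if err := json.Unmarshal(fieldBuf, &fieldVal); err != nil {
				return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err)
			}
			a.AdditionalProperties[fieldName] = fieldVal
		}
	}
	return nil
}

func main() {
	var s secrets
	_ = json.Unmarshal([]byte(`{"password":"p","api_key":"k"}`), &s)
	fmt.Println(*s.Password)            // p – known key lands in the typed field
	fmt.Println(s.AdditionalProperties) // map[api_key:k] – unknown key is preserved
}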
Returns the specified +// element and whether it was found +func (a OutputLogstash) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputLogstash +func (a *OutputLogstash) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputLogstash to handle AdditionalProperties +func (a *OutputLogstash) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["allow_edit"]; found { + err = json.Unmarshal(raw, &a.AllowEdit) + if err != nil { + return fmt.Errorf("error reading 'allow_edit': %w", err) + } + delete(object, "allow_edit") + } + + if raw, found := object["ca_sha256"]; found { + err = json.Unmarshal(raw, &a.CaSha256) + if err != nil { + return fmt.Errorf("error reading 'ca_sha256': %w", err) + } + delete(object, "ca_sha256") + } + + if raw, found := object["ca_trusted_fingerprint"]; found { + err = json.Unmarshal(raw, &a.CaTrustedFingerprint) + if err != nil { + return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) + } + delete(object, "ca_trusted_fingerprint") + } + + if raw, found := object["config_yaml"]; found { + err = json.Unmarshal(raw, &a.ConfigYaml) + if err != nil { + return fmt.Errorf("error reading 'config_yaml': %w", err) + } + delete(object, "config_yaml") + } + + if raw, found := object["hosts"]; found { + err = json.Unmarshal(raw, &a.Hosts) + if err != nil { + return fmt.Errorf("error reading 'hosts': %w", err) + } + delete(object, "hosts") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["is_default"]; found { + err = json.Unmarshal(raw, &a.IsDefault) + if err != nil { + return fmt.Errorf("error reading 'is_default': %w", err) + } + delete(object, "is_default") + } + + if raw, found := object["is_default_monitoring"]; found { + err = json.Unmarshal(raw, &a.IsDefaultMonitoring) + if err != nil { + return fmt.Errorf("error reading 'is_default_monitoring': %w", err) + } + delete(object, "is_default_monitoring") + } + + if raw, found := object["is_internal"]; found { + err = json.Unmarshal(raw, &a.IsInternal) + if err != nil { + return fmt.Errorf("error reading 'is_internal': %w", err) + } + delete(object, "is_internal") + } + + if raw, found := object["is_preconfigured"]; found { + err = json.Unmarshal(raw, &a.IsPreconfigured) + if err != nil { + return fmt.Errorf("error reading 'is_preconfigured': %w", err) + } + delete(object, "is_preconfigured") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["proxy_id"]; found { + err = json.Unmarshal(raw, &a.ProxyId) + if err != nil { + return fmt.Errorf("error reading 'proxy_id': %w", err) + } + delete(object, "proxy_id") + } + + if raw, found := object["secrets"]; found { + err = json.Unmarshal(raw, &a.Secrets) + if err != nil { + return fmt.Errorf("error reading 'secrets': %w", err) + } + delete(object, "secrets") + } + + if raw, found := 
object["shipper"]; found { + err = json.Unmarshal(raw, &a.Shipper) + if err != nil { + return fmt.Errorf("error reading 'shipper': %w", err) + } + delete(object, "shipper") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["write_to_logs_streams"]; found { + err = json.Unmarshal(raw, &a.WriteToLogsStreams) + if err != nil { + return fmt.Errorf("error reading 'write_to_logs_streams': %w", err) + } + delete(object, "write_to_logs_streams") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputLogstash to handle AdditionalProperties +func (a OutputLogstash) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AllowEdit != nil { + object["allow_edit"], err = json.Marshal(a.AllowEdit) + if err != nil { + return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) + } + } + + if a.CaSha256 != nil { + object["ca_sha256"], err = json.Marshal(a.CaSha256) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) + } + } + + if a.CaTrustedFingerprint != nil { + object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) + } + } + + if a.ConfigYaml != nil { + object["config_yaml"], err = json.Marshal(a.ConfigYaml) + if err != nil { + return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) + } + } + + object["hosts"], err = json.Marshal(a.Hosts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hosts': %w", err) + } + + if a.Id != nil { + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + } + + if a.IsDefault != nil { + object["is_default"], err = json.Marshal(a.IsDefault) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default': %w", err) + } + } + + if a.IsDefaultMonitoring != nil { + object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) + } + } + + if a.IsInternal != nil { + object["is_internal"], err = json.Marshal(a.IsInternal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) + } + } + + if a.IsPreconfigured != nil { + object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.ProxyId != nil { + object["proxy_id"], err = json.Marshal(a.ProxyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) + } + } + + if a.Secrets != nil { + object["secrets"], err = json.Marshal(a.Secrets) + 
if err != nil { + return nil, fmt.Errorf("error marshaling 'secrets': %w", err) + } + } + + if a.Shipper != nil { + object["shipper"], err = json.Marshal(a.Shipper) + if err != nil { + return nil, fmt.Errorf("error marshaling 'shipper': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.WriteToLogsStreams != nil { + object["write_to_logs_streams"], err = json.Marshal(a.WriteToLogsStreams) + if err != nil { + return nil, fmt.Errorf("error marshaling 'write_to_logs_streams': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputLogstashSecretsSslKey0. Returns the specified +// element and whether it was found +func (a OutputLogstashSecretsSslKey0) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputLogstashSecretsSslKey0 +func (a *OutputLogstashSecretsSslKey0) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputLogstashSecretsSslKey0 to handle AdditionalProperties +func (a *OutputLogstashSecretsSslKey0) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputLogstashSecretsSslKey0 to handle AdditionalProperties +func (a OutputLogstashSecretsSslKey0) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputLogstash_Secrets_Ssl. 
Returns the specified +// element and whether it was found +func (a OutputLogstash_Secrets_Ssl) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputLogstash_Secrets_Ssl +func (a *OutputLogstash_Secrets_Ssl) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputLogstash_Secrets_Ssl to handle AdditionalProperties +func (a *OutputLogstash_Secrets_Ssl) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputLogstash_Secrets_Ssl to handle AdditionalProperties +func (a OutputLogstash_Secrets_Ssl) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Key != nil { + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputLogstash_Secrets. 
Returns the specified +// element and whether it was found +func (a OutputLogstash_Secrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputLogstash_Secrets +func (a *OutputLogstash_Secrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputLogstash_Secrets to handle AdditionalProperties +func (a *OutputLogstash_Secrets) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputLogstash_Secrets to handle AdditionalProperties +func (a OutputLogstash_Secrets) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputRemoteElasticsearch. 
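Because leftover keys are decoded into interface{}, they take encoding/json's generic shapes once they land in AdditionalProperties: numbers become float64, arrays become []interface{}, and nested objects become map[string]interface{}. A small illustrative sketch (the key names are made up):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Decoding arbitrary JSON into interface{} is what the generated code does
	// for each leftover field before storing it in AdditionalProperties.
	var extra interface{}
	_ = json.Unmarshal([]byte(`{"timeout":30,"protocols":["TLSv1.2","TLSv1.3"]}`), &extra)

	fields := extra.(map[string]interface{})
	fmt.Printf("%T\n", fields["timeout"])   // float64
	fmt.Printf("%T\n", fields["protocols"]) // []interface {}
}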
Returns the specified +// element and whether it was found +func (a OutputRemoteElasticsearch) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputRemoteElasticsearch +func (a *OutputRemoteElasticsearch) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputRemoteElasticsearch to handle AdditionalProperties +func (a *OutputRemoteElasticsearch) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["allow_edit"]; found { + err = json.Unmarshal(raw, &a.AllowEdit) + if err != nil { + return fmt.Errorf("error reading 'allow_edit': %w", err) + } + delete(object, "allow_edit") + } + + if raw, found := object["ca_sha256"]; found { + err = json.Unmarshal(raw, &a.CaSha256) + if err != nil { + return fmt.Errorf("error reading 'ca_sha256': %w", err) + } + delete(object, "ca_sha256") + } + + if raw, found := object["ca_trusted_fingerprint"]; found { + err = json.Unmarshal(raw, &a.CaTrustedFingerprint) + if err != nil { + return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) + } + delete(object, "ca_trusted_fingerprint") + } + + if raw, found := object["config_yaml"]; found { + err = json.Unmarshal(raw, &a.ConfigYaml) + if err != nil { + return fmt.Errorf("error reading 'config_yaml': %w", err) + } + delete(object, "config_yaml") + } + + if raw, found := object["hosts"]; found { + err = json.Unmarshal(raw, &a.Hosts) + if err != nil { + return fmt.Errorf("error reading 'hosts': %w", err) + } + delete(object, "hosts") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["is_default"]; found { + err = json.Unmarshal(raw, &a.IsDefault) + if err != nil { + return fmt.Errorf("error reading 'is_default': %w", err) + } + delete(object, "is_default") + } + + if raw, found := object["is_default_monitoring"]; found { + err = json.Unmarshal(raw, &a.IsDefaultMonitoring) + if err != nil { + return fmt.Errorf("error reading 'is_default_monitoring': %w", err) + } + delete(object, "is_default_monitoring") + } + + if raw, found := object["is_internal"]; found { + err = json.Unmarshal(raw, &a.IsInternal) + if err != nil { + return fmt.Errorf("error reading 'is_internal': %w", err) + } + delete(object, "is_internal") + } + + if raw, found := object["is_preconfigured"]; found { + err = json.Unmarshal(raw, &a.IsPreconfigured) + if err != nil { + return fmt.Errorf("error reading 'is_preconfigured': %w", err) + } + delete(object, "is_preconfigured") + } + + if raw, found := object["kibana_api_key"]; found { + err = json.Unmarshal(raw, &a.KibanaApiKey) + if err != nil { + return fmt.Errorf("error reading 'kibana_api_key': %w", err) + } + delete(object, "kibana_api_key") + } + + if raw, found := object["kibana_url"]; found { + err = json.Unmarshal(raw, &a.KibanaUrl) + if err != nil { + return fmt.Errorf("error reading 'kibana_url': %w", err) + } + delete(object, "kibana_url") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error 
reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["preset"]; found { + err = json.Unmarshal(raw, &a.Preset) + if err != nil { + return fmt.Errorf("error reading 'preset': %w", err) + } + delete(object, "preset") + } + + if raw, found := object["proxy_id"]; found { + err = json.Unmarshal(raw, &a.ProxyId) + if err != nil { + return fmt.Errorf("error reading 'proxy_id': %w", err) + } + delete(object, "proxy_id") + } + + if raw, found := object["secrets"]; found { + err = json.Unmarshal(raw, &a.Secrets) + if err != nil { + return fmt.Errorf("error reading 'secrets': %w", err) + } + delete(object, "secrets") + } + + if raw, found := object["service_token"]; found { + err = json.Unmarshal(raw, &a.ServiceToken) + if err != nil { + return fmt.Errorf("error reading 'service_token': %w", err) + } + delete(object, "service_token") + } + + if raw, found := object["shipper"]; found { + err = json.Unmarshal(raw, &a.Shipper) + if err != nil { + return fmt.Errorf("error reading 'shipper': %w", err) + } + delete(object, "shipper") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if raw, found := object["sync_integrations"]; found { + err = json.Unmarshal(raw, &a.SyncIntegrations) + if err != nil { + return fmt.Errorf("error reading 'sync_integrations': %w", err) + } + delete(object, "sync_integrations") + } + + if raw, found := object["sync_uninstalled_integrations"]; found { + err = json.Unmarshal(raw, &a.SyncUninstalledIntegrations) + if err != nil { + return fmt.Errorf("error reading 'sync_uninstalled_integrations': %w", err) + } + delete(object, "sync_uninstalled_integrations") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["write_to_logs_streams"]; found { + err = json.Unmarshal(raw, &a.WriteToLogsStreams) + if err != nil { + return fmt.Errorf("error reading 'write_to_logs_streams': %w", err) + } + delete(object, "write_to_logs_streams") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputRemoteElasticsearch to handle AdditionalProperties +func (a OutputRemoteElasticsearch) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AllowEdit != nil { + object["allow_edit"], err = json.Marshal(a.AllowEdit) + if err != nil { + return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) + } + } + + if a.CaSha256 != nil { + object["ca_sha256"], err = json.Marshal(a.CaSha256) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) + } + } + + if a.CaTrustedFingerprint != nil { + object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) + } + } + + if a.ConfigYaml != nil { + object["config_yaml"], err = json.Marshal(a.ConfigYaml) + if err != nil { + return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) + } + } 
+ + object["hosts"], err = json.Marshal(a.Hosts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'hosts': %w", err) + } + + if a.Id != nil { + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + } + + if a.IsDefault != nil { + object["is_default"], err = json.Marshal(a.IsDefault) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default': %w", err) + } + } + + if a.IsDefaultMonitoring != nil { + object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) + } + } + + if a.IsInternal != nil { + object["is_internal"], err = json.Marshal(a.IsInternal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) + } + } + + if a.IsPreconfigured != nil { + object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) + if err != nil { + return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) + } + } + + if a.KibanaApiKey != nil { + object["kibana_api_key"], err = json.Marshal(a.KibanaApiKey) + if err != nil { + return nil, fmt.Errorf("error marshaling 'kibana_api_key': %w", err) + } + } + + if a.KibanaUrl != nil { + object["kibana_url"], err = json.Marshal(a.KibanaUrl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'kibana_url': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Preset != nil { + object["preset"], err = json.Marshal(a.Preset) + if err != nil { + return nil, fmt.Errorf("error marshaling 'preset': %w", err) + } + } + + if a.ProxyId != nil { + object["proxy_id"], err = json.Marshal(a.ProxyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) + } + } + + if a.Secrets != nil { + object["secrets"], err = json.Marshal(a.Secrets) + if err != nil { + return nil, fmt.Errorf("error marshaling 'secrets': %w", err) + } + } + + if a.ServiceToken != nil { + object["service_token"], err = json.Marshal(a.ServiceToken) + if err != nil { + return nil, fmt.Errorf("error marshaling 'service_token': %w", err) + } + } + + if a.Shipper != nil { + object["shipper"], err = json.Marshal(a.Shipper) + if err != nil { + return nil, fmt.Errorf("error marshaling 'shipper': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + if a.SyncIntegrations != nil { + object["sync_integrations"], err = json.Marshal(a.SyncIntegrations) + if err != nil { + return nil, fmt.Errorf("error marshaling 'sync_integrations': %w", err) + } + } + + if a.SyncUninstalledIntegrations != nil { + object["sync_uninstalled_integrations"], err = json.Marshal(a.SyncUninstalledIntegrations) + if err != nil { + return nil, fmt.Errorf("error marshaling 'sync_uninstalled_integrations': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.WriteToLogsStreams != nil { + object["write_to_logs_streams"], err = json.Marshal(a.WriteToLogsStreams) + if err != nil { + return nil, fmt.Errorf("error marshaling 'write_to_logs_streams': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, 
err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputRemoteElasticsearchSecretsServiceToken0. Returns the specified +// element and whether it was found +func (a OutputRemoteElasticsearchSecretsServiceToken0) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputRemoteElasticsearchSecretsServiceToken0 +func (a *OutputRemoteElasticsearchSecretsServiceToken0) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputRemoteElasticsearchSecretsServiceToken0 to handle AdditionalProperties +func (a *OutputRemoteElasticsearchSecretsServiceToken0) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputRemoteElasticsearchSecretsServiceToken0 to handle AdditionalProperties +func (a OutputRemoteElasticsearchSecretsServiceToken0) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputRemoteElasticsearchSecretsSslKey0. 
Returns the specified +// element and whether it was found +func (a OutputRemoteElasticsearchSecretsSslKey0) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputRemoteElasticsearchSecretsSslKey0 +func (a *OutputRemoteElasticsearchSecretsSslKey0) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputRemoteElasticsearchSecretsSslKey0 to handle AdditionalProperties +func (a *OutputRemoteElasticsearchSecretsSslKey0) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputRemoteElasticsearchSecretsSslKey0 to handle AdditionalProperties +func (a OutputRemoteElasticsearchSecretsSslKey0) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputRemoteElasticsearch_Secrets_Ssl. 
Returns the specified +// element and whether it was found +func (a OutputRemoteElasticsearch_Secrets_Ssl) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputRemoteElasticsearch_Secrets_Ssl +func (a *OutputRemoteElasticsearch_Secrets_Ssl) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputRemoteElasticsearch_Secrets_Ssl to handle AdditionalProperties +func (a *OutputRemoteElasticsearch_Secrets_Ssl) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputRemoteElasticsearch_Secrets_Ssl to handle AdditionalProperties +func (a OutputRemoteElasticsearch_Secrets_Ssl) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Key != nil { + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputRemoteElasticsearch_Secrets. 
Returns the specified +// element and whether it was found +func (a OutputRemoteElasticsearch_Secrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputRemoteElasticsearch_Secrets +func (a *OutputRemoteElasticsearch_Secrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputRemoteElasticsearch_Secrets to handle AdditionalProperties +func (a *OutputRemoteElasticsearch_Secrets) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["service_token"]; found { + err = json.Unmarshal(raw, &a.ServiceToken) + if err != nil { + return fmt.Errorf("error reading 'service_token': %w", err) + } + delete(object, "service_token") + } + + if raw, found := object["ssl"]; found { + err = json.Unmarshal(raw, &a.Ssl) + if err != nil { + return fmt.Errorf("error reading 'ssl': %w", err) + } + delete(object, "ssl") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputRemoteElasticsearch_Secrets to handle AdditionalProperties +func (a OutputRemoteElasticsearch_Secrets) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.ServiceToken != nil { + object["service_token"], err = json.Marshal(a.ServiceToken) + if err != nil { + return nil, fmt.Errorf("error marshaling 'service_token': %w", err) + } + } + + if a.Ssl != nil { + object["ssl"], err = json.Marshal(a.Ssl) + if err != nil { + return nil, fmt.Errorf("error marshaling 'ssl': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputShipper. 
Returns the specified +// element and whether it was found +func (a OutputShipper) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputShipper +func (a *OutputShipper) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputShipper to handle AdditionalProperties +func (a *OutputShipper) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["compression_level"]; found { + err = json.Unmarshal(raw, &a.CompressionLevel) + if err != nil { + return fmt.Errorf("error reading 'compression_level': %w", err) + } + delete(object, "compression_level") + } + + if raw, found := object["disk_queue_compression_enabled"]; found { + err = json.Unmarshal(raw, &a.DiskQueueCompressionEnabled) + if err != nil { + return fmt.Errorf("error reading 'disk_queue_compression_enabled': %w", err) + } + delete(object, "disk_queue_compression_enabled") + } + + if raw, found := object["disk_queue_enabled"]; found { + err = json.Unmarshal(raw, &a.DiskQueueEnabled) + if err != nil { + return fmt.Errorf("error reading 'disk_queue_enabled': %w", err) + } + delete(object, "disk_queue_enabled") + } + + if raw, found := object["disk_queue_encryption_enabled"]; found { + err = json.Unmarshal(raw, &a.DiskQueueEncryptionEnabled) + if err != nil { + return fmt.Errorf("error reading 'disk_queue_encryption_enabled': %w", err) + } + delete(object, "disk_queue_encryption_enabled") + } + + if raw, found := object["disk_queue_max_size"]; found { + err = json.Unmarshal(raw, &a.DiskQueueMaxSize) + if err != nil { + return fmt.Errorf("error reading 'disk_queue_max_size': %w", err) + } + delete(object, "disk_queue_max_size") + } + + if raw, found := object["disk_queue_path"]; found { + err = json.Unmarshal(raw, &a.DiskQueuePath) + if err != nil { + return fmt.Errorf("error reading 'disk_queue_path': %w", err) + } + delete(object, "disk_queue_path") + } + + if raw, found := object["loadbalance"]; found { + err = json.Unmarshal(raw, &a.Loadbalance) + if err != nil { + return fmt.Errorf("error reading 'loadbalance': %w", err) + } + delete(object, "loadbalance") + } + + if raw, found := object["max_batch_bytes"]; found { + err = json.Unmarshal(raw, &a.MaxBatchBytes) + if err != nil { + return fmt.Errorf("error reading 'max_batch_bytes': %w", err) + } + delete(object, "max_batch_bytes") + } + + if raw, found := object["mem_queue_events"]; found { + err = json.Unmarshal(raw, &a.MemQueueEvents) + if err != nil { + return fmt.Errorf("error reading 'mem_queue_events': %w", err) + } + delete(object, "mem_queue_events") + } + + if raw, found := object["queue_flush_timeout"]; found { + err = json.Unmarshal(raw, &a.QueueFlushTimeout) + if err != nil { + return fmt.Errorf("error reading 'queue_flush_timeout': %w", err) + } + delete(object, "queue_flush_timeout") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + 
return nil +} + +// Override default JSON handling for OutputShipper to handle AdditionalProperties +func (a OutputShipper) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["compression_level"], err = json.Marshal(a.CompressionLevel) + if err != nil { + return nil, fmt.Errorf("error marshaling 'compression_level': %w", err) + } + + object["disk_queue_compression_enabled"], err = json.Marshal(a.DiskQueueCompressionEnabled) + if err != nil { + return nil, fmt.Errorf("error marshaling 'disk_queue_compression_enabled': %w", err) + } + + if a.DiskQueueEnabled != nil { + object["disk_queue_enabled"], err = json.Marshal(a.DiskQueueEnabled) + if err != nil { + return nil, fmt.Errorf("error marshaling 'disk_queue_enabled': %w", err) + } + } + + object["disk_queue_encryption_enabled"], err = json.Marshal(a.DiskQueueEncryptionEnabled) + if err != nil { + return nil, fmt.Errorf("error marshaling 'disk_queue_encryption_enabled': %w", err) + } + + object["disk_queue_max_size"], err = json.Marshal(a.DiskQueueMaxSize) + if err != nil { + return nil, fmt.Errorf("error marshaling 'disk_queue_max_size': %w", err) + } + + object["disk_queue_path"], err = json.Marshal(a.DiskQueuePath) + if err != nil { + return nil, fmt.Errorf("error marshaling 'disk_queue_path': %w", err) + } + + object["loadbalance"], err = json.Marshal(a.Loadbalance) + if err != nil { + return nil, fmt.Errorf("error marshaling 'loadbalance': %w", err) + } + + object["max_batch_bytes"], err = json.Marshal(a.MaxBatchBytes) + if err != nil { + return nil, fmt.Errorf("error marshaling 'max_batch_bytes': %w", err) + } + + object["mem_queue_events"], err = json.Marshal(a.MemQueueEvents) + if err != nil { + return nil, fmt.Errorf("error marshaling 'mem_queue_events': %w", err) + } + + object["queue_flush_timeout"], err = json.Marshal(a.QueueFlushTimeout) + if err != nil { + return nil, fmt.Errorf("error marshaling 'queue_flush_timeout': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for OutputSsl. 
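Note the asymmetry in the generated OutputShipper.MarshalJSON above: most fields are written unconditionally, so a nil pointer is emitted as JSON null, while disk_queue_enabled is guarded by a nil check and omitted entirely when unset. A short sketch of that difference, with hypothetical stand-in variables:

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	object := make(map[string]json.RawMessage)

	// Written unconditionally, as compression_level is above: a nil pointer becomes null.
	var compressionLevel *float32
	object["compression_level"], _ = json.Marshal(compressionLevel)

	// Guarded by a nil check, as disk_queue_enabled is above: the key is omitted when unset.
	var diskQueueEnabled *bool
	if diskQueueEnabled != nil {
		object["disk_queue_enabled"], _ = json.Marshal(diskQueueEnabled)
	}

	out, _ := json.Marshal(object)
	fmt.Println(string(out)) // {"compression_level":null}
}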
Returns the specified +// element and whether it was found +func (a OutputSsl) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for OutputSsl +func (a *OutputSsl) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for OutputSsl to handle AdditionalProperties +func (a *OutputSsl) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["certificate"]; found { + err = json.Unmarshal(raw, &a.Certificate) + if err != nil { + return fmt.Errorf("error reading 'certificate': %w", err) + } + delete(object, "certificate") + } + + if raw, found := object["certificate_authorities"]; found { + err = json.Unmarshal(raw, &a.CertificateAuthorities) + if err != nil { + return fmt.Errorf("error reading 'certificate_authorities': %w", err) + } + delete(object, "certificate_authorities") + } + + if raw, found := object["key"]; found { + err = json.Unmarshal(raw, &a.Key) + if err != nil { + return fmt.Errorf("error reading 'key': %w", err) + } + delete(object, "key") + } + + if raw, found := object["verification_mode"]; found { + err = json.Unmarshal(raw, &a.VerificationMode) + if err != nil { + return fmt.Errorf("error reading 'verification_mode': %w", err) + } + delete(object, "verification_mode") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for OutputSsl to handle AdditionalProperties +func (a OutputSsl) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Certificate != nil { + object["certificate"], err = json.Marshal(a.Certificate) + if err != nil { + return nil, fmt.Errorf("error marshaling 'certificate': %w", err) + } + } + + if a.CertificateAuthorities != nil { + object["certificate_authorities"], err = json.Marshal(a.CertificateAuthorities) + if err != nil { + return nil, fmt.Errorf("error marshaling 'certificate_authorities': %w", err) + } + } + + if a.Key != nil { + object["key"], err = json.Marshal(a.Key) + if err != nil { + return nil, fmt.Errorf("error marshaling 'key': %w", err) + } + } + + if a.VerificationMode != nil { + object["verification_mode"], err = json.Marshal(a.VerificationMode) + if err != nil { + return nil, fmt.Errorf("error marshaling 'verification_mode': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo. 
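Decode failures in these generated methods are wrapped with %w together with the offending field name, so callers can still reach the underlying encoding/json error. A sketch of that caller-side pattern (the field name and the deliberately mismatched Go type are purely illustrative):

package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

func main() {
	// Force a type mismatch: a JSON string cannot be decoded into an int.
	var verificationMode int
	underlying := json.Unmarshal([]byte(`"full"`), &verificationMode)

	// Wrap it the same way the generated UnmarshalJSON methods do.
	wrapped := fmt.Errorf("error reading 'verification_mode': %w", underlying)

	var typeErr *json.UnmarshalTypeError
	fmt.Println(errors.As(wrapped, &typeErr)) // true – the original error is still reachable
	fmt.Println(wrapped)
}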
Returns the specified +// element and whether it was found +func (a PackageInfo) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo +func (a *PackageInfo) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo to handle AdditionalProperties +func (a *PackageInfo) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["agent"]; found { + err = json.Unmarshal(raw, &a.Agent) + if err != nil { + return fmt.Errorf("error reading 'agent': %w", err) + } + delete(object, "agent") + } + + if raw, found := object["asset_tags"]; found { + err = json.Unmarshal(raw, &a.AssetTags) + if err != nil { + return fmt.Errorf("error reading 'asset_tags': %w", err) + } + delete(object, "asset_tags") + } + + if raw, found := object["assets"]; found { + err = json.Unmarshal(raw, &a.Assets) + if err != nil { + return fmt.Errorf("error reading 'assets': %w", err) + } + delete(object, "assets") + } + + if raw, found := object["categories"]; found { + err = json.Unmarshal(raw, &a.Categories) + if err != nil { + return fmt.Errorf("error reading 'categories': %w", err) + } + delete(object, "categories") + } + + if raw, found := object["conditions"]; found { + err = json.Unmarshal(raw, &a.Conditions) + if err != nil { + return fmt.Errorf("error reading 'conditions': %w", err) + } + delete(object, "conditions") + } + + if raw, found := object["data_streams"]; found { + err = json.Unmarshal(raw, &a.DataStreams) + if err != nil { + return fmt.Errorf("error reading 'data_streams': %w", err) + } + delete(object, "data_streams") + } + + if raw, found := object["description"]; found { + err = json.Unmarshal(raw, &a.Description) + if err != nil { + return fmt.Errorf("error reading 'description': %w", err) + } + delete(object, "description") + } + + if raw, found := object["discovery"]; found { + err = json.Unmarshal(raw, &a.Discovery) + if err != nil { + return fmt.Errorf("error reading 'discovery': %w", err) + } + delete(object, "discovery") + } + + if raw, found := object["download"]; found { + err = json.Unmarshal(raw, &a.Download) + if err != nil { + return fmt.Errorf("error reading 'download': %w", err) + } + delete(object, "download") + } + + if raw, found := object["elasticsearch"]; found { + err = json.Unmarshal(raw, &a.Elasticsearch) + if err != nil { + return fmt.Errorf("error reading 'elasticsearch': %w", err) + } + delete(object, "elasticsearch") + } + + if raw, found := object["format_version"]; found { + err = json.Unmarshal(raw, &a.FormatVersion) + if err != nil { + return fmt.Errorf("error reading 'format_version': %w", err) + } + delete(object, "format_version") + } + + if raw, found := object["icons"]; found { + err = json.Unmarshal(raw, &a.Icons) + if err != nil { + return fmt.Errorf("error reading 'icons': %w", err) + } + delete(object, "icons") + } + + if raw, found := object["installationInfo"]; found { + err = json.Unmarshal(raw, &a.InstallationInfo) + if err != nil { + return fmt.Errorf("error reading 'installationInfo': %w", err) + } + delete(object, "installationInfo") + } + + if raw, found := object["internal"]; found { + err = 
json.Unmarshal(raw, &a.Internal) + if err != nil { + return fmt.Errorf("error reading 'internal': %w", err) + } + delete(object, "internal") + } + + if raw, found := object["keepPoliciesUpToDate"]; found { + err = json.Unmarshal(raw, &a.KeepPoliciesUpToDate) + if err != nil { + return fmt.Errorf("error reading 'keepPoliciesUpToDate': %w", err) + } + delete(object, "keepPoliciesUpToDate") + } + + if raw, found := object["latestVersion"]; found { + err = json.Unmarshal(raw, &a.LatestVersion) + if err != nil { + return fmt.Errorf("error reading 'latestVersion': %w", err) + } + delete(object, "latestVersion") + } + + if raw, found := object["license"]; found { + err = json.Unmarshal(raw, &a.License) + if err != nil { + return fmt.Errorf("error reading 'license': %w", err) + } + delete(object, "license") + } + + if raw, found := object["licensePath"]; found { + err = json.Unmarshal(raw, &a.LicensePath) + if err != nil { + return fmt.Errorf("error reading 'licensePath': %w", err) + } + delete(object, "licensePath") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["notice"]; found { + err = json.Unmarshal(raw, &a.Notice) + if err != nil { + return fmt.Errorf("error reading 'notice': %w", err) + } + delete(object, "notice") + } + + if raw, found := object["owner"]; found { + err = json.Unmarshal(raw, &a.Owner) + if err != nil { + return fmt.Errorf("error reading 'owner': %w", err) + } + delete(object, "owner") + } + + if raw, found := object["path"]; found { + err = json.Unmarshal(raw, &a.Path) + if err != nil { + return fmt.Errorf("error reading 'path': %w", err) + } + delete(object, "path") + } + + if raw, found := object["policy_templates"]; found { + err = json.Unmarshal(raw, &a.PolicyTemplates) + if err != nil { + return fmt.Errorf("error reading 'policy_templates': %w", err) + } + delete(object, "policy_templates") + } + + if raw, found := object["readme"]; found { + err = json.Unmarshal(raw, &a.Readme) + if err != nil { + return fmt.Errorf("error reading 'readme': %w", err) + } + delete(object, "readme") + } + + if raw, found := object["release"]; found { + err = json.Unmarshal(raw, &a.Release) + if err != nil { + return fmt.Errorf("error reading 'release': %w", err) + } + delete(object, "release") + } + + if raw, found := object["screenshots"]; found { + err = json.Unmarshal(raw, &a.Screenshots) + if err != nil { + return fmt.Errorf("error reading 'screenshots': %w", err) + } + delete(object, "screenshots") + } + + if raw, found := object["signature_path"]; found { + err = json.Unmarshal(raw, &a.SignaturePath) + if err != nil { + return fmt.Errorf("error reading 'signature_path': %w", err) + } + delete(object, "signature_path") + } + + if raw, found := object["source"]; found { + err = json.Unmarshal(raw, &a.Source) + if err != nil { + return fmt.Errorf("error reading 'source': %w", err) + } + delete(object, "source") + } + + if raw, found := object["status"]; found { + err = json.Unmarshal(raw, &a.Status) + if err != nil { + return fmt.Errorf("error reading 'status': %w", err) + } + delete(object, "status") + } + + if raw, found := object["title"]; found { + err = json.Unmarshal(raw, &a.Title) + if err != nil { + return fmt.Errorf("error reading 'title': %w", err) + } + delete(object, "title") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error 
reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["vars"]; found { + err = json.Unmarshal(raw, &a.Vars) + if err != nil { + return fmt.Errorf("error reading 'vars': %w", err) + } + delete(object, "vars") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo to handle AdditionalProperties +func (a PackageInfo) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Agent != nil { + object["agent"], err = json.Marshal(a.Agent) + if err != nil { + return nil, fmt.Errorf("error marshaling 'agent': %w", err) + } + } + + if a.AssetTags != nil { + object["asset_tags"], err = json.Marshal(a.AssetTags) + if err != nil { + return nil, fmt.Errorf("error marshaling 'asset_tags': %w", err) + } + } + + object["assets"], err = json.Marshal(a.Assets) + if err != nil { + return nil, fmt.Errorf("error marshaling 'assets': %w", err) + } + + if a.Categories != nil { + object["categories"], err = json.Marshal(a.Categories) + if err != nil { + return nil, fmt.Errorf("error marshaling 'categories': %w", err) + } + } + + if a.Conditions != nil { + object["conditions"], err = json.Marshal(a.Conditions) + if err != nil { + return nil, fmt.Errorf("error marshaling 'conditions': %w", err) + } + } + + if a.DataStreams != nil { + object["data_streams"], err = json.Marshal(a.DataStreams) + if err != nil { + return nil, fmt.Errorf("error marshaling 'data_streams': %w", err) + } + } + + if a.Description != nil { + object["description"], err = json.Marshal(a.Description) + if err != nil { + return nil, fmt.Errorf("error marshaling 'description': %w", err) + } + } + + if a.Discovery != nil { + object["discovery"], err = json.Marshal(a.Discovery) + if err != nil { + return nil, fmt.Errorf("error marshaling 'discovery': %w", err) + } + } + + if a.Download != nil { + object["download"], err = json.Marshal(a.Download) + if err != nil { + return nil, fmt.Errorf("error marshaling 'download': %w", err) + } + } + + if a.Elasticsearch != nil { + object["elasticsearch"], err = json.Marshal(a.Elasticsearch) + if err != nil { + return nil, fmt.Errorf("error marshaling 'elasticsearch': %w", err) + } + } + + if a.FormatVersion != nil { + object["format_version"], err = json.Marshal(a.FormatVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'format_version': %w", err) + } + } + + if a.Icons != nil { + object["icons"], err = json.Marshal(a.Icons) + if err != nil { + return nil, fmt.Errorf("error marshaling 'icons': %w", err) + } + } + + if a.InstallationInfo != nil { + object["installationInfo"], err = json.Marshal(a.InstallationInfo) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installationInfo': %w", err) + } + } + + if a.Internal != nil { + object["internal"], err = json.Marshal(a.Internal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'internal': %w", err) + } + } + + if a.KeepPoliciesUpToDate != nil { + object["keepPoliciesUpToDate"], err = 
json.Marshal(a.KeepPoliciesUpToDate) + if err != nil { + return nil, fmt.Errorf("error marshaling 'keepPoliciesUpToDate': %w", err) + } + } + + if a.LatestVersion != nil { + object["latestVersion"], err = json.Marshal(a.LatestVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latestVersion': %w", err) + } + } + + if a.License != nil { + object["license"], err = json.Marshal(a.License) + if err != nil { + return nil, fmt.Errorf("error marshaling 'license': %w", err) + } + } + + if a.LicensePath != nil { + object["licensePath"], err = json.Marshal(a.LicensePath) + if err != nil { + return nil, fmt.Errorf("error marshaling 'licensePath': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Notice != nil { + object["notice"], err = json.Marshal(a.Notice) + if err != nil { + return nil, fmt.Errorf("error marshaling 'notice': %w", err) + } + } + + if a.Owner != nil { + object["owner"], err = json.Marshal(a.Owner) + if err != nil { + return nil, fmt.Errorf("error marshaling 'owner': %w", err) + } + } + + if a.Path != nil { + object["path"], err = json.Marshal(a.Path) + if err != nil { + return nil, fmt.Errorf("error marshaling 'path': %w", err) + } + } + + if a.PolicyTemplates != nil { + object["policy_templates"], err = json.Marshal(a.PolicyTemplates) + if err != nil { + return nil, fmt.Errorf("error marshaling 'policy_templates': %w", err) + } + } + + if a.Readme != nil { + object["readme"], err = json.Marshal(a.Readme) + if err != nil { + return nil, fmt.Errorf("error marshaling 'readme': %w", err) + } + } + + if a.Release != nil { + object["release"], err = json.Marshal(a.Release) + if err != nil { + return nil, fmt.Errorf("error marshaling 'release': %w", err) + } + } + + if a.Screenshots != nil { + object["screenshots"], err = json.Marshal(a.Screenshots) + if err != nil { + return nil, fmt.Errorf("error marshaling 'screenshots': %w", err) + } + } + + if a.SignaturePath != nil { + object["signature_path"], err = json.Marshal(a.SignaturePath) + if err != nil { + return nil, fmt.Errorf("error marshaling 'signature_path': %w", err) + } + } + + if a.Source != nil { + object["source"], err = json.Marshal(a.Source) + if err != nil { + return nil, fmt.Errorf("error marshaling 'source': %w", err) + } + } + + if a.Status != nil { + object["status"], err = json.Marshal(a.Status) + if err != nil { + return nil, fmt.Errorf("error marshaling 'status': %w", err) + } + } + + object["title"], err = json.Marshal(a.Title) + if err != nil { + return nil, fmt.Errorf("error marshaling 'title': %w", err) + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + if a.Vars != nil { + object["vars"], err = json.Marshal(a.Vars) + if err != nil { + return nil, fmt.Errorf("error marshaling 'vars': %w", err) + } + } + + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Conditions_Elastic. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Conditions_Elastic) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Conditions_Elastic +func (a *PackageInfo_Conditions_Elastic) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Conditions_Elastic to handle AdditionalProperties +func (a *PackageInfo_Conditions_Elastic) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["capabilities"]; found { + err = json.Unmarshal(raw, &a.Capabilities) + if err != nil { + return fmt.Errorf("error reading 'capabilities': %w", err) + } + delete(object, "capabilities") + } + + if raw, found := object["subscription"]; found { + err = json.Unmarshal(raw, &a.Subscription) + if err != nil { + return fmt.Errorf("error reading 'subscription': %w", err) + } + delete(object, "subscription") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Conditions_Elastic to handle AdditionalProperties +func (a PackageInfo_Conditions_Elastic) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Capabilities != nil { + object["capabilities"], err = json.Marshal(a.Capabilities) + if err != nil { + return nil, fmt.Errorf("error marshaling 'capabilities': %w", err) + } + } + + if a.Subscription != nil { + object["subscription"], err = json.Marshal(a.Subscription) + if err != nil { + return nil, fmt.Errorf("error marshaling 'subscription': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Conditions_Kibana. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Conditions_Kibana) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Conditions_Kibana +func (a *PackageInfo_Conditions_Kibana) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Conditions_Kibana to handle AdditionalProperties +func (a *PackageInfo_Conditions_Kibana) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Conditions_Kibana to handle AdditionalProperties +func (a PackageInfo_Conditions_Kibana) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Version != nil { + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Conditions. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Conditions) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Conditions +func (a *PackageInfo_Conditions) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Conditions to handle AdditionalProperties +func (a *PackageInfo_Conditions) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["elastic"]; found { + err = json.Unmarshal(raw, &a.Elastic) + if err != nil { + return fmt.Errorf("error reading 'elastic': %w", err) + } + delete(object, "elastic") + } + + if raw, found := object["kibana"]; found { + err = json.Unmarshal(raw, &a.Kibana) + if err != nil { + return fmt.Errorf("error reading 'kibana': %w", err) + } + delete(object, "kibana") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Conditions to handle AdditionalProperties +func (a PackageInfo_Conditions) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Elastic != nil { + object["elastic"], err = json.Marshal(a.Elastic) + if err != nil { + return nil, fmt.Errorf("error marshaling 'elastic': %w", err) + } + } + + if a.Kibana != nil { + object["kibana"], err = json.Marshal(a.Kibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'kibana': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Discovery_Datasets_Item. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Discovery_Datasets_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Discovery_Datasets_Item +func (a *PackageInfo_Discovery_Datasets_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Discovery_Datasets_Item to handle AdditionalProperties +func (a *PackageInfo_Discovery_Datasets_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Discovery_Datasets_Item to handle AdditionalProperties +func (a PackageInfo_Discovery_Datasets_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Discovery_Fields_Item. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Discovery_Fields_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Discovery_Fields_Item +func (a *PackageInfo_Discovery_Fields_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Discovery_Fields_Item to handle AdditionalProperties +func (a *PackageInfo_Discovery_Fields_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Discovery_Fields_Item to handle AdditionalProperties +func (a PackageInfo_Discovery_Fields_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Discovery. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Discovery) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Discovery +func (a *PackageInfo_Discovery) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Discovery to handle AdditionalProperties +func (a *PackageInfo_Discovery) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["datasets"]; found { + err = json.Unmarshal(raw, &a.Datasets) + if err != nil { + return fmt.Errorf("error reading 'datasets': %w", err) + } + delete(object, "datasets") + } + + if raw, found := object["fields"]; found { + err = json.Unmarshal(raw, &a.Fields) + if err != nil { + return fmt.Errorf("error reading 'fields': %w", err) + } + delete(object, "fields") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Discovery to handle AdditionalProperties +func (a PackageInfo_Discovery) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Datasets != nil { + object["datasets"], err = json.Marshal(a.Datasets) + if err != nil { + return nil, fmt.Errorf("error marshaling 'datasets': %w", err) + } + } + + if a.Fields != nil { + object["fields"], err = json.Marshal(a.Fields) + if err != nil { + return nil, fmt.Errorf("error marshaling 'fields': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Icons_Item. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Icons_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Icons_Item +func (a *PackageInfo_Icons_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Icons_Item to handle AdditionalProperties +func (a *PackageInfo_Icons_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["dark_mode"]; found { + err = json.Unmarshal(raw, &a.DarkMode) + if err != nil { + return fmt.Errorf("error reading 'dark_mode': %w", err) + } + delete(object, "dark_mode") + } + + if raw, found := object["path"]; found { + err = json.Unmarshal(raw, &a.Path) + if err != nil { + return fmt.Errorf("error reading 'path': %w", err) + } + delete(object, "path") + } + + if raw, found := object["size"]; found { + err = json.Unmarshal(raw, &a.Size) + if err != nil { + return fmt.Errorf("error reading 'size': %w", err) + } + delete(object, "size") + } + + if raw, found := object["src"]; found { + err = json.Unmarshal(raw, &a.Src) + if err != nil { + return fmt.Errorf("error reading 'src': %w", err) + } + delete(object, "src") + } + + if raw, found := object["title"]; found { + err = json.Unmarshal(raw, &a.Title) + if err != nil { + return fmt.Errorf("error reading 'title': %w", err) + } + delete(object, "title") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Icons_Item to handle AdditionalProperties +func (a PackageInfo_Icons_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.DarkMode != nil { + object["dark_mode"], err = json.Marshal(a.DarkMode) + if err != nil { + return nil, fmt.Errorf("error marshaling 'dark_mode': %w", err) + } + } + + if a.Path != nil { + object["path"], err = json.Marshal(a.Path) + if err != nil { + return nil, fmt.Errorf("error marshaling 'path': %w", err) + } + } + + if a.Size != nil { + object["size"], err = json.Marshal(a.Size) + if err != nil { + return nil, fmt.Errorf("error marshaling 'size': %w", err) + } + } + + object["src"], err = json.Marshal(a.Src) + if err != nil { + return nil, fmt.Errorf("error marshaling 'src': %w", err) + } + + if a.Title != nil { + object["title"], err = json.Marshal(a.Title) + if err != nil { + return nil, fmt.Errorf("error marshaling 'title': %w", err) + } + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = 
json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item. Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item +func (a *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["originId"]; found { + err = json.Unmarshal(raw, &a.OriginId) + if err != nil { + return fmt.Errorf("error reading 'originId': %w", err) + } + delete(object, "originId") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + if a.OriginId != nil { + object["originId"], err = json.Marshal(a.OriginId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'originId': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features +func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["doc_value_only_numeric"]; found { + err = json.Unmarshal(raw, &a.DocValueOnlyNumeric) + if err != nil { + return fmt.Errorf("error reading 'doc_value_only_numeric': %w", err) + } + delete(object, "doc_value_only_numeric") + } + + if raw, found := object["doc_value_only_other"]; found { + err = json.Unmarshal(raw, &a.DocValueOnlyOther) + if err != nil { + return fmt.Errorf("error reading 'doc_value_only_other': %w", err) + } + delete(object, "doc_value_only_other") + } + + if raw, found := object["synthetic_source"]; found { + err = json.Unmarshal(raw, &a.SyntheticSource) + if err != nil { + return fmt.Errorf("error reading 'synthetic_source': %w", err) + } + delete(object, "synthetic_source") + } + + if raw, found := object["tsdb"]; found { + err = json.Unmarshal(raw, &a.Tsdb) + if err != nil { + return fmt.Errorf("error reading 'tsdb': %w", err) + } + delete(object, "tsdb") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.DocValueOnlyNumeric != nil { + object["doc_value_only_numeric"], err = json.Marshal(a.DocValueOnlyNumeric) + if err != nil { + return nil, fmt.Errorf("error marshaling 'doc_value_only_numeric': %w", err) + } + } + + if a.DocValueOnlyOther != nil { + object["doc_value_only_other"], err = json.Marshal(a.DocValueOnlyOther) + if err != nil { + return nil, fmt.Errorf("error marshaling 'doc_value_only_other': %w", err) + } + } + + if a.SyntheticSource != nil { + object["synthetic_source"], err = json.Marshal(a.SyntheticSource) + if err != nil { + return nil, fmt.Errorf("error marshaling 'synthetic_source': %w", err) + } + } + + if a.Tsdb != nil { + object["tsdb"], err = json.Marshal(a.Tsdb) + if err != nil { + return nil, fmt.Errorf("error marshaling 'tsdb': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil 
{ + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item. Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item +func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["data_stream"]; found { + err = json.Unmarshal(raw, &a.DataStream) + if err != nil { + return fmt.Errorf("error reading 'data_stream': %w", err) + } + delete(object, "data_stream") + } + + if raw, found := object["features"]; found { + err = json.Unmarshal(raw, &a.Features) + if err != nil { + return fmt.Errorf("error reading 'features': %w", err) + } + delete(object, "features") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["data_stream"], err = json.Marshal(a.DataStream) + if err != nil { + return nil, fmt.Errorf("error marshaling 'data_stream': %w", err) + } + + object["features"], err = json.Marshal(a.Features) + if err != nil { + return nil, fmt.Errorf("error marshaling 'features': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_InstalledEs_Item. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_InstalledEs_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_InstalledEs_Item +func (a *PackageInfo_InstallationInfo_InstalledEs_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_InstalledEs_Item to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_InstalledEs_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["deferred"]; found { + err = json.Unmarshal(raw, &a.Deferred) + if err != nil { + return fmt.Errorf("error reading 'deferred': %w", err) + } + delete(object, "deferred") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_InstalledEs_Item to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_InstalledEs_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Deferred != nil { + object["deferred"], err = json.Marshal(a.Deferred) + if err != nil { + return nil, fmt.Errorf("error marshaling 'deferred': %w", err) + } + } + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.Version != nil { + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_InstalledKibana_Item. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_InstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_InstalledKibana_Item +func (a *PackageInfo_InstallationInfo_InstalledKibana_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_InstalledKibana_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["originId"]; found { + err = json.Unmarshal(raw, &a.OriginId) + if err != nil { + return fmt.Errorf("error reading 'originId': %w", err) + } + delete(object, "originId") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_InstalledKibana_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + if a.OriginId != nil { + object["originId"], err = json.Marshal(a.OriginId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'originId': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_LatestExecutedState. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_LatestExecutedState) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_LatestExecutedState +func (a *PackageInfo_InstallationInfo_LatestExecutedState) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestExecutedState to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_LatestExecutedState) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["error"]; found { + err = json.Unmarshal(raw, &a.Error) + if err != nil { + return fmt.Errorf("error reading 'error': %w", err) + } + delete(object, "error") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["started_at"]; found { + err = json.Unmarshal(raw, &a.StartedAt) + if err != nil { + return fmt.Errorf("error reading 'started_at': %w", err) + } + delete(object, "started_at") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestExecutedState to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_LatestExecutedState) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Error != nil { + object["error"], err = json.Marshal(a.Error) + if err != nil { + return nil, fmt.Errorf("error marshaling 'error': %w", err) + } + } + + if a.Name != nil { + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + } + + if a.StartedAt != nil { + object["started_at"], err = json.Marshal(a.StartedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'started_at': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error +func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["message"]; found { + err = json.Unmarshal(raw, &a.Message) + if err != nil { + return fmt.Errorf("error reading 'message': %w", err) + } + delete(object, "message") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["stack"]; found { + err = json.Unmarshal(raw, &a.Stack) + if err != nil { + return fmt.Errorf("error reading 'stack': %w", err) + } + delete(object, "stack") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["message"], err = json.Marshal(a.Message) + if err != nil { + return nil, fmt.Errorf("error marshaling 'message': %w", err) + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Stack != nil { + object["stack"], err = json.Marshal(a.Stack) + if err != nil { + return nil, fmt.Errorf("error marshaling 'stack': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item +func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["created_at"]; found { + err = json.Unmarshal(raw, &a.CreatedAt) + if err != nil { + return fmt.Errorf("error reading 'created_at': %w", err) + } + delete(object, "created_at") + } + + if raw, found := object["error"]; found { + err = json.Unmarshal(raw, &a.Error) + if err != nil { + return fmt.Errorf("error reading 'error': %w", err) + } + delete(object, "error") + } + + if raw, found := object["target_version"]; found { + err = json.Unmarshal(raw, &a.TargetVersion) + if err != nil { + return fmt.Errorf("error reading 'target_version': %w", err) + } + delete(object, "target_version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties +func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["created_at"], err = json.Marshal(a.CreatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'created_at': %w", err) + } + + object["error"], err = json.Marshal(a.Error) + if err != nil { + return nil, fmt.Errorf("error marshaling 'error': %w", err) + } + + object["target_version"], err = json.Marshal(a.TargetVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'target_version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_InstallationInfo. 
Returns the specified +// element and whether it was found +func (a PackageInfo_InstallationInfo) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_InstallationInfo +func (a *PackageInfo_InstallationInfo) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_InstallationInfo to handle AdditionalProperties +func (a *PackageInfo_InstallationInfo) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["additional_spaces_installed_kibana"]; found { + err = json.Unmarshal(raw, &a.AdditionalSpacesInstalledKibana) + if err != nil { + return fmt.Errorf("error reading 'additional_spaces_installed_kibana': %w", err) + } + delete(object, "additional_spaces_installed_kibana") + } + + if raw, found := object["created_at"]; found { + err = json.Unmarshal(raw, &a.CreatedAt) + if err != nil { + return fmt.Errorf("error reading 'created_at': %w", err) + } + delete(object, "created_at") + } + + if raw, found := object["experimental_data_stream_features"]; found { + err = json.Unmarshal(raw, &a.ExperimentalDataStreamFeatures) + if err != nil { + return fmt.Errorf("error reading 'experimental_data_stream_features': %w", err) + } + delete(object, "experimental_data_stream_features") + } + + if raw, found := object["install_format_schema_version"]; found { + err = json.Unmarshal(raw, &a.InstallFormatSchemaVersion) + if err != nil { + return fmt.Errorf("error reading 'install_format_schema_version': %w", err) + } + delete(object, "install_format_schema_version") + } + + if raw, found := object["install_source"]; found { + err = json.Unmarshal(raw, &a.InstallSource) + if err != nil { + return fmt.Errorf("error reading 'install_source': %w", err) + } + delete(object, "install_source") + } + + if raw, found := object["install_status"]; found { + err = json.Unmarshal(raw, &a.InstallStatus) + if err != nil { + return fmt.Errorf("error reading 'install_status': %w", err) + } + delete(object, "install_status") + } + + if raw, found := object["installed_es"]; found { + err = json.Unmarshal(raw, &a.InstalledEs) + if err != nil { + return fmt.Errorf("error reading 'installed_es': %w", err) + } + delete(object, "installed_es") + } + + if raw, found := object["installed_kibana"]; found { + err = json.Unmarshal(raw, &a.InstalledKibana) + if err != nil { + return fmt.Errorf("error reading 'installed_kibana': %w", err) + } + delete(object, "installed_kibana") + } + + if raw, found := object["installed_kibana_space_id"]; found { + err = json.Unmarshal(raw, &a.InstalledKibanaSpaceId) + if err != nil { + return fmt.Errorf("error reading 'installed_kibana_space_id': %w", err) + } + delete(object, "installed_kibana_space_id") + } + + if raw, found := object["latest_executed_state"]; found { + err = json.Unmarshal(raw, &a.LatestExecutedState) + if err != nil { + return fmt.Errorf("error reading 'latest_executed_state': %w", err) + } + delete(object, "latest_executed_state") + } + + if raw, found := object["latest_install_failed_attempts"]; found { + err = json.Unmarshal(raw, &a.LatestInstallFailedAttempts) + if err != nil { + return fmt.Errorf("error reading 
'latest_install_failed_attempts': %w", err) + } + delete(object, "latest_install_failed_attempts") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["namespaces"]; found { + err = json.Unmarshal(raw, &a.Namespaces) + if err != nil { + return fmt.Errorf("error reading 'namespaces': %w", err) + } + delete(object, "namespaces") + } + + if raw, found := object["previous_version"]; found { + err = json.Unmarshal(raw, &a.PreviousVersion) + if err != nil { + return fmt.Errorf("error reading 'previous_version': %w", err) + } + delete(object, "previous_version") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["updated_at"]; found { + err = json.Unmarshal(raw, &a.UpdatedAt) + if err != nil { + return fmt.Errorf("error reading 'updated_at': %w", err) + } + delete(object, "updated_at") + } + + if raw, found := object["verification_key_id"]; found { + err = json.Unmarshal(raw, &a.VerificationKeyId) + if err != nil { + return fmt.Errorf("error reading 'verification_key_id': %w", err) + } + delete(object, "verification_key_id") + } + + if raw, found := object["verification_status"]; found { + err = json.Unmarshal(raw, &a.VerificationStatus) + if err != nil { + return fmt.Errorf("error reading 'verification_status': %w", err) + } + delete(object, "verification_status") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_InstallationInfo to handle AdditionalProperties +func (a PackageInfo_InstallationInfo) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AdditionalSpacesInstalledKibana != nil { + object["additional_spaces_installed_kibana"], err = json.Marshal(a.AdditionalSpacesInstalledKibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'additional_spaces_installed_kibana': %w", err) + } + } + + if a.CreatedAt != nil { + object["created_at"], err = json.Marshal(a.CreatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'created_at': %w", err) + } + } + + if a.ExperimentalDataStreamFeatures != nil { + object["experimental_data_stream_features"], err = json.Marshal(a.ExperimentalDataStreamFeatures) + if err != nil { + return nil, fmt.Errorf("error marshaling 'experimental_data_stream_features': %w", err) + } + } + + if a.InstallFormatSchemaVersion != nil { + object["install_format_schema_version"], err = json.Marshal(a.InstallFormatSchemaVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_format_schema_version': %w", err) + } + } + + object["install_source"], err = json.Marshal(a.InstallSource) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_source': %w", err) + } + + 
object["install_status"], err = json.Marshal(a.InstallStatus) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_status': %w", err) + } + + object["installed_es"], err = json.Marshal(a.InstalledEs) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installed_es': %w", err) + } + + object["installed_kibana"], err = json.Marshal(a.InstalledKibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installed_kibana': %w", err) + } + + if a.InstalledKibanaSpaceId != nil { + object["installed_kibana_space_id"], err = json.Marshal(a.InstalledKibanaSpaceId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installed_kibana_space_id': %w", err) + } + } + + if a.LatestExecutedState != nil { + object["latest_executed_state"], err = json.Marshal(a.LatestExecutedState) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latest_executed_state': %w", err) + } + } + + if a.LatestInstallFailedAttempts != nil { + object["latest_install_failed_attempts"], err = json.Marshal(a.LatestInstallFailedAttempts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latest_install_failed_attempts': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Namespaces != nil { + object["namespaces"], err = json.Marshal(a.Namespaces) + if err != nil { + return nil, fmt.Errorf("error marshaling 'namespaces': %w", err) + } + } + + if a.PreviousVersion != nil { + object["previous_version"], err = json.Marshal(a.PreviousVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'previous_version': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.UpdatedAt != nil { + object["updated_at"], err = json.Marshal(a.UpdatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'updated_at': %w", err) + } + } + + if a.VerificationKeyId != nil { + object["verification_key_id"], err = json.Marshal(a.VerificationKeyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'verification_key_id': %w", err) + } + } + + object["verification_status"], err = json.Marshal(a.VerificationStatus) + if err != nil { + return nil, fmt.Errorf("error marshaling 'verification_status': %w", err) + } + + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Owner. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Owner) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Owner +func (a *PackageInfo_Owner) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Owner to handle AdditionalProperties +func (a *PackageInfo_Owner) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["github"]; found { + err = json.Unmarshal(raw, &a.Github) + if err != nil { + return fmt.Errorf("error reading 'github': %w", err) + } + delete(object, "github") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Owner to handle AdditionalProperties +func (a PackageInfo_Owner) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Github != nil { + object["github"], err = json.Marshal(a.Github) + if err != nil { + return nil, fmt.Errorf("error marshaling 'github': %w", err) + } + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageInfo_Source. 
Returns the specified +// element and whether it was found +func (a PackageInfo_Source) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageInfo_Source +func (a *PackageInfo_Source) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageInfo_Source to handle AdditionalProperties +func (a *PackageInfo_Source) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["license"]; found { + err = json.Unmarshal(raw, &a.License) + if err != nil { + return fmt.Errorf("error reading 'license': %w", err) + } + delete(object, "license") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageInfo_Source to handle AdditionalProperties +func (a PackageInfo_Source) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["license"], err = json.Marshal(a.License) + if err != nil { + return nil, fmt.Errorf("error marshaling 'license': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem. 
Returns the specified +// element and whether it was found +func (a PackageListItem) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem +func (a *PackageListItem) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem to handle AdditionalProperties +func (a *PackageListItem) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["categories"]; found { + err = json.Unmarshal(raw, &a.Categories) + if err != nil { + return fmt.Errorf("error reading 'categories': %w", err) + } + delete(object, "categories") + } + + if raw, found := object["conditions"]; found { + err = json.Unmarshal(raw, &a.Conditions) + if err != nil { + return fmt.Errorf("error reading 'conditions': %w", err) + } + delete(object, "conditions") + } + + if raw, found := object["data_streams"]; found { + err = json.Unmarshal(raw, &a.DataStreams) + if err != nil { + return fmt.Errorf("error reading 'data_streams': %w", err) + } + delete(object, "data_streams") + } + + if raw, found := object["description"]; found { + err = json.Unmarshal(raw, &a.Description) + if err != nil { + return fmt.Errorf("error reading 'description': %w", err) + } + delete(object, "description") + } + + if raw, found := object["discovery"]; found { + err = json.Unmarshal(raw, &a.Discovery) + if err != nil { + return fmt.Errorf("error reading 'discovery': %w", err) + } + delete(object, "discovery") + } + + if raw, found := object["download"]; found { + err = json.Unmarshal(raw, &a.Download) + if err != nil { + return fmt.Errorf("error reading 'download': %w", err) + } + delete(object, "download") + } + + if raw, found := object["format_version"]; found { + err = json.Unmarshal(raw, &a.FormatVersion) + if err != nil { + return fmt.Errorf("error reading 'format_version': %w", err) + } + delete(object, "format_version") + } + + if raw, found := object["icons"]; found { + err = json.Unmarshal(raw, &a.Icons) + if err != nil { + return fmt.Errorf("error reading 'icons': %w", err) + } + delete(object, "icons") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["installationInfo"]; found { + err = json.Unmarshal(raw, &a.InstallationInfo) + if err != nil { + return fmt.Errorf("error reading 'installationInfo': %w", err) + } + delete(object, "installationInfo") + } + + if raw, found := object["integration"]; found { + err = json.Unmarshal(raw, &a.Integration) + if err != nil { + return fmt.Errorf("error reading 'integration': %w", err) + } + delete(object, "integration") + } + + if raw, found := object["internal"]; found { + err = json.Unmarshal(raw, &a.Internal) + if err != nil { + return fmt.Errorf("error reading 'internal': %w", err) + } + delete(object, "internal") + } + + if raw, found := object["latestVersion"]; found { + err = json.Unmarshal(raw, &a.LatestVersion) + if err != nil { + return fmt.Errorf("error reading 'latestVersion': %w", err) + } + delete(object, "latestVersion") + } + + if raw, found := object["name"]; found { + 
err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["owner"]; found { + err = json.Unmarshal(raw, &a.Owner) + if err != nil { + return fmt.Errorf("error reading 'owner': %w", err) + } + delete(object, "owner") + } + + if raw, found := object["path"]; found { + err = json.Unmarshal(raw, &a.Path) + if err != nil { + return fmt.Errorf("error reading 'path': %w", err) + } + delete(object, "path") + } + + if raw, found := object["policy_templates"]; found { + err = json.Unmarshal(raw, &a.PolicyTemplates) + if err != nil { + return fmt.Errorf("error reading 'policy_templates': %w", err) + } + delete(object, "policy_templates") + } + + if raw, found := object["readme"]; found { + err = json.Unmarshal(raw, &a.Readme) + if err != nil { + return fmt.Errorf("error reading 'readme': %w", err) + } + delete(object, "readme") + } + + if raw, found := object["release"]; found { + err = json.Unmarshal(raw, &a.Release) + if err != nil { + return fmt.Errorf("error reading 'release': %w", err) + } + delete(object, "release") + } + + if raw, found := object["signature_path"]; found { + err = json.Unmarshal(raw, &a.SignaturePath) + if err != nil { + return fmt.Errorf("error reading 'signature_path': %w", err) + } + delete(object, "signature_path") + } + + if raw, found := object["source"]; found { + err = json.Unmarshal(raw, &a.Source) + if err != nil { + return fmt.Errorf("error reading 'source': %w", err) + } + delete(object, "source") + } + + if raw, found := object["status"]; found { + err = json.Unmarshal(raw, &a.Status) + if err != nil { + return fmt.Errorf("error reading 'status': %w", err) + } + delete(object, "status") + } + + if raw, found := object["title"]; found { + err = json.Unmarshal(raw, &a.Title) + if err != nil { + return fmt.Errorf("error reading 'title': %w", err) + } + delete(object, "title") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["vars"]; found { + err = json.Unmarshal(raw, &a.Vars) + if err != nil { + return fmt.Errorf("error reading 'vars': %w", err) + } + delete(object, "vars") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem to handle AdditionalProperties +func (a PackageListItem) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Categories != nil { + object["categories"], err = json.Marshal(a.Categories) + if err != nil { + return nil, fmt.Errorf("error marshaling 'categories': %w", err) + } + } + + if a.Conditions != nil { + object["conditions"], err = json.Marshal(a.Conditions) + if err != nil { + return nil, fmt.Errorf("error marshaling 'conditions': %w", err) + } + } + + if a.DataStreams != nil { + object["data_streams"], err = json.Marshal(a.DataStreams) + if err != nil { + 
return nil, fmt.Errorf("error marshaling 'data_streams': %w", err) + } + } + + if a.Description != nil { + object["description"], err = json.Marshal(a.Description) + if err != nil { + return nil, fmt.Errorf("error marshaling 'description': %w", err) + } + } + + if a.Discovery != nil { + object["discovery"], err = json.Marshal(a.Discovery) + if err != nil { + return nil, fmt.Errorf("error marshaling 'discovery': %w", err) + } + } + + if a.Download != nil { + object["download"], err = json.Marshal(a.Download) + if err != nil { + return nil, fmt.Errorf("error marshaling 'download': %w", err) + } + } + + if a.FormatVersion != nil { + object["format_version"], err = json.Marshal(a.FormatVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'format_version': %w", err) + } + } + + if a.Icons != nil { + object["icons"], err = json.Marshal(a.Icons) + if err != nil { + return nil, fmt.Errorf("error marshaling 'icons': %w", err) + } + } + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + if a.InstallationInfo != nil { + object["installationInfo"], err = json.Marshal(a.InstallationInfo) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installationInfo': %w", err) + } + } + + if a.Integration != nil { + object["integration"], err = json.Marshal(a.Integration) + if err != nil { + return nil, fmt.Errorf("error marshaling 'integration': %w", err) + } + } + + if a.Internal != nil { + object["internal"], err = json.Marshal(a.Internal) + if err != nil { + return nil, fmt.Errorf("error marshaling 'internal': %w", err) + } + } + + if a.LatestVersion != nil { + object["latestVersion"], err = json.Marshal(a.LatestVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latestVersion': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Owner != nil { + object["owner"], err = json.Marshal(a.Owner) + if err != nil { + return nil, fmt.Errorf("error marshaling 'owner': %w", err) + } + } + + if a.Path != nil { + object["path"], err = json.Marshal(a.Path) + if err != nil { + return nil, fmt.Errorf("error marshaling 'path': %w", err) + } + } + + if a.PolicyTemplates != nil { + object["policy_templates"], err = json.Marshal(a.PolicyTemplates) + if err != nil { + return nil, fmt.Errorf("error marshaling 'policy_templates': %w", err) + } + } + + if a.Readme != nil { + object["readme"], err = json.Marshal(a.Readme) + if err != nil { + return nil, fmt.Errorf("error marshaling 'readme': %w", err) + } + } + + if a.Release != nil { + object["release"], err = json.Marshal(a.Release) + if err != nil { + return nil, fmt.Errorf("error marshaling 'release': %w", err) + } + } + + if a.SignaturePath != nil { + object["signature_path"], err = json.Marshal(a.SignaturePath) + if err != nil { + return nil, fmt.Errorf("error marshaling 'signature_path': %w", err) + } + } + + if a.Source != nil { + object["source"], err = json.Marshal(a.Source) + if err != nil { + return nil, fmt.Errorf("error marshaling 'source': %w", err) + } + } + + if a.Status != nil { + object["status"], err = json.Marshal(a.Status) + if err != nil { + return nil, fmt.Errorf("error marshaling 'status': %w", err) + } + } + + object["title"], err = json.Marshal(a.Title) + if err != nil { + return nil, fmt.Errorf("error marshaling 'title': %w", err) + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, 
fmt.Errorf("error marshaling 'type': %w", err) + } + } + + if a.Vars != nil { + object["vars"], err = json.Marshal(a.Vars) + if err != nil { + return nil, fmt.Errorf("error marshaling 'vars': %w", err) + } + } + + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Conditions_Elastic. Returns the specified +// element and whether it was found +func (a PackageListItem_Conditions_Elastic) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Conditions_Elastic +func (a *PackageListItem_Conditions_Elastic) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Conditions_Elastic to handle AdditionalProperties +func (a *PackageListItem_Conditions_Elastic) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["capabilities"]; found { + err = json.Unmarshal(raw, &a.Capabilities) + if err != nil { + return fmt.Errorf("error reading 'capabilities': %w", err) + } + delete(object, "capabilities") + } + + if raw, found := object["subscription"]; found { + err = json.Unmarshal(raw, &a.Subscription) + if err != nil { + return fmt.Errorf("error reading 'subscription': %w", err) + } + delete(object, "subscription") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Conditions_Elastic to handle AdditionalProperties +func (a PackageListItem_Conditions_Elastic) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Capabilities != nil { + object["capabilities"], err = json.Marshal(a.Capabilities) + if err != nil { + return nil, fmt.Errorf("error marshaling 'capabilities': %w", err) + } + } + + if a.Subscription != nil { + object["subscription"], err = json.Marshal(a.Subscription) + if err != nil { + return nil, fmt.Errorf("error marshaling 'subscription': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Conditions_Kibana. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Conditions_Kibana) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Conditions_Kibana +func (a *PackageListItem_Conditions_Kibana) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Conditions_Kibana to handle AdditionalProperties +func (a *PackageListItem_Conditions_Kibana) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Conditions_Kibana to handle AdditionalProperties +func (a PackageListItem_Conditions_Kibana) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Version != nil { + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Conditions. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Conditions) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Conditions +func (a *PackageListItem_Conditions) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Conditions to handle AdditionalProperties +func (a *PackageListItem_Conditions) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["elastic"]; found { + err = json.Unmarshal(raw, &a.Elastic) + if err != nil { + return fmt.Errorf("error reading 'elastic': %w", err) + } + delete(object, "elastic") + } + + if raw, found := object["kibana"]; found { + err = json.Unmarshal(raw, &a.Kibana) + if err != nil { + return fmt.Errorf("error reading 'kibana': %w", err) + } + delete(object, "kibana") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Conditions to handle AdditionalProperties +func (a PackageListItem_Conditions) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Elastic != nil { + object["elastic"], err = json.Marshal(a.Elastic) + if err != nil { + return nil, fmt.Errorf("error marshaling 'elastic': %w", err) + } + } + + if a.Kibana != nil { + object["kibana"], err = json.Marshal(a.Kibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'kibana': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Discovery_Datasets_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Discovery_Datasets_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Discovery_Datasets_Item +func (a *PackageListItem_Discovery_Datasets_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Discovery_Datasets_Item to handle AdditionalProperties +func (a *PackageListItem_Discovery_Datasets_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Discovery_Datasets_Item to handle AdditionalProperties +func (a PackageListItem_Discovery_Datasets_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Discovery_Fields_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Discovery_Fields_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Discovery_Fields_Item +func (a *PackageListItem_Discovery_Fields_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Discovery_Fields_Item to handle AdditionalProperties +func (a *PackageListItem_Discovery_Fields_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Discovery_Fields_Item to handle AdditionalProperties +func (a PackageListItem_Discovery_Fields_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Discovery. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Discovery) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Discovery +func (a *PackageListItem_Discovery) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Discovery to handle AdditionalProperties +func (a *PackageListItem_Discovery) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["datasets"]; found { + err = json.Unmarshal(raw, &a.Datasets) + if err != nil { + return fmt.Errorf("error reading 'datasets': %w", err) + } + delete(object, "datasets") + } + + if raw, found := object["fields"]; found { + err = json.Unmarshal(raw, &a.Fields) + if err != nil { + return fmt.Errorf("error reading 'fields': %w", err) + } + delete(object, "fields") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Discovery to handle AdditionalProperties +func (a PackageListItem_Discovery) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Datasets != nil { + object["datasets"], err = json.Marshal(a.Datasets) + if err != nil { + return nil, fmt.Errorf("error marshaling 'datasets': %w", err) + } + } + + if a.Fields != nil { + object["fields"], err = json.Marshal(a.Fields) + if err != nil { + return nil, fmt.Errorf("error marshaling 'fields': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Icons_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Icons_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Icons_Item +func (a *PackageListItem_Icons_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Icons_Item to handle AdditionalProperties +func (a *PackageListItem_Icons_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["dark_mode"]; found { + err = json.Unmarshal(raw, &a.DarkMode) + if err != nil { + return fmt.Errorf("error reading 'dark_mode': %w", err) + } + delete(object, "dark_mode") + } + + if raw, found := object["path"]; found { + err = json.Unmarshal(raw, &a.Path) + if err != nil { + return fmt.Errorf("error reading 'path': %w", err) + } + delete(object, "path") + } + + if raw, found := object["size"]; found { + err = json.Unmarshal(raw, &a.Size) + if err != nil { + return fmt.Errorf("error reading 'size': %w", err) + } + delete(object, "size") + } + + if raw, found := object["src"]; found { + err = json.Unmarshal(raw, &a.Src) + if err != nil { + return fmt.Errorf("error reading 'src': %w", err) + } + delete(object, "src") + } + + if raw, found := object["title"]; found { + err = json.Unmarshal(raw, &a.Title) + if err != nil { + return fmt.Errorf("error reading 'title': %w", err) + } + delete(object, "title") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Icons_Item to handle AdditionalProperties +func (a PackageListItem_Icons_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.DarkMode != nil { + object["dark_mode"], err = json.Marshal(a.DarkMode) + if err != nil { + return nil, fmt.Errorf("error marshaling 'dark_mode': %w", err) + } + } + + if a.Path != nil { + object["path"], err = json.Marshal(a.Path) + if err != nil { + return nil, fmt.Errorf("error marshaling 'path': %w", err) + } + } + + if a.Size != nil { + object["size"], err = json.Marshal(a.Size) + if err != nil { + return nil, fmt.Errorf("error marshaling 'size': %w", err) + } + } + + object["src"], err = json.Marshal(a.Src) + if err != nil { + return nil, fmt.Errorf("error marshaling 'src': %w", err) + } + + if a.Title != nil { + object["title"], err = json.Marshal(a.Title) + if err != nil { + return nil, fmt.Errorf("error marshaling 'title': %w", err) + } + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + 
object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item. Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item +func (a *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["originId"]; found { + err = json.Unmarshal(raw, &a.OriginId) + if err != nil { + return fmt.Errorf("error reading 'originId': %w", err) + } + delete(object, "originId") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + if a.OriginId != nil { + object["originId"], err = json.Marshal(a.OriginId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'originId': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features +func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["doc_value_only_numeric"]; found { + err = json.Unmarshal(raw, &a.DocValueOnlyNumeric) + if err != nil { + return fmt.Errorf("error reading 'doc_value_only_numeric': %w", err) + } + delete(object, "doc_value_only_numeric") + } + + if raw, found := object["doc_value_only_other"]; found { + err = json.Unmarshal(raw, &a.DocValueOnlyOther) + if err != nil { + return fmt.Errorf("error reading 'doc_value_only_other': %w", err) + } + delete(object, "doc_value_only_other") + } + + if raw, found := object["synthetic_source"]; found { + err = json.Unmarshal(raw, &a.SyntheticSource) + if err != nil { + return fmt.Errorf("error reading 'synthetic_source': %w", err) + } + delete(object, "synthetic_source") + } + + if raw, found := object["tsdb"]; found { + err = json.Unmarshal(raw, &a.Tsdb) + if err != nil { + return fmt.Errorf("error reading 'tsdb': %w", err) + } + delete(object, "tsdb") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.DocValueOnlyNumeric != nil { + object["doc_value_only_numeric"], err = json.Marshal(a.DocValueOnlyNumeric) + if err != nil { + return nil, fmt.Errorf("error marshaling 'doc_value_only_numeric': %w", err) + } + } + + if a.DocValueOnlyOther != nil { + object["doc_value_only_other"], err = json.Marshal(a.DocValueOnlyOther) + if err != nil { + return nil, fmt.Errorf("error marshaling 'doc_value_only_other': %w", err) + } + } + + if a.SyntheticSource != nil { + object["synthetic_source"], err = json.Marshal(a.SyntheticSource) + if err != nil { + return nil, fmt.Errorf("error marshaling 'synthetic_source': %w", err) + } + } + + if a.Tsdb != nil { + object["tsdb"], err = json.Marshal(a.Tsdb) + if err != nil { + return nil, fmt.Errorf("error marshaling 'tsdb': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = 
json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item. Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item +func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["data_stream"]; found { + err = json.Unmarshal(raw, &a.DataStream) + if err != nil { + return fmt.Errorf("error reading 'data_stream': %w", err) + } + delete(object, "data_stream") + } + + if raw, found := object["features"]; found { + err = json.Unmarshal(raw, &a.Features) + if err != nil { + return fmt.Errorf("error reading 'features': %w", err) + } + delete(object, "features") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["data_stream"], err = json.Marshal(a.DataStream) + if err != nil { + return nil, fmt.Errorf("error marshaling 'data_stream': %w", err) + } + + object["features"], err = json.Marshal(a.Features) + if err != nil { + return nil, fmt.Errorf("error marshaling 'features': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_InstalledEs_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_InstalledEs_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_InstalledEs_Item +func (a *PackageListItem_InstallationInfo_InstalledEs_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_InstalledEs_Item to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_InstalledEs_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["deferred"]; found { + err = json.Unmarshal(raw, &a.Deferred) + if err != nil { + return fmt.Errorf("error reading 'deferred': %w", err) + } + delete(object, "deferred") + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_InstalledEs_Item to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_InstalledEs_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Deferred != nil { + object["deferred"], err = json.Marshal(a.Deferred) + if err != nil { + return nil, fmt.Errorf("error marshaling 'deferred': %w", err) + } + } + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.Version != nil { + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_InstalledKibana_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_InstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_InstalledKibana_Item +func (a *PackageListItem_InstallationInfo_InstalledKibana_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_InstalledKibana_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["id"]; found { + err = json.Unmarshal(raw, &a.Id) + if err != nil { + return fmt.Errorf("error reading 'id': %w", err) + } + delete(object, "id") + } + + if raw, found := object["originId"]; found { + err = json.Unmarshal(raw, &a.OriginId) + if err != nil { + return fmt.Errorf("error reading 'originId': %w", err) + } + delete(object, "originId") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_InstalledKibana_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["id"], err = json.Marshal(a.Id) + if err != nil { + return nil, fmt.Errorf("error marshaling 'id': %w", err) + } + + if a.OriginId != nil { + object["originId"], err = json.Marshal(a.OriginId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'originId': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_LatestExecutedState. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_LatestExecutedState) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_LatestExecutedState +func (a *PackageListItem_InstallationInfo_LatestExecutedState) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestExecutedState to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_LatestExecutedState) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["error"]; found { + err = json.Unmarshal(raw, &a.Error) + if err != nil { + return fmt.Errorf("error reading 'error': %w", err) + } + delete(object, "error") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["started_at"]; found { + err = json.Unmarshal(raw, &a.StartedAt) + if err != nil { + return fmt.Errorf("error reading 'started_at': %w", err) + } + delete(object, "started_at") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestExecutedState to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_LatestExecutedState) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Error != nil { + object["error"], err = json.Marshal(a.Error) + if err != nil { + return nil, fmt.Errorf("error marshaling 'error': %w", err) + } + } + + if a.Name != nil { + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + } + + if a.StartedAt != nil { + object["started_at"], err = json.Marshal(a.StartedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'started_at': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error +func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["message"]; found { + err = json.Unmarshal(raw, &a.Message) + if err != nil { + return fmt.Errorf("error reading 'message': %w", err) + } + delete(object, "message") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["stack"]; found { + err = json.Unmarshal(raw, &a.Stack) + if err != nil { + return fmt.Errorf("error reading 'stack': %w", err) + } + delete(object, "stack") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["message"], err = json.Marshal(a.Message) + if err != nil { + return nil, fmt.Errorf("error marshaling 'message': %w", err) + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Stack != nil { + object["stack"], err = json.Marshal(a.Stack) + if err != nil { + return nil, fmt.Errorf("error marshaling 'stack': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item +func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["created_at"]; found { + err = json.Unmarshal(raw, &a.CreatedAt) + if err != nil { + return fmt.Errorf("error reading 'created_at': %w", err) + } + delete(object, "created_at") + } + + if raw, found := object["error"]; found { + err = json.Unmarshal(raw, &a.Error) + if err != nil { + return fmt.Errorf("error reading 'error': %w", err) + } + delete(object, "error") + } + + if raw, found := object["target_version"]; found { + err = json.Unmarshal(raw, &a.TargetVersion) + if err != nil { + return fmt.Errorf("error reading 'target_version': %w", err) + } + delete(object, "target_version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties +func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["created_at"], err = json.Marshal(a.CreatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'created_at': %w", err) + } + + object["error"], err = json.Marshal(a.Error) + if err != nil { + return nil, fmt.Errorf("error marshaling 'error': %w", err) + } + + object["target_version"], err = json.Marshal(a.TargetVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'target_version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_InstallationInfo. 
Returns the specified +// element and whether it was found +func (a PackageListItem_InstallationInfo) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_InstallationInfo +func (a *PackageListItem_InstallationInfo) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_InstallationInfo to handle AdditionalProperties +func (a *PackageListItem_InstallationInfo) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["additional_spaces_installed_kibana"]; found { + err = json.Unmarshal(raw, &a.AdditionalSpacesInstalledKibana) + if err != nil { + return fmt.Errorf("error reading 'additional_spaces_installed_kibana': %w", err) + } + delete(object, "additional_spaces_installed_kibana") + } + + if raw, found := object["created_at"]; found { + err = json.Unmarshal(raw, &a.CreatedAt) + if err != nil { + return fmt.Errorf("error reading 'created_at': %w", err) + } + delete(object, "created_at") + } + + if raw, found := object["experimental_data_stream_features"]; found { + err = json.Unmarshal(raw, &a.ExperimentalDataStreamFeatures) + if err != nil { + return fmt.Errorf("error reading 'experimental_data_stream_features': %w", err) + } + delete(object, "experimental_data_stream_features") + } + + if raw, found := object["install_format_schema_version"]; found { + err = json.Unmarshal(raw, &a.InstallFormatSchemaVersion) + if err != nil { + return fmt.Errorf("error reading 'install_format_schema_version': %w", err) + } + delete(object, "install_format_schema_version") + } + + if raw, found := object["install_source"]; found { + err = json.Unmarshal(raw, &a.InstallSource) + if err != nil { + return fmt.Errorf("error reading 'install_source': %w", err) + } + delete(object, "install_source") + } + + if raw, found := object["install_status"]; found { + err = json.Unmarshal(raw, &a.InstallStatus) + if err != nil { + return fmt.Errorf("error reading 'install_status': %w", err) + } + delete(object, "install_status") + } + + if raw, found := object["installed_es"]; found { + err = json.Unmarshal(raw, &a.InstalledEs) + if err != nil { + return fmt.Errorf("error reading 'installed_es': %w", err) + } + delete(object, "installed_es") + } + + if raw, found := object["installed_kibana"]; found { + err = json.Unmarshal(raw, &a.InstalledKibana) + if err != nil { + return fmt.Errorf("error reading 'installed_kibana': %w", err) + } + delete(object, "installed_kibana") + } + + if raw, found := object["installed_kibana_space_id"]; found { + err = json.Unmarshal(raw, &a.InstalledKibanaSpaceId) + if err != nil { + return fmt.Errorf("error reading 'installed_kibana_space_id': %w", err) + } + delete(object, "installed_kibana_space_id") + } + + if raw, found := object["latest_executed_state"]; found { + err = json.Unmarshal(raw, &a.LatestExecutedState) + if err != nil { + return fmt.Errorf("error reading 'latest_executed_state': %w", err) + } + delete(object, "latest_executed_state") + } + + if raw, found := object["latest_install_failed_attempts"]; found { + err = json.Unmarshal(raw, &a.LatestInstallFailedAttempts) + if err != nil { + return fmt.Errorf("error 
reading 'latest_install_failed_attempts': %w", err) + } + delete(object, "latest_install_failed_attempts") + } + + if raw, found := object["name"]; found { + err = json.Unmarshal(raw, &a.Name) + if err != nil { + return fmt.Errorf("error reading 'name': %w", err) + } + delete(object, "name") + } + + if raw, found := object["namespaces"]; found { + err = json.Unmarshal(raw, &a.Namespaces) + if err != nil { + return fmt.Errorf("error reading 'namespaces': %w", err) + } + delete(object, "namespaces") + } + + if raw, found := object["previous_version"]; found { + err = json.Unmarshal(raw, &a.PreviousVersion) + if err != nil { + return fmt.Errorf("error reading 'previous_version': %w", err) + } + delete(object, "previous_version") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if raw, found := object["updated_at"]; found { + err = json.Unmarshal(raw, &a.UpdatedAt) + if err != nil { + return fmt.Errorf("error reading 'updated_at': %w", err) + } + delete(object, "updated_at") + } + + if raw, found := object["verification_key_id"]; found { + err = json.Unmarshal(raw, &a.VerificationKeyId) + if err != nil { + return fmt.Errorf("error reading 'verification_key_id': %w", err) + } + delete(object, "verification_key_id") + } + + if raw, found := object["verification_status"]; found { + err = json.Unmarshal(raw, &a.VerificationStatus) + if err != nil { + return fmt.Errorf("error reading 'verification_status': %w", err) + } + delete(object, "verification_status") + } + + if raw, found := object["version"]; found { + err = json.Unmarshal(raw, &a.Version) + if err != nil { + return fmt.Errorf("error reading 'version': %w", err) + } + delete(object, "version") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_InstallationInfo to handle AdditionalProperties +func (a PackageListItem_InstallationInfo) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.AdditionalSpacesInstalledKibana != nil { + object["additional_spaces_installed_kibana"], err = json.Marshal(a.AdditionalSpacesInstalledKibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'additional_spaces_installed_kibana': %w", err) + } + } + + if a.CreatedAt != nil { + object["created_at"], err = json.Marshal(a.CreatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'created_at': %w", err) + } + } + + if a.ExperimentalDataStreamFeatures != nil { + object["experimental_data_stream_features"], err = json.Marshal(a.ExperimentalDataStreamFeatures) + if err != nil { + return nil, fmt.Errorf("error marshaling 'experimental_data_stream_features': %w", err) + } + } + + if a.InstallFormatSchemaVersion != nil { + object["install_format_schema_version"], err = json.Marshal(a.InstallFormatSchemaVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_format_schema_version': %w", err) + } + } + + object["install_source"], err = json.Marshal(a.InstallSource) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_source': %w", err) + } + + 
object["install_status"], err = json.Marshal(a.InstallStatus) + if err != nil { + return nil, fmt.Errorf("error marshaling 'install_status': %w", err) + } + + object["installed_es"], err = json.Marshal(a.InstalledEs) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installed_es': %w", err) + } + + object["installed_kibana"], err = json.Marshal(a.InstalledKibana) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installed_kibana': %w", err) + } + + if a.InstalledKibanaSpaceId != nil { + object["installed_kibana_space_id"], err = json.Marshal(a.InstalledKibanaSpaceId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'installed_kibana_space_id': %w", err) + } + } + + if a.LatestExecutedState != nil { + object["latest_executed_state"], err = json.Marshal(a.LatestExecutedState) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latest_executed_state': %w", err) + } + } + + if a.LatestInstallFailedAttempts != nil { + object["latest_install_failed_attempts"], err = json.Marshal(a.LatestInstallFailedAttempts) + if err != nil { + return nil, fmt.Errorf("error marshaling 'latest_install_failed_attempts': %w", err) + } + } + + object["name"], err = json.Marshal(a.Name) + if err != nil { + return nil, fmt.Errorf("error marshaling 'name': %w", err) + } + + if a.Namespaces != nil { + object["namespaces"], err = json.Marshal(a.Namespaces) + if err != nil { + return nil, fmt.Errorf("error marshaling 'namespaces': %w", err) + } + } + + if a.PreviousVersion != nil { + object["previous_version"], err = json.Marshal(a.PreviousVersion) + if err != nil { + return nil, fmt.Errorf("error marshaling 'previous_version': %w", err) + } + } + + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + + if a.UpdatedAt != nil { + object["updated_at"], err = json.Marshal(a.UpdatedAt) + if err != nil { + return nil, fmt.Errorf("error marshaling 'updated_at': %w", err) + } + } + + if a.VerificationKeyId != nil { + object["verification_key_id"], err = json.Marshal(a.VerificationKeyId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'verification_key_id': %w", err) + } + } + + object["verification_status"], err = json.Marshal(a.VerificationStatus) + if err != nil { + return nil, fmt.Errorf("error marshaling 'verification_status': %w", err) + } + + object["version"], err = json.Marshal(a.Version) + if err != nil { + return nil, fmt.Errorf("error marshaling 'version': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Owner. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Owner) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Owner +func (a *PackageListItem_Owner) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Owner to handle AdditionalProperties +func (a *PackageListItem_Owner) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["github"]; found { + err = json.Unmarshal(raw, &a.Github) + if err != nil { + return fmt.Errorf("error reading 'github': %w", err) + } + delete(object, "github") + } + + if raw, found := object["type"]; found { + err = json.Unmarshal(raw, &a.Type) + if err != nil { + return fmt.Errorf("error reading 'type': %w", err) + } + delete(object, "type") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Owner to handle AdditionalProperties +func (a PackageListItem_Owner) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Github != nil { + object["github"], err = json.Marshal(a.Github) + if err != nil { + return nil, fmt.Errorf("error marshaling 'github': %w", err) + } + } + + if a.Type != nil { + object["type"], err = json.Marshal(a.Type) + if err != nil { + return nil, fmt.Errorf("error marshaling 'type': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackageListItem_Source. 
Returns the specified +// element and whether it was found +func (a PackageListItem_Source) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackageListItem_Source +func (a *PackageListItem_Source) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackageListItem_Source to handle AdditionalProperties +func (a *PackageListItem_Source) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["license"]; found { + err = json.Unmarshal(raw, &a.License) + if err != nil { + return fmt.Errorf("error reading 'license': %w", err) + } + delete(object, "license") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackageListItem_Source to handle AdditionalProperties +func (a PackageListItem_Source) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + object["license"], err = json.Marshal(a.License) + if err != nil { + return nil, fmt.Errorf("error marshaling 'license': %w", err) + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackagePolicy_Elasticsearch_Privileges. 
Returns the specified +// element and whether it was found +func (a PackagePolicy_Elasticsearch_Privileges) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackagePolicy_Elasticsearch_Privileges +func (a *PackagePolicy_Elasticsearch_Privileges) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackagePolicy_Elasticsearch_Privileges to handle AdditionalProperties +func (a *PackagePolicy_Elasticsearch_Privileges) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["cluster"]; found { + err = json.Unmarshal(raw, &a.Cluster) + if err != nil { + return fmt.Errorf("error reading 'cluster': %w", err) + } + delete(object, "cluster") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackagePolicy_Elasticsearch_Privileges to handle AdditionalProperties +func (a PackagePolicy_Elasticsearch_Privileges) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Cluster != nil { + object["cluster"], err = json.Marshal(a.Cluster) + if err != nil { + return nil, fmt.Errorf("error marshaling 'cluster': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for PackagePolicy_Elasticsearch. 
Returns the specified +// element and whether it was found +func (a PackagePolicy_Elasticsearch) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for PackagePolicy_Elasticsearch +func (a *PackagePolicy_Elasticsearch) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Override default JSON handling for PackagePolicy_Elasticsearch to handle AdditionalProperties +func (a *PackagePolicy_Elasticsearch) UnmarshalJSON(b []byte) error { + object := make(map[string]json.RawMessage) + err := json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["privileges"]; found { + err = json.Unmarshal(raw, &a.Privileges) + if err != nil { + return fmt.Errorf("error reading 'privileges': %w", err) + } + delete(object, "privileges") + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for PackagePolicy_Elasticsearch to handle AdditionalProperties +func (a PackagePolicy_Elasticsearch) MarshalJSON() ([]byte, error) { + var err error + object := make(map[string]json.RawMessage) + + if a.Privileges != nil { + object["privileges"], err = json.Marshal(a.Privileges) + if err != nil { + return nil, fmt.Errorf("error marshaling 'privileges': %w", err) + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Getter for additional properties for UpdateConnectorConfig. Returns the specified +// element and whether it was found +func (a UpdateConnectorConfig) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for UpdateConnectorConfig +func (a *UpdateConnectorConfig) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// Getter for additional properties for UpdateConnectorSecrets. 
Returns the specified +// element and whether it was found +func (a UpdateConnectorSecrets) Get(fieldName string) (value interface{}, found bool) { + if a.AdditionalProperties != nil { + value, found = a.AdditionalProperties[fieldName] + } + return +} + +// Setter for additional properties for UpdateConnectorSecrets +func (a *UpdateConnectorSecrets) Set(fieldName string, value interface{}) { + if a.AdditionalProperties == nil { + a.AdditionalProperties = make(map[string]interface{}) + } + a.AdditionalProperties[fieldName] = value +} + +// AsCasesAddAlertCommentRequestProperties returns the union data inside the CasesAddCaseCommentRequest as a CasesAddAlertCommentRequestProperties +func (t CasesAddCaseCommentRequest) AsCasesAddAlertCommentRequestProperties() (CasesAddAlertCommentRequestProperties, error) { + var body CasesAddAlertCommentRequestProperties + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesAddAlertCommentRequestProperties overwrites any union data inside the CasesAddCaseCommentRequest as the provided CasesAddAlertCommentRequestProperties +func (t *CasesAddCaseCommentRequest) FromCasesAddAlertCommentRequestProperties(v CasesAddAlertCommentRequestProperties) error { + v.Type = "alert" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesAddAlertCommentRequestProperties performs a merge with any union data inside the CasesAddCaseCommentRequest, using the provided CasesAddAlertCommentRequestProperties +func (t *CasesAddCaseCommentRequest) MergeCasesAddAlertCommentRequestProperties(v CasesAddAlertCommentRequestProperties) error { + v.Type = "alert" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesAddUserCommentRequestProperties returns the union data inside the CasesAddCaseCommentRequest as a CasesAddUserCommentRequestProperties +func (t CasesAddCaseCommentRequest) AsCasesAddUserCommentRequestProperties() (CasesAddUserCommentRequestProperties, error) { + var body CasesAddUserCommentRequestProperties + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesAddUserCommentRequestProperties overwrites any union data inside the CasesAddCaseCommentRequest as the provided CasesAddUserCommentRequestProperties +func (t *CasesAddCaseCommentRequest) FromCasesAddUserCommentRequestProperties(v CasesAddUserCommentRequestProperties) error { + v.Type = "user" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesAddUserCommentRequestProperties performs a merge with any union data inside the CasesAddCaseCommentRequest, using the provided CasesAddUserCommentRequestProperties +func (t *CasesAddCaseCommentRequest) MergeCasesAddUserCommentRequestProperties(v CasesAddUserCommentRequestProperties) error { + v.Type = "user" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesAddCaseCommentRequest) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"type"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err +} + +func (t CasesAddCaseCommentRequest) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch discriminator { + case "alert": + return t.AsCasesAddAlertCommentRequestProperties() + case "user": + return 
t.AsCasesAddUserCommentRequestProperties() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } +} + +func (t CasesAddCaseCommentRequest) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesAddCaseCommentRequest) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesAlertIdentifiers0 returns the union data inside the CasesAlertIdentifiers as a CasesAlertIdentifiers0 +func (t CasesAlertIdentifiers) AsCasesAlertIdentifiers0() (CasesAlertIdentifiers0, error) { + var body CasesAlertIdentifiers0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesAlertIdentifiers0 overwrites any union data inside the CasesAlertIdentifiers as the provided CasesAlertIdentifiers0 +func (t *CasesAlertIdentifiers) FromCasesAlertIdentifiers0(v CasesAlertIdentifiers0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesAlertIdentifiers0 performs a merge with any union data inside the CasesAlertIdentifiers, using the provided CasesAlertIdentifiers0 +func (t *CasesAlertIdentifiers) MergeCasesAlertIdentifiers0(v CasesAlertIdentifiers0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesAlertIdentifiers1 returns the union data inside the CasesAlertIdentifiers as a CasesAlertIdentifiers1 +func (t CasesAlertIdentifiers) AsCasesAlertIdentifiers1() (CasesAlertIdentifiers1, error) { + var body CasesAlertIdentifiers1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesAlertIdentifiers1 overwrites any union data inside the CasesAlertIdentifiers as the provided CasesAlertIdentifiers1 +func (t *CasesAlertIdentifiers) FromCasesAlertIdentifiers1(v CasesAlertIdentifiers1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesAlertIdentifiers1 performs a merge with any union data inside the CasesAlertIdentifiers, using the provided CasesAlertIdentifiers1 +func (t *CasesAlertIdentifiers) MergeCasesAlertIdentifiers1(v CasesAlertIdentifiers1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesAlertIdentifiers) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesAlertIdentifiers) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesAlertIndices0 returns the union data inside the CasesAlertIndices as a CasesAlertIndices0 +func (t CasesAlertIndices) AsCasesAlertIndices0() (CasesAlertIndices0, error) { + var body CasesAlertIndices0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesAlertIndices0 overwrites any union data inside the CasesAlertIndices as the provided CasesAlertIndices0 +func (t *CasesAlertIndices) FromCasesAlertIndices0(v CasesAlertIndices0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesAlertIndices0 performs a merge with any union data inside the CasesAlertIndices, using the provided CasesAlertIndices0 +func (t *CasesAlertIndices) MergeCasesAlertIndices0(v CasesAlertIndices0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesAlertIndices1 returns the union data inside the CasesAlertIndices as a 
CasesAlertIndices1 +func (t CasesAlertIndices) AsCasesAlertIndices1() (CasesAlertIndices1, error) { + var body CasesAlertIndices1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesAlertIndices1 overwrites any union data inside the CasesAlertIndices as the provided CasesAlertIndices1 +func (t *CasesAlertIndices) FromCasesAlertIndices1(v CasesAlertIndices1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesAlertIndices1 performs a merge with any union data inside the CasesAlertIndices, using the provided CasesAlertIndices1 +func (t *CasesAlertIndices) MergeCasesAlertIndices1(v CasesAlertIndices1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesAlertIndices) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesAlertIndices) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesAlertCommentResponseProperties returns the union data inside the CasesCaseResponseProperties_Comments_Item as a CasesAlertCommentResponseProperties +func (t CasesCaseResponseProperties_Comments_Item) AsCasesAlertCommentResponseProperties() (CasesAlertCommentResponseProperties, error) { + var body CasesAlertCommentResponseProperties + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesAlertCommentResponseProperties overwrites any union data inside the CasesCaseResponseProperties_Comments_Item as the provided CasesAlertCommentResponseProperties +func (t *CasesCaseResponseProperties_Comments_Item) FromCasesAlertCommentResponseProperties(v CasesAlertCommentResponseProperties) error { + v.Type = "alert" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesAlertCommentResponseProperties performs a merge with any union data inside the CasesCaseResponseProperties_Comments_Item, using the provided CasesAlertCommentResponseProperties +func (t *CasesCaseResponseProperties_Comments_Item) MergeCasesAlertCommentResponseProperties(v CasesAlertCommentResponseProperties) error { + v.Type = "alert" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesUserCommentResponseProperties returns the union data inside the CasesCaseResponseProperties_Comments_Item as a CasesUserCommentResponseProperties +func (t CasesCaseResponseProperties_Comments_Item) AsCasesUserCommentResponseProperties() (CasesUserCommentResponseProperties, error) { + var body CasesUserCommentResponseProperties + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesUserCommentResponseProperties overwrites any union data inside the CasesCaseResponseProperties_Comments_Item as the provided CasesUserCommentResponseProperties +func (t *CasesCaseResponseProperties_Comments_Item) FromCasesUserCommentResponseProperties(v CasesUserCommentResponseProperties) error { + v.Type = "user" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesUserCommentResponseProperties performs a merge with any union data inside the CasesCaseResponseProperties_Comments_Item, using the provided CasesUserCommentResponseProperties +func (t *CasesCaseResponseProperties_Comments_Item) MergeCasesUserCommentResponseProperties(v CasesUserCommentResponseProperties) error { + v.Type = "user" + b, err := json.Marshal(v) + if err != nil { + return err + } + + 
merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesCaseResponseProperties_Comments_Item) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"type"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err +} + +func (t CasesCaseResponseProperties_Comments_Item) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch discriminator { + case "alert": + return t.AsCasesAlertCommentResponseProperties() + case "user": + return t.AsCasesUserCommentResponseProperties() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } +} + +func (t CasesCaseResponseProperties_Comments_Item) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesCaseResponseProperties_Comments_Item) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesConnectorPropertiesNone returns the union data inside the CasesCaseResponseProperties_Connector as a CasesConnectorPropertiesNone +func (t CasesCaseResponseProperties_Connector) AsCasesConnectorPropertiesNone() (CasesConnectorPropertiesNone, error) { + var body CasesConnectorPropertiesNone + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesNone overwrites any union data inside the CasesCaseResponseProperties_Connector as the provided CasesConnectorPropertiesNone +func (t *CasesCaseResponseProperties_Connector) FromCasesConnectorPropertiesNone(v CasesConnectorPropertiesNone) error { + v.Type = ".none" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesNone performs a merge with any union data inside the CasesCaseResponseProperties_Connector, using the provided CasesConnectorPropertiesNone +func (t *CasesCaseResponseProperties_Connector) MergeCasesConnectorPropertiesNone(v CasesConnectorPropertiesNone) error { + v.Type = ".none" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesCasesWebhook returns the union data inside the CasesCaseResponseProperties_Connector as a CasesConnectorPropertiesCasesWebhook +func (t CasesCaseResponseProperties_Connector) AsCasesConnectorPropertiesCasesWebhook() (CasesConnectorPropertiesCasesWebhook, error) { + var body CasesConnectorPropertiesCasesWebhook + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesCasesWebhook overwrites any union data inside the CasesCaseResponseProperties_Connector as the provided CasesConnectorPropertiesCasesWebhook +func (t *CasesCaseResponseProperties_Connector) FromCasesConnectorPropertiesCasesWebhook(v CasesConnectorPropertiesCasesWebhook) error { + v.Type = ".cases-webhook" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesCasesWebhook performs a merge with any union data inside the CasesCaseResponseProperties_Connector, using the provided CasesConnectorPropertiesCasesWebhook +func (t *CasesCaseResponseProperties_Connector) MergeCasesConnectorPropertiesCasesWebhook(v CasesConnectorPropertiesCasesWebhook) error { + v.Type = ".cases-webhook" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// 
AsCasesConnectorPropertiesJira returns the union data inside the CasesCaseResponseProperties_Connector as a CasesConnectorPropertiesJira +func (t CasesCaseResponseProperties_Connector) AsCasesConnectorPropertiesJira() (CasesConnectorPropertiesJira, error) { + var body CasesConnectorPropertiesJira + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesJira overwrites any union data inside the CasesCaseResponseProperties_Connector as the provided CasesConnectorPropertiesJira +func (t *CasesCaseResponseProperties_Connector) FromCasesConnectorPropertiesJira(v CasesConnectorPropertiesJira) error { + v.Type = ".jira" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesJira performs a merge with any union data inside the CasesCaseResponseProperties_Connector, using the provided CasesConnectorPropertiesJira +func (t *CasesCaseResponseProperties_Connector) MergeCasesConnectorPropertiesJira(v CasesConnectorPropertiesJira) error { + v.Type = ".jira" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesResilient returns the union data inside the CasesCaseResponseProperties_Connector as a CasesConnectorPropertiesResilient +func (t CasesCaseResponseProperties_Connector) AsCasesConnectorPropertiesResilient() (CasesConnectorPropertiesResilient, error) { + var body CasesConnectorPropertiesResilient + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesResilient overwrites any union data inside the CasesCaseResponseProperties_Connector as the provided CasesConnectorPropertiesResilient +func (t *CasesCaseResponseProperties_Connector) FromCasesConnectorPropertiesResilient(v CasesConnectorPropertiesResilient) error { + v.Type = ".resilient" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesResilient performs a merge with any union data inside the CasesCaseResponseProperties_Connector, using the provided CasesConnectorPropertiesResilient +func (t *CasesCaseResponseProperties_Connector) MergeCasesConnectorPropertiesResilient(v CasesConnectorPropertiesResilient) error { + v.Type = ".resilient" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesServicenow returns the union data inside the CasesCaseResponseProperties_Connector as a CasesConnectorPropertiesServicenow +func (t CasesCaseResponseProperties_Connector) AsCasesConnectorPropertiesServicenow() (CasesConnectorPropertiesServicenow, error) { + var body CasesConnectorPropertiesServicenow + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesServicenow overwrites any union data inside the CasesCaseResponseProperties_Connector as the provided CasesConnectorPropertiesServicenow +func (t *CasesCaseResponseProperties_Connector) FromCasesConnectorPropertiesServicenow(v CasesConnectorPropertiesServicenow) error { + v.Type = ".servicenow" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesServicenow performs a merge with any union data inside the CasesCaseResponseProperties_Connector, using the provided CasesConnectorPropertiesServicenow +func (t *CasesCaseResponseProperties_Connector) MergeCasesConnectorPropertiesServicenow(v CasesConnectorPropertiesServicenow) error { + 
v.Type = ".servicenow" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesServicenowSir returns the union data inside the CasesCaseResponseProperties_Connector as a CasesConnectorPropertiesServicenowSir +func (t CasesCaseResponseProperties_Connector) AsCasesConnectorPropertiesServicenowSir() (CasesConnectorPropertiesServicenowSir, error) { + var body CasesConnectorPropertiesServicenowSir + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesServicenowSir overwrites any union data inside the CasesCaseResponseProperties_Connector as the provided CasesConnectorPropertiesServicenowSir +func (t *CasesCaseResponseProperties_Connector) FromCasesConnectorPropertiesServicenowSir(v CasesConnectorPropertiesServicenowSir) error { + v.Type = ".servicenow-sir" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesServicenowSir performs a merge with any union data inside the CasesCaseResponseProperties_Connector, using the provided CasesConnectorPropertiesServicenowSir +func (t *CasesCaseResponseProperties_Connector) MergeCasesConnectorPropertiesServicenowSir(v CasesConnectorPropertiesServicenowSir) error { + v.Type = ".servicenow-sir" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesSwimlane returns the union data inside the CasesCaseResponseProperties_Connector as a CasesConnectorPropertiesSwimlane +func (t CasesCaseResponseProperties_Connector) AsCasesConnectorPropertiesSwimlane() (CasesConnectorPropertiesSwimlane, error) { + var body CasesConnectorPropertiesSwimlane + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesSwimlane overwrites any union data inside the CasesCaseResponseProperties_Connector as the provided CasesConnectorPropertiesSwimlane +func (t *CasesCaseResponseProperties_Connector) FromCasesConnectorPropertiesSwimlane(v CasesConnectorPropertiesSwimlane) error { + v.Type = ".swimlane" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesSwimlane performs a merge with any union data inside the CasesCaseResponseProperties_Connector, using the provided CasesConnectorPropertiesSwimlane +func (t *CasesCaseResponseProperties_Connector) MergeCasesConnectorPropertiesSwimlane(v CasesConnectorPropertiesSwimlane) error { + v.Type = ".swimlane" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesCaseResponseProperties_Connector) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"type"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err +} + +func (t CasesCaseResponseProperties_Connector) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch discriminator { + case ".cases-webhook": + return t.AsCasesConnectorPropertiesCasesWebhook() + case ".jira": + return t.AsCasesConnectorPropertiesJira() + case ".none": + return t.AsCasesConnectorPropertiesNone() + case ".resilient": + return t.AsCasesConnectorPropertiesResilient() + case ".servicenow": + return t.AsCasesConnectorPropertiesServicenow() + case 
".servicenow-sir": + return t.AsCasesConnectorPropertiesServicenowSir() + case ".swimlane": + return t.AsCasesConnectorPropertiesSwimlane() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } +} + +func (t CasesCaseResponseProperties_Connector) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesCaseResponseProperties_Connector) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesCaseResponsePropertiesCustomFieldsValue0 returns the union data inside the CasesCaseResponseProperties_CustomFields_Value as a CasesCaseResponsePropertiesCustomFieldsValue0 +func (t CasesCaseResponseProperties_CustomFields_Value) AsCasesCaseResponsePropertiesCustomFieldsValue0() (CasesCaseResponsePropertiesCustomFieldsValue0, error) { + var body CasesCaseResponsePropertiesCustomFieldsValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesCaseResponsePropertiesCustomFieldsValue0 overwrites any union data inside the CasesCaseResponseProperties_CustomFields_Value as the provided CasesCaseResponsePropertiesCustomFieldsValue0 +func (t *CasesCaseResponseProperties_CustomFields_Value) FromCasesCaseResponsePropertiesCustomFieldsValue0(v CasesCaseResponsePropertiesCustomFieldsValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesCaseResponsePropertiesCustomFieldsValue0 performs a merge with any union data inside the CasesCaseResponseProperties_CustomFields_Value, using the provided CasesCaseResponsePropertiesCustomFieldsValue0 +func (t *CasesCaseResponseProperties_CustomFields_Value) MergeCasesCaseResponsePropertiesCustomFieldsValue0(v CasesCaseResponsePropertiesCustomFieldsValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesCaseResponsePropertiesCustomFieldsValue1 returns the union data inside the CasesCaseResponseProperties_CustomFields_Value as a CasesCaseResponsePropertiesCustomFieldsValue1 +func (t CasesCaseResponseProperties_CustomFields_Value) AsCasesCaseResponsePropertiesCustomFieldsValue1() (CasesCaseResponsePropertiesCustomFieldsValue1, error) { + var body CasesCaseResponsePropertiesCustomFieldsValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesCaseResponsePropertiesCustomFieldsValue1 overwrites any union data inside the CasesCaseResponseProperties_CustomFields_Value as the provided CasesCaseResponsePropertiesCustomFieldsValue1 +func (t *CasesCaseResponseProperties_CustomFields_Value) FromCasesCaseResponsePropertiesCustomFieldsValue1(v CasesCaseResponsePropertiesCustomFieldsValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesCaseResponsePropertiesCustomFieldsValue1 performs a merge with any union data inside the CasesCaseResponseProperties_CustomFields_Value, using the provided CasesCaseResponsePropertiesCustomFieldsValue1 +func (t *CasesCaseResponseProperties_CustomFields_Value) MergeCasesCaseResponsePropertiesCustomFieldsValue1(v CasesCaseResponsePropertiesCustomFieldsValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesCaseResponseProperties_CustomFields_Value) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesCaseResponseProperties_CustomFields_Value) 
UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesConnectorPropertiesNone returns the union data inside the CasesCreateCaseRequest_Connector as a CasesConnectorPropertiesNone +func (t CasesCreateCaseRequest_Connector) AsCasesConnectorPropertiesNone() (CasesConnectorPropertiesNone, error) { + var body CasesConnectorPropertiesNone + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesNone overwrites any union data inside the CasesCreateCaseRequest_Connector as the provided CasesConnectorPropertiesNone +func (t *CasesCreateCaseRequest_Connector) FromCasesConnectorPropertiesNone(v CasesConnectorPropertiesNone) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesNone performs a merge with any union data inside the CasesCreateCaseRequest_Connector, using the provided CasesConnectorPropertiesNone +func (t *CasesCreateCaseRequest_Connector) MergeCasesConnectorPropertiesNone(v CasesConnectorPropertiesNone) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesCasesWebhook returns the union data inside the CasesCreateCaseRequest_Connector as a CasesConnectorPropertiesCasesWebhook +func (t CasesCreateCaseRequest_Connector) AsCasesConnectorPropertiesCasesWebhook() (CasesConnectorPropertiesCasesWebhook, error) { + var body CasesConnectorPropertiesCasesWebhook + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesCasesWebhook overwrites any union data inside the CasesCreateCaseRequest_Connector as the provided CasesConnectorPropertiesCasesWebhook +func (t *CasesCreateCaseRequest_Connector) FromCasesConnectorPropertiesCasesWebhook(v CasesConnectorPropertiesCasesWebhook) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesCasesWebhook performs a merge with any union data inside the CasesCreateCaseRequest_Connector, using the provided CasesConnectorPropertiesCasesWebhook +func (t *CasesCreateCaseRequest_Connector) MergeCasesConnectorPropertiesCasesWebhook(v CasesConnectorPropertiesCasesWebhook) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesJira returns the union data inside the CasesCreateCaseRequest_Connector as a CasesConnectorPropertiesJira +func (t CasesCreateCaseRequest_Connector) AsCasesConnectorPropertiesJira() (CasesConnectorPropertiesJira, error) { + var body CasesConnectorPropertiesJira + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesJira overwrites any union data inside the CasesCreateCaseRequest_Connector as the provided CasesConnectorPropertiesJira +func (t *CasesCreateCaseRequest_Connector) FromCasesConnectorPropertiesJira(v CasesConnectorPropertiesJira) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesJira performs a merge with any union data inside the CasesCreateCaseRequest_Connector, using the provided CasesConnectorPropertiesJira +func (t *CasesCreateCaseRequest_Connector) MergeCasesConnectorPropertiesJira(v CasesConnectorPropertiesJira) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// 
AsCasesConnectorPropertiesResilient returns the union data inside the CasesCreateCaseRequest_Connector as a CasesConnectorPropertiesResilient +func (t CasesCreateCaseRequest_Connector) AsCasesConnectorPropertiesResilient() (CasesConnectorPropertiesResilient, error) { + var body CasesConnectorPropertiesResilient + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesResilient overwrites any union data inside the CasesCreateCaseRequest_Connector as the provided CasesConnectorPropertiesResilient +func (t *CasesCreateCaseRequest_Connector) FromCasesConnectorPropertiesResilient(v CasesConnectorPropertiesResilient) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesResilient performs a merge with any union data inside the CasesCreateCaseRequest_Connector, using the provided CasesConnectorPropertiesResilient +func (t *CasesCreateCaseRequest_Connector) MergeCasesConnectorPropertiesResilient(v CasesConnectorPropertiesResilient) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesServicenow returns the union data inside the CasesCreateCaseRequest_Connector as a CasesConnectorPropertiesServicenow +func (t CasesCreateCaseRequest_Connector) AsCasesConnectorPropertiesServicenow() (CasesConnectorPropertiesServicenow, error) { + var body CasesConnectorPropertiesServicenow + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesServicenow overwrites any union data inside the CasesCreateCaseRequest_Connector as the provided CasesConnectorPropertiesServicenow +func (t *CasesCreateCaseRequest_Connector) FromCasesConnectorPropertiesServicenow(v CasesConnectorPropertiesServicenow) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesServicenow performs a merge with any union data inside the CasesCreateCaseRequest_Connector, using the provided CasesConnectorPropertiesServicenow +func (t *CasesCreateCaseRequest_Connector) MergeCasesConnectorPropertiesServicenow(v CasesConnectorPropertiesServicenow) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesServicenowSir returns the union data inside the CasesCreateCaseRequest_Connector as a CasesConnectorPropertiesServicenowSir +func (t CasesCreateCaseRequest_Connector) AsCasesConnectorPropertiesServicenowSir() (CasesConnectorPropertiesServicenowSir, error) { + var body CasesConnectorPropertiesServicenowSir + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesServicenowSir overwrites any union data inside the CasesCreateCaseRequest_Connector as the provided CasesConnectorPropertiesServicenowSir +func (t *CasesCreateCaseRequest_Connector) FromCasesConnectorPropertiesServicenowSir(v CasesConnectorPropertiesServicenowSir) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesServicenowSir performs a merge with any union data inside the CasesCreateCaseRequest_Connector, using the provided CasesConnectorPropertiesServicenowSir +func (t *CasesCreateCaseRequest_Connector) MergeCasesConnectorPropertiesServicenowSir(v CasesConnectorPropertiesServicenowSir) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := 
runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesSwimlane returns the union data inside the CasesCreateCaseRequest_Connector as a CasesConnectorPropertiesSwimlane +func (t CasesCreateCaseRequest_Connector) AsCasesConnectorPropertiesSwimlane() (CasesConnectorPropertiesSwimlane, error) { + var body CasesConnectorPropertiesSwimlane + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesSwimlane overwrites any union data inside the CasesCreateCaseRequest_Connector as the provided CasesConnectorPropertiesSwimlane +func (t *CasesCreateCaseRequest_Connector) FromCasesConnectorPropertiesSwimlane(v CasesConnectorPropertiesSwimlane) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesSwimlane performs a merge with any union data inside the CasesCreateCaseRequest_Connector, using the provided CasesConnectorPropertiesSwimlane +func (t *CasesCreateCaseRequest_Connector) MergeCasesConnectorPropertiesSwimlane(v CasesConnectorPropertiesSwimlane) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesCreateCaseRequest_Connector) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesCreateCaseRequest_Connector) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesCreateCaseRequestCustomFieldsValue0 returns the union data inside the CasesCreateCaseRequest_CustomFields_Value as a CasesCreateCaseRequestCustomFieldsValue0 +func (t CasesCreateCaseRequest_CustomFields_Value) AsCasesCreateCaseRequestCustomFieldsValue0() (CasesCreateCaseRequestCustomFieldsValue0, error) { + var body CasesCreateCaseRequestCustomFieldsValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesCreateCaseRequestCustomFieldsValue0 overwrites any union data inside the CasesCreateCaseRequest_CustomFields_Value as the provided CasesCreateCaseRequestCustomFieldsValue0 +func (t *CasesCreateCaseRequest_CustomFields_Value) FromCasesCreateCaseRequestCustomFieldsValue0(v CasesCreateCaseRequestCustomFieldsValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesCreateCaseRequestCustomFieldsValue0 performs a merge with any union data inside the CasesCreateCaseRequest_CustomFields_Value, using the provided CasesCreateCaseRequestCustomFieldsValue0 +func (t *CasesCreateCaseRequest_CustomFields_Value) MergeCasesCreateCaseRequestCustomFieldsValue0(v CasesCreateCaseRequestCustomFieldsValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesCreateCaseRequestCustomFieldsValue1 returns the union data inside the CasesCreateCaseRequest_CustomFields_Value as a CasesCreateCaseRequestCustomFieldsValue1 +func (t CasesCreateCaseRequest_CustomFields_Value) AsCasesCreateCaseRequestCustomFieldsValue1() (CasesCreateCaseRequestCustomFieldsValue1, error) { + var body CasesCreateCaseRequestCustomFieldsValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesCreateCaseRequestCustomFieldsValue1 overwrites any union data inside the CasesCreateCaseRequest_CustomFields_Value as the provided CasesCreateCaseRequestCustomFieldsValue1 +func (t *CasesCreateCaseRequest_CustomFields_Value) FromCasesCreateCaseRequestCustomFieldsValue1(v 
CasesCreateCaseRequestCustomFieldsValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesCreateCaseRequestCustomFieldsValue1 performs a merge with any union data inside the CasesCreateCaseRequest_CustomFields_Value, using the provided CasesCreateCaseRequestCustomFieldsValue1 +func (t *CasesCreateCaseRequest_CustomFields_Value) MergeCasesCreateCaseRequestCustomFieldsValue1(v CasesCreateCaseRequestCustomFieldsValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesCreateCaseRequest_CustomFields_Value) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesCreateCaseRequest_CustomFields_Value) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesPayloadAlertCommentCommentAlertId0 returns the union data inside the CasesPayloadAlertComment_Comment_AlertId as a CasesPayloadAlertCommentCommentAlertId0 +func (t CasesPayloadAlertComment_Comment_AlertId) AsCasesPayloadAlertCommentCommentAlertId0() (CasesPayloadAlertCommentCommentAlertId0, error) { + var body CasesPayloadAlertCommentCommentAlertId0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadAlertCommentCommentAlertId0 overwrites any union data inside the CasesPayloadAlertComment_Comment_AlertId as the provided CasesPayloadAlertCommentCommentAlertId0 +func (t *CasesPayloadAlertComment_Comment_AlertId) FromCasesPayloadAlertCommentCommentAlertId0(v CasesPayloadAlertCommentCommentAlertId0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadAlertCommentCommentAlertId0 performs a merge with any union data inside the CasesPayloadAlertComment_Comment_AlertId, using the provided CasesPayloadAlertCommentCommentAlertId0 +func (t *CasesPayloadAlertComment_Comment_AlertId) MergeCasesPayloadAlertCommentCommentAlertId0(v CasesPayloadAlertCommentCommentAlertId0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadAlertCommentCommentAlertId1 returns the union data inside the CasesPayloadAlertComment_Comment_AlertId as a CasesPayloadAlertCommentCommentAlertId1 +func (t CasesPayloadAlertComment_Comment_AlertId) AsCasesPayloadAlertCommentCommentAlertId1() (CasesPayloadAlertCommentCommentAlertId1, error) { + var body CasesPayloadAlertCommentCommentAlertId1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadAlertCommentCommentAlertId1 overwrites any union data inside the CasesPayloadAlertComment_Comment_AlertId as the provided CasesPayloadAlertCommentCommentAlertId1 +func (t *CasesPayloadAlertComment_Comment_AlertId) FromCasesPayloadAlertCommentCommentAlertId1(v CasesPayloadAlertCommentCommentAlertId1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadAlertCommentCommentAlertId1 performs a merge with any union data inside the CasesPayloadAlertComment_Comment_AlertId, using the provided CasesPayloadAlertCommentCommentAlertId1 +func (t *CasesPayloadAlertComment_Comment_AlertId) MergeCasesPayloadAlertCommentCommentAlertId1(v CasesPayloadAlertCommentCommentAlertId1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t 
CasesPayloadAlertComment_Comment_AlertId) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesPayloadAlertComment_Comment_AlertId) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesPayloadAlertCommentCommentIndex0 returns the union data inside the CasesPayloadAlertComment_Comment_Index as a CasesPayloadAlertCommentCommentIndex0 +func (t CasesPayloadAlertComment_Comment_Index) AsCasesPayloadAlertCommentCommentIndex0() (CasesPayloadAlertCommentCommentIndex0, error) { + var body CasesPayloadAlertCommentCommentIndex0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadAlertCommentCommentIndex0 overwrites any union data inside the CasesPayloadAlertComment_Comment_Index as the provided CasesPayloadAlertCommentCommentIndex0 +func (t *CasesPayloadAlertComment_Comment_Index) FromCasesPayloadAlertCommentCommentIndex0(v CasesPayloadAlertCommentCommentIndex0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadAlertCommentCommentIndex0 performs a merge with any union data inside the CasesPayloadAlertComment_Comment_Index, using the provided CasesPayloadAlertCommentCommentIndex0 +func (t *CasesPayloadAlertComment_Comment_Index) MergeCasesPayloadAlertCommentCommentIndex0(v CasesPayloadAlertCommentCommentIndex0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadAlertCommentCommentIndex1 returns the union data inside the CasesPayloadAlertComment_Comment_Index as a CasesPayloadAlertCommentCommentIndex1 +func (t CasesPayloadAlertComment_Comment_Index) AsCasesPayloadAlertCommentCommentIndex1() (CasesPayloadAlertCommentCommentIndex1, error) { + var body CasesPayloadAlertCommentCommentIndex1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadAlertCommentCommentIndex1 overwrites any union data inside the CasesPayloadAlertComment_Comment_Index as the provided CasesPayloadAlertCommentCommentIndex1 +func (t *CasesPayloadAlertComment_Comment_Index) FromCasesPayloadAlertCommentCommentIndex1(v CasesPayloadAlertCommentCommentIndex1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadAlertCommentCommentIndex1 performs a merge with any union data inside the CasesPayloadAlertComment_Comment_Index, using the provided CasesPayloadAlertCommentCommentIndex1 +func (t *CasesPayloadAlertComment_Comment_Index) MergeCasesPayloadAlertCommentCommentIndex1(v CasesPayloadAlertCommentCommentIndex1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesPayloadAlertComment_Comment_Index) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesPayloadAlertComment_Comment_Index) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesSetCaseConfigurationRequestCustomFieldsDefaultValue0 returns the union data inside the CasesSetCaseConfigurationRequest_CustomFields_DefaultValue as a CasesSetCaseConfigurationRequestCustomFieldsDefaultValue0 +func (t CasesSetCaseConfigurationRequest_CustomFields_DefaultValue) AsCasesSetCaseConfigurationRequestCustomFieldsDefaultValue0() (CasesSetCaseConfigurationRequestCustomFieldsDefaultValue0, error) { + var body 
CasesSetCaseConfigurationRequestCustomFieldsDefaultValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesSetCaseConfigurationRequestCustomFieldsDefaultValue0 overwrites any union data inside the CasesSetCaseConfigurationRequest_CustomFields_DefaultValue as the provided CasesSetCaseConfigurationRequestCustomFieldsDefaultValue0 +func (t *CasesSetCaseConfigurationRequest_CustomFields_DefaultValue) FromCasesSetCaseConfigurationRequestCustomFieldsDefaultValue0(v CasesSetCaseConfigurationRequestCustomFieldsDefaultValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesSetCaseConfigurationRequestCustomFieldsDefaultValue0 performs a merge with any union data inside the CasesSetCaseConfigurationRequest_CustomFields_DefaultValue, using the provided CasesSetCaseConfigurationRequestCustomFieldsDefaultValue0 +func (t *CasesSetCaseConfigurationRequest_CustomFields_DefaultValue) MergeCasesSetCaseConfigurationRequestCustomFieldsDefaultValue0(v CasesSetCaseConfigurationRequestCustomFieldsDefaultValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesSetCaseConfigurationRequestCustomFieldsDefaultValue1 returns the union data inside the CasesSetCaseConfigurationRequest_CustomFields_DefaultValue as a CasesSetCaseConfigurationRequestCustomFieldsDefaultValue1 +func (t CasesSetCaseConfigurationRequest_CustomFields_DefaultValue) AsCasesSetCaseConfigurationRequestCustomFieldsDefaultValue1() (CasesSetCaseConfigurationRequestCustomFieldsDefaultValue1, error) { + var body CasesSetCaseConfigurationRequestCustomFieldsDefaultValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesSetCaseConfigurationRequestCustomFieldsDefaultValue1 overwrites any union data inside the CasesSetCaseConfigurationRequest_CustomFields_DefaultValue as the provided CasesSetCaseConfigurationRequestCustomFieldsDefaultValue1 +func (t *CasesSetCaseConfigurationRequest_CustomFields_DefaultValue) FromCasesSetCaseConfigurationRequestCustomFieldsDefaultValue1(v CasesSetCaseConfigurationRequestCustomFieldsDefaultValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesSetCaseConfigurationRequestCustomFieldsDefaultValue1 performs a merge with any union data inside the CasesSetCaseConfigurationRequest_CustomFields_DefaultValue, using the provided CasesSetCaseConfigurationRequestCustomFieldsDefaultValue1 +func (t *CasesSetCaseConfigurationRequest_CustomFields_DefaultValue) MergeCasesSetCaseConfigurationRequestCustomFieldsDefaultValue1(v CasesSetCaseConfigurationRequestCustomFieldsDefaultValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesSetCaseConfigurationRequest_CustomFields_DefaultValue) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesSetCaseConfigurationRequest_CustomFields_DefaultValue) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesTemplatesCaseFieldsCustomFieldsValue0 returns the union data inside the CasesTemplates_CaseFields_CustomFields_Value as a CasesTemplatesCaseFieldsCustomFieldsValue0 +func (t CasesTemplates_CaseFields_CustomFields_Value) AsCasesTemplatesCaseFieldsCustomFieldsValue0() (CasesTemplatesCaseFieldsCustomFieldsValue0, error) { + var body 
CasesTemplatesCaseFieldsCustomFieldsValue0
+	err := json.Unmarshal(t.union, &body)
+	return body, err
+}
+
+// FromCasesTemplatesCaseFieldsCustomFieldsValue0 overwrites any union data inside the CasesTemplates_CaseFields_CustomFields_Value as the provided CasesTemplatesCaseFieldsCustomFieldsValue0
+func (t *CasesTemplates_CaseFields_CustomFields_Value) FromCasesTemplatesCaseFieldsCustomFieldsValue0(v CasesTemplatesCaseFieldsCustomFieldsValue0) error {
+	b, err := json.Marshal(v)
+	t.union = b
+	return err
+}
+
+// MergeCasesTemplatesCaseFieldsCustomFieldsValue0 performs a merge with any union data inside the CasesTemplates_CaseFields_CustomFields_Value, using the provided CasesTemplatesCaseFieldsCustomFieldsValue0
+func (t *CasesTemplates_CaseFields_CustomFields_Value) MergeCasesTemplatesCaseFieldsCustomFieldsValue0(v CasesTemplatesCaseFieldsCustomFieldsValue0) error {
+	b, err := json.Marshal(v)
+	if err != nil {
+		return err
+	}
+
+	merged, err := runtime.JSONMerge(t.union, b)
+	t.union = merged
+	return err
+}
+
+// AsCasesTemplatesCaseFieldsCustomFieldsValue1 returns the union data inside the CasesTemplates_CaseFields_CustomFields_Value as a CasesTemplatesCaseFieldsCustomFieldsValue1
+func (t CasesTemplates_CaseFields_CustomFields_Value) AsCasesTemplatesCaseFieldsCustomFieldsValue1() (CasesTemplatesCaseFieldsCustomFieldsValue1, error) {
+	var body CasesTemplatesCaseFieldsCustomFieldsValue1
+	err := json.Unmarshal(t.union, &body)
+	return body, err
+}
+
+// FromCasesTemplatesCaseFieldsCustomFieldsValue1 overwrites any union data inside the CasesTemplates_CaseFields_CustomFields_Value as the provided CasesTemplatesCaseFieldsCustomFieldsValue1
+func (t *CasesTemplates_CaseFields_CustomFields_Value) FromCasesTemplatesCaseFieldsCustomFieldsValue1(v CasesTemplatesCaseFieldsCustomFieldsValue1) error {
+	b, err := json.Marshal(v)
+	t.union = b
+	return err
+}
+
+// MergeCasesTemplatesCaseFieldsCustomFieldsValue1 performs a merge with any union data inside the CasesTemplates_CaseFields_CustomFields_Value, using the provided CasesTemplatesCaseFieldsCustomFieldsValue1
+func (t *CasesTemplates_CaseFields_CustomFields_Value) MergeCasesTemplatesCaseFieldsCustomFieldsValue1(v CasesTemplatesCaseFieldsCustomFieldsValue1) error {
+	b, err := json.Marshal(v)
+	if err != nil {
+		return err
+	}
+
+	merged, err := runtime.JSONMerge(t.union, b)
+	t.union = merged
+	return err
+}
+
+func (t CasesTemplates_CaseFields_CustomFields_Value) MarshalJSON() ([]byte, error) {
+	b, err := t.union.MarshalJSON()
+	return b, err
+}
+
+func (t *CasesTemplates_CaseFields_CustomFields_Value) UnmarshalJSON(b []byte) error {
+	err := t.union.UnmarshalJSON(b)
+	return err
+}
+
+// AsCasesUpdateAlertCommentRequestProperties returns the union data inside the CasesUpdateCaseCommentRequest as a CasesUpdateAlertCommentRequestProperties
+func (t CasesUpdateCaseCommentRequest) AsCasesUpdateAlertCommentRequestProperties() (CasesUpdateAlertCommentRequestProperties, error) {
+	var body CasesUpdateAlertCommentRequestProperties
+	err := json.Unmarshal(t.union, &body)
+	return body, err
+}
+
+// FromCasesUpdateAlertCommentRequestProperties overwrites any union data inside the CasesUpdateCaseCommentRequest as the provided CasesUpdateAlertCommentRequestProperties
+func (t *CasesUpdateCaseCommentRequest) FromCasesUpdateAlertCommentRequestProperties(v CasesUpdateAlertCommentRequestProperties) error {
+	v.Type = "alert"
+	b, err := json.Marshal(v)
+	t.union = b
+	return err
+}
+
+// MergeCasesUpdateAlertCommentRequestProperties performs a merge with any union data inside the CasesUpdateCaseCommentRequest, using the provided CasesUpdateAlertCommentRequestProperties
+func (t *CasesUpdateCaseCommentRequest) MergeCasesUpdateAlertCommentRequestProperties(v CasesUpdateAlertCommentRequestProperties) error {
+	v.Type = "alert"
+	b, err := json.Marshal(v)
+	if err != nil {
+		return err
+	}
+
+	merged, err := runtime.JSONMerge(t.union, b)
+	t.union = merged
+	return err
+}
+
+// AsCasesUpdateUserCommentRequestProperties returns the union data inside the CasesUpdateCaseCommentRequest as a CasesUpdateUserCommentRequestProperties
+func (t CasesUpdateCaseCommentRequest) AsCasesUpdateUserCommentRequestProperties() (CasesUpdateUserCommentRequestProperties, error) {
+	var body CasesUpdateUserCommentRequestProperties
+	err := json.Unmarshal(t.union, &body)
+	return body, err
+}
+
+// FromCasesUpdateUserCommentRequestProperties overwrites any union data inside the CasesUpdateCaseCommentRequest as the provided CasesUpdateUserCommentRequestProperties
+func (t *CasesUpdateCaseCommentRequest) FromCasesUpdateUserCommentRequestProperties(v CasesUpdateUserCommentRequestProperties) error {
+	v.Type = "user"
+	b, err := json.Marshal(v)
+	t.union = b
+	return err
+}
+
+// MergeCasesUpdateUserCommentRequestProperties performs a merge with any union data inside the CasesUpdateCaseCommentRequest, using the provided CasesUpdateUserCommentRequestProperties
+func (t *CasesUpdateCaseCommentRequest) MergeCasesUpdateUserCommentRequestProperties(v CasesUpdateUserCommentRequestProperties) error {
+	v.Type = "user"
+	b, err := json.Marshal(v)
+	if err != nil {
+		return err
+	}
+
+	merged, err := runtime.JSONMerge(t.union, b)
+	t.union = merged
+	return err
+}
+
+func (t CasesUpdateCaseCommentRequest) Discriminator() (string, error) {
+	var discriminator struct {
+		Discriminator string `json:"type"`
+	}
+	err := json.Unmarshal(t.union, &discriminator)
+	return discriminator.Discriminator, err
+}
+
+func (t CasesUpdateCaseCommentRequest) ValueByDiscriminator() (interface{}, error) {
+	discriminator, err := t.Discriminator()
+	if err != nil {
+		return nil, err
+	}
+	switch discriminator {
+	case "alert":
+		return t.AsCasesUpdateAlertCommentRequestProperties()
+	case "user":
+		return t.AsCasesUpdateUserCommentRequestProperties()
+	default:
+		return nil, errors.New("unknown discriminator value: " + discriminator)
+	}
+}
+
+func (t CasesUpdateCaseCommentRequest) MarshalJSON() ([]byte, error) {
+	b, err := t.union.MarshalJSON()
+	return b, err
+}
+
+func (t *CasesUpdateCaseCommentRequest) UnmarshalJSON(b []byte) error {
+	err := t.union.UnmarshalJSON(b)
+	return err
+}
+
+// AsCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0 returns the union data inside the CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue as a CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0
+func (t CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue) AsCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0() (CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0, error) {
+	var body CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0
+	err := json.Unmarshal(t.union, &body)
+	return body, err
+}
+
+// FromCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0 overwrites any union data inside the CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue as the provided CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0
+func (t *CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue)
FromCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0(v CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0 performs a merge with any union data inside the CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue, using the provided CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0 +func (t *CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue) MergeCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0(v CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1 returns the union data inside the CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue as a CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1 +func (t CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue) AsCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1() (CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1, error) { + var body CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1 overwrites any union data inside the CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue as the provided CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1 +func (t *CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue) FromCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1(v CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1 performs a merge with any union data inside the CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue, using the provided CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1 +func (t *CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue) MergeCasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1(v CasesUpdateCaseConfigurationRequestCustomFieldsDefaultValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesUpdateCaseConfigurationRequest_CustomFields_DefaultValue) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesConnectorPropertiesNone returns the union data inside the CasesUpdateCaseRequest_Cases_Connector as a CasesConnectorPropertiesNone +func (t CasesUpdateCaseRequest_Cases_Connector) AsCasesConnectorPropertiesNone() (CasesConnectorPropertiesNone, error) { + var body CasesConnectorPropertiesNone + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesNone overwrites any union data inside the CasesUpdateCaseRequest_Cases_Connector as the provided CasesConnectorPropertiesNone +func (t *CasesUpdateCaseRequest_Cases_Connector) FromCasesConnectorPropertiesNone(v CasesConnectorPropertiesNone) error { + b, err := json.Marshal(v) + t.union = b + return err +} + 
+// MergeCasesConnectorPropertiesNone performs a merge with any union data inside the CasesUpdateCaseRequest_Cases_Connector, using the provided CasesConnectorPropertiesNone +func (t *CasesUpdateCaseRequest_Cases_Connector) MergeCasesConnectorPropertiesNone(v CasesConnectorPropertiesNone) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesCasesWebhook returns the union data inside the CasesUpdateCaseRequest_Cases_Connector as a CasesConnectorPropertiesCasesWebhook +func (t CasesUpdateCaseRequest_Cases_Connector) AsCasesConnectorPropertiesCasesWebhook() (CasesConnectorPropertiesCasesWebhook, error) { + var body CasesConnectorPropertiesCasesWebhook + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesCasesWebhook overwrites any union data inside the CasesUpdateCaseRequest_Cases_Connector as the provided CasesConnectorPropertiesCasesWebhook +func (t *CasesUpdateCaseRequest_Cases_Connector) FromCasesConnectorPropertiesCasesWebhook(v CasesConnectorPropertiesCasesWebhook) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesCasesWebhook performs a merge with any union data inside the CasesUpdateCaseRequest_Cases_Connector, using the provided CasesConnectorPropertiesCasesWebhook +func (t *CasesUpdateCaseRequest_Cases_Connector) MergeCasesConnectorPropertiesCasesWebhook(v CasesConnectorPropertiesCasesWebhook) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesJira returns the union data inside the CasesUpdateCaseRequest_Cases_Connector as a CasesConnectorPropertiesJira +func (t CasesUpdateCaseRequest_Cases_Connector) AsCasesConnectorPropertiesJira() (CasesConnectorPropertiesJira, error) { + var body CasesConnectorPropertiesJira + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesJira overwrites any union data inside the CasesUpdateCaseRequest_Cases_Connector as the provided CasesConnectorPropertiesJira +func (t *CasesUpdateCaseRequest_Cases_Connector) FromCasesConnectorPropertiesJira(v CasesConnectorPropertiesJira) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesJira performs a merge with any union data inside the CasesUpdateCaseRequest_Cases_Connector, using the provided CasesConnectorPropertiesJira +func (t *CasesUpdateCaseRequest_Cases_Connector) MergeCasesConnectorPropertiesJira(v CasesConnectorPropertiesJira) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesResilient returns the union data inside the CasesUpdateCaseRequest_Cases_Connector as a CasesConnectorPropertiesResilient +func (t CasesUpdateCaseRequest_Cases_Connector) AsCasesConnectorPropertiesResilient() (CasesConnectorPropertiesResilient, error) { + var body CasesConnectorPropertiesResilient + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesResilient overwrites any union data inside the CasesUpdateCaseRequest_Cases_Connector as the provided CasesConnectorPropertiesResilient +func (t *CasesUpdateCaseRequest_Cases_Connector) FromCasesConnectorPropertiesResilient(v CasesConnectorPropertiesResilient) error 
{ + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesResilient performs a merge with any union data inside the CasesUpdateCaseRequest_Cases_Connector, using the provided CasesConnectorPropertiesResilient +func (t *CasesUpdateCaseRequest_Cases_Connector) MergeCasesConnectorPropertiesResilient(v CasesConnectorPropertiesResilient) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesServicenow returns the union data inside the CasesUpdateCaseRequest_Cases_Connector as a CasesConnectorPropertiesServicenow +func (t CasesUpdateCaseRequest_Cases_Connector) AsCasesConnectorPropertiesServicenow() (CasesConnectorPropertiesServicenow, error) { + var body CasesConnectorPropertiesServicenow + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesServicenow overwrites any union data inside the CasesUpdateCaseRequest_Cases_Connector as the provided CasesConnectorPropertiesServicenow +func (t *CasesUpdateCaseRequest_Cases_Connector) FromCasesConnectorPropertiesServicenow(v CasesConnectorPropertiesServicenow) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesServicenow performs a merge with any union data inside the CasesUpdateCaseRequest_Cases_Connector, using the provided CasesConnectorPropertiesServicenow +func (t *CasesUpdateCaseRequest_Cases_Connector) MergeCasesConnectorPropertiesServicenow(v CasesConnectorPropertiesServicenow) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesServicenowSir returns the union data inside the CasesUpdateCaseRequest_Cases_Connector as a CasesConnectorPropertiesServicenowSir +func (t CasesUpdateCaseRequest_Cases_Connector) AsCasesConnectorPropertiesServicenowSir() (CasesConnectorPropertiesServicenowSir, error) { + var body CasesConnectorPropertiesServicenowSir + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesServicenowSir overwrites any union data inside the CasesUpdateCaseRequest_Cases_Connector as the provided CasesConnectorPropertiesServicenowSir +func (t *CasesUpdateCaseRequest_Cases_Connector) FromCasesConnectorPropertiesServicenowSir(v CasesConnectorPropertiesServicenowSir) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesServicenowSir performs a merge with any union data inside the CasesUpdateCaseRequest_Cases_Connector, using the provided CasesConnectorPropertiesServicenowSir +func (t *CasesUpdateCaseRequest_Cases_Connector) MergeCasesConnectorPropertiesServicenowSir(v CasesConnectorPropertiesServicenowSir) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesConnectorPropertiesSwimlane returns the union data inside the CasesUpdateCaseRequest_Cases_Connector as a CasesConnectorPropertiesSwimlane +func (t CasesUpdateCaseRequest_Cases_Connector) AsCasesConnectorPropertiesSwimlane() (CasesConnectorPropertiesSwimlane, error) { + var body CasesConnectorPropertiesSwimlane + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesConnectorPropertiesSwimlane overwrites any union data inside the CasesUpdateCaseRequest_Cases_Connector as the 
provided CasesConnectorPropertiesSwimlane +func (t *CasesUpdateCaseRequest_Cases_Connector) FromCasesConnectorPropertiesSwimlane(v CasesConnectorPropertiesSwimlane) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesConnectorPropertiesSwimlane performs a merge with any union data inside the CasesUpdateCaseRequest_Cases_Connector, using the provided CasesConnectorPropertiesSwimlane +func (t *CasesUpdateCaseRequest_Cases_Connector) MergeCasesConnectorPropertiesSwimlane(v CasesConnectorPropertiesSwimlane) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesUpdateCaseRequest_Cases_Connector) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesUpdateCaseRequest_Cases_Connector) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesUpdateCaseRequestCasesCustomFieldsValue0 returns the union data inside the CasesUpdateCaseRequest_Cases_CustomFields_Value as a CasesUpdateCaseRequestCasesCustomFieldsValue0 +func (t CasesUpdateCaseRequest_Cases_CustomFields_Value) AsCasesUpdateCaseRequestCasesCustomFieldsValue0() (CasesUpdateCaseRequestCasesCustomFieldsValue0, error) { + var body CasesUpdateCaseRequestCasesCustomFieldsValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesUpdateCaseRequestCasesCustomFieldsValue0 overwrites any union data inside the CasesUpdateCaseRequest_Cases_CustomFields_Value as the provided CasesUpdateCaseRequestCasesCustomFieldsValue0 +func (t *CasesUpdateCaseRequest_Cases_CustomFields_Value) FromCasesUpdateCaseRequestCasesCustomFieldsValue0(v CasesUpdateCaseRequestCasesCustomFieldsValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesUpdateCaseRequestCasesCustomFieldsValue0 performs a merge with any union data inside the CasesUpdateCaseRequest_Cases_CustomFields_Value, using the provided CasesUpdateCaseRequestCasesCustomFieldsValue0 +func (t *CasesUpdateCaseRequest_Cases_CustomFields_Value) MergeCasesUpdateCaseRequestCasesCustomFieldsValue0(v CasesUpdateCaseRequestCasesCustomFieldsValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesUpdateCaseRequestCasesCustomFieldsValue1 returns the union data inside the CasesUpdateCaseRequest_Cases_CustomFields_Value as a CasesUpdateCaseRequestCasesCustomFieldsValue1 +func (t CasesUpdateCaseRequest_Cases_CustomFields_Value) AsCasesUpdateCaseRequestCasesCustomFieldsValue1() (CasesUpdateCaseRequestCasesCustomFieldsValue1, error) { + var body CasesUpdateCaseRequestCasesCustomFieldsValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesUpdateCaseRequestCasesCustomFieldsValue1 overwrites any union data inside the CasesUpdateCaseRequest_Cases_CustomFields_Value as the provided CasesUpdateCaseRequestCasesCustomFieldsValue1 +func (t *CasesUpdateCaseRequest_Cases_CustomFields_Value) FromCasesUpdateCaseRequestCasesCustomFieldsValue1(v CasesUpdateCaseRequestCasesCustomFieldsValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesUpdateCaseRequestCasesCustomFieldsValue1 performs a merge with any union data inside the CasesUpdateCaseRequest_Cases_CustomFields_Value, using the provided CasesUpdateCaseRequestCasesCustomFieldsValue1 +func (t 
*CasesUpdateCaseRequest_Cases_CustomFields_Value) MergeCasesUpdateCaseRequestCasesCustomFieldsValue1(v CasesUpdateCaseRequestCasesCustomFieldsValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesUpdateCaseRequest_Cases_CustomFields_Value) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesUpdateCaseRequest_Cases_CustomFields_Value) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesPayloadAlertComment returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadAlertComment +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadAlertComment() (CasesPayloadAlertComment, error) { + var body CasesPayloadAlertComment + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadAlertComment overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadAlertComment +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadAlertComment(v CasesPayloadAlertComment) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadAlertComment performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadAlertComment +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadAlertComment(v CasesPayloadAlertComment) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadAssignees returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadAssignees +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadAssignees() (CasesPayloadAssignees, error) { + var body CasesPayloadAssignees + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadAssignees overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadAssignees +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadAssignees(v CasesPayloadAssignees) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadAssignees performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadAssignees +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadAssignees(v CasesPayloadAssignees) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadConnector returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadConnector +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadConnector() (CasesPayloadConnector, error) { + var body CasesPayloadConnector + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadConnector overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadConnector +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadConnector(v CasesPayloadConnector) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// 
MergeCasesPayloadConnector performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadConnector +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadConnector(v CasesPayloadConnector) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadCreateCase returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadCreateCase +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadCreateCase() (CasesPayloadCreateCase, error) { + var body CasesPayloadCreateCase + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadCreateCase overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadCreateCase +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadCreateCase(v CasesPayloadCreateCase) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadCreateCase performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadCreateCase +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadCreateCase(v CasesPayloadCreateCase) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadDelete returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadDelete +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadDelete() (CasesPayloadDelete, error) { + var body CasesPayloadDelete + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadDelete overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadDelete +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadDelete(v CasesPayloadDelete) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadDelete performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadDelete +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadDelete(v CasesPayloadDelete) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadDescription returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadDescription +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadDescription() (CasesPayloadDescription, error) { + var body CasesPayloadDescription + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadDescription overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadDescription +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadDescription(v CasesPayloadDescription) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadDescription performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadDescription +func (t 
*CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadDescription(v CasesPayloadDescription) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadPushed returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadPushed +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadPushed() (CasesPayloadPushed, error) { + var body CasesPayloadPushed + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadPushed overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadPushed +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadPushed(v CasesPayloadPushed) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadPushed performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadPushed +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadPushed(v CasesPayloadPushed) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadSettings returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadSettings +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadSettings() (CasesPayloadSettings, error) { + var body CasesPayloadSettings + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadSettings overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadSettings +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadSettings(v CasesPayloadSettings) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadSettings performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadSettings +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadSettings(v CasesPayloadSettings) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadSeverity returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadSeverity +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadSeverity() (CasesPayloadSeverity, error) { + var body CasesPayloadSeverity + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadSeverity overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadSeverity +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadSeverity(v CasesPayloadSeverity) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadSeverity performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadSeverity +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadSeverity(v CasesPayloadSeverity) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// 
AsCasesPayloadStatus returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadStatus +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadStatus() (CasesPayloadStatus, error) { + var body CasesPayloadStatus + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadStatus overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadStatus +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadStatus(v CasesPayloadStatus) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadStatus performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadStatus +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadStatus(v CasesPayloadStatus) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadTags returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadTags +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadTags() (CasesPayloadTags, error) { + var body CasesPayloadTags + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadTags overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadTags +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadTags(v CasesPayloadTags) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadTags performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadTags +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadTags(v CasesPayloadTags) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadTitle returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadTitle +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadTitle() (CasesPayloadTitle, error) { + var body CasesPayloadTitle + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesPayloadTitle overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadTitle +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadTitle(v CasesPayloadTitle) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadTitle performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadTitle +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadTitle(v CasesPayloadTitle) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesPayloadUserComment returns the union data inside the CasesUserActionsFindResponseProperties_Payload as a CasesPayloadUserComment +func (t CasesUserActionsFindResponseProperties_Payload) AsCasesPayloadUserComment() (CasesPayloadUserComment, error) { + var body CasesPayloadUserComment + err := json.Unmarshal(t.union, &body) + return body, err +} + +// 
FromCasesPayloadUserComment overwrites any union data inside the CasesUserActionsFindResponseProperties_Payload as the provided CasesPayloadUserComment +func (t *CasesUserActionsFindResponseProperties_Payload) FromCasesPayloadUserComment(v CasesPayloadUserComment) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesPayloadUserComment performs a merge with any union data inside the CasesUserActionsFindResponseProperties_Payload, using the provided CasesPayloadUserComment +func (t *CasesUserActionsFindResponseProperties_Payload) MergeCasesPayloadUserComment(v CasesPayloadUserComment) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesUserActionsFindResponseProperties_Payload) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesUserActionsFindResponseProperties_Payload) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsDataViewsSwapDataViewRequestObjectForId0 returns the union data inside the DataViewsSwapDataViewRequestObject_ForId as a DataViewsSwapDataViewRequestObjectForId0 +func (t DataViewsSwapDataViewRequestObject_ForId) AsDataViewsSwapDataViewRequestObjectForId0() (DataViewsSwapDataViewRequestObjectForId0, error) { + var body DataViewsSwapDataViewRequestObjectForId0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromDataViewsSwapDataViewRequestObjectForId0 overwrites any union data inside the DataViewsSwapDataViewRequestObject_ForId as the provided DataViewsSwapDataViewRequestObjectForId0 +func (t *DataViewsSwapDataViewRequestObject_ForId) FromDataViewsSwapDataViewRequestObjectForId0(v DataViewsSwapDataViewRequestObjectForId0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeDataViewsSwapDataViewRequestObjectForId0 performs a merge with any union data inside the DataViewsSwapDataViewRequestObject_ForId, using the provided DataViewsSwapDataViewRequestObjectForId0 +func (t *DataViewsSwapDataViewRequestObject_ForId) MergeDataViewsSwapDataViewRequestObjectForId0(v DataViewsSwapDataViewRequestObjectForId0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsDataViewsSwapDataViewRequestObjectForId1 returns the union data inside the DataViewsSwapDataViewRequestObject_ForId as a DataViewsSwapDataViewRequestObjectForId1 +func (t DataViewsSwapDataViewRequestObject_ForId) AsDataViewsSwapDataViewRequestObjectForId1() (DataViewsSwapDataViewRequestObjectForId1, error) { + var body DataViewsSwapDataViewRequestObjectForId1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromDataViewsSwapDataViewRequestObjectForId1 overwrites any union data inside the DataViewsSwapDataViewRequestObject_ForId as the provided DataViewsSwapDataViewRequestObjectForId1 +func (t *DataViewsSwapDataViewRequestObject_ForId) FromDataViewsSwapDataViewRequestObjectForId1(v DataViewsSwapDataViewRequestObjectForId1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeDataViewsSwapDataViewRequestObjectForId1 performs a merge with any union data inside the DataViewsSwapDataViewRequestObject_ForId, using the provided DataViewsSwapDataViewRequestObjectForId1 +func (t *DataViewsSwapDataViewRequestObject_ForId) MergeDataViewsSwapDataViewRequestObjectForId1(v DataViewsSwapDataViewRequestObjectForId1) 
error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t DataViewsSwapDataViewRequestObject_ForId) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *DataViewsSwapDataViewRequestObject_ForId) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsObservabilityAIAssistantAPIInstruction0 returns the union data inside the ObservabilityAIAssistantAPIInstruction as a ObservabilityAIAssistantAPIInstruction0 +func (t ObservabilityAIAssistantAPIInstruction) AsObservabilityAIAssistantAPIInstruction0() (ObservabilityAIAssistantAPIInstruction0, error) { + var body ObservabilityAIAssistantAPIInstruction0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromObservabilityAIAssistantAPIInstruction0 overwrites any union data inside the ObservabilityAIAssistantAPIInstruction as the provided ObservabilityAIAssistantAPIInstruction0 +func (t *ObservabilityAIAssistantAPIInstruction) FromObservabilityAIAssistantAPIInstruction0(v ObservabilityAIAssistantAPIInstruction0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeObservabilityAIAssistantAPIInstruction0 performs a merge with any union data inside the ObservabilityAIAssistantAPIInstruction, using the provided ObservabilityAIAssistantAPIInstruction0 +func (t *ObservabilityAIAssistantAPIInstruction) MergeObservabilityAIAssistantAPIInstruction0(v ObservabilityAIAssistantAPIInstruction0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsObservabilityAIAssistantAPIInstruction1 returns the union data inside the ObservabilityAIAssistantAPIInstruction as a ObservabilityAIAssistantAPIInstruction1 +func (t ObservabilityAIAssistantAPIInstruction) AsObservabilityAIAssistantAPIInstruction1() (ObservabilityAIAssistantAPIInstruction1, error) { + var body ObservabilityAIAssistantAPIInstruction1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromObservabilityAIAssistantAPIInstruction1 overwrites any union data inside the ObservabilityAIAssistantAPIInstruction as the provided ObservabilityAIAssistantAPIInstruction1 +func (t *ObservabilityAIAssistantAPIInstruction) FromObservabilityAIAssistantAPIInstruction1(v ObservabilityAIAssistantAPIInstruction1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeObservabilityAIAssistantAPIInstruction1 performs a merge with any union data inside the ObservabilityAIAssistantAPIInstruction, using the provided ObservabilityAIAssistantAPIInstruction1 +func (t *ObservabilityAIAssistantAPIInstruction) MergeObservabilityAIAssistantAPIInstruction1(v ObservabilityAIAssistantAPIInstruction1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ObservabilityAIAssistantAPIInstruction) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ObservabilityAIAssistantAPIInstruction) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsBulkPurgeRollupRequestPurgePolicy0 returns the union data inside the SLOsBulkPurgeRollupRequest_PurgePolicy as a SLOsBulkPurgeRollupRequestPurgePolicy0 +func (t SLOsBulkPurgeRollupRequest_PurgePolicy) AsSLOsBulkPurgeRollupRequestPurgePolicy0() 
(SLOsBulkPurgeRollupRequestPurgePolicy0, error) { + var body SLOsBulkPurgeRollupRequestPurgePolicy0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsBulkPurgeRollupRequestPurgePolicy0 overwrites any union data inside the SLOsBulkPurgeRollupRequest_PurgePolicy as the provided SLOsBulkPurgeRollupRequestPurgePolicy0 +func (t *SLOsBulkPurgeRollupRequest_PurgePolicy) FromSLOsBulkPurgeRollupRequestPurgePolicy0(v SLOsBulkPurgeRollupRequestPurgePolicy0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsBulkPurgeRollupRequestPurgePolicy0 performs a merge with any union data inside the SLOsBulkPurgeRollupRequest_PurgePolicy, using the provided SLOsBulkPurgeRollupRequestPurgePolicy0 +func (t *SLOsBulkPurgeRollupRequest_PurgePolicy) MergeSLOsBulkPurgeRollupRequestPurgePolicy0(v SLOsBulkPurgeRollupRequestPurgePolicy0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsBulkPurgeRollupRequestPurgePolicy1 returns the union data inside the SLOsBulkPurgeRollupRequest_PurgePolicy as a SLOsBulkPurgeRollupRequestPurgePolicy1 +func (t SLOsBulkPurgeRollupRequest_PurgePolicy) AsSLOsBulkPurgeRollupRequestPurgePolicy1() (SLOsBulkPurgeRollupRequestPurgePolicy1, error) { + var body SLOsBulkPurgeRollupRequestPurgePolicy1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsBulkPurgeRollupRequestPurgePolicy1 overwrites any union data inside the SLOsBulkPurgeRollupRequest_PurgePolicy as the provided SLOsBulkPurgeRollupRequestPurgePolicy1 +func (t *SLOsBulkPurgeRollupRequest_PurgePolicy) FromSLOsBulkPurgeRollupRequestPurgePolicy1(v SLOsBulkPurgeRollupRequestPurgePolicy1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsBulkPurgeRollupRequestPurgePolicy1 performs a merge with any union data inside the SLOsBulkPurgeRollupRequest_PurgePolicy, using the provided SLOsBulkPurgeRollupRequestPurgePolicy1 +func (t *SLOsBulkPurgeRollupRequest_PurgePolicy) MergeSLOsBulkPurgeRollupRequestPurgePolicy1(v SLOsBulkPurgeRollupRequestPurgePolicy1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsBulkPurgeRollupRequest_PurgePolicy) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsBulkPurgeRollupRequest_PurgePolicy) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsIndicatorPropertiesCustomKql returns the union data inside the SLOsCreateSloRequest_Indicator as a SLOsIndicatorPropertiesCustomKql +func (t SLOsCreateSloRequest_Indicator) AsSLOsIndicatorPropertiesCustomKql() (SLOsIndicatorPropertiesCustomKql, error) { + var body SLOsIndicatorPropertiesCustomKql + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomKql overwrites any union data inside the SLOsCreateSloRequest_Indicator as the provided SLOsIndicatorPropertiesCustomKql +func (t *SLOsCreateSloRequest_Indicator) FromSLOsIndicatorPropertiesCustomKql(v SLOsIndicatorPropertiesCustomKql) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomKql performs a merge with any union data inside the SLOsCreateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesCustomKql +func (t *SLOsCreateSloRequest_Indicator) 
MergeSLOsIndicatorPropertiesCustomKql(v SLOsIndicatorPropertiesCustomKql) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesApmAvailability returns the union data inside the SLOsCreateSloRequest_Indicator as a SLOsIndicatorPropertiesApmAvailability +func (t SLOsCreateSloRequest_Indicator) AsSLOsIndicatorPropertiesApmAvailability() (SLOsIndicatorPropertiesApmAvailability, error) { + var body SLOsIndicatorPropertiesApmAvailability + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesApmAvailability overwrites any union data inside the SLOsCreateSloRequest_Indicator as the provided SLOsIndicatorPropertiesApmAvailability +func (t *SLOsCreateSloRequest_Indicator) FromSLOsIndicatorPropertiesApmAvailability(v SLOsIndicatorPropertiesApmAvailability) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesApmAvailability performs a merge with any union data inside the SLOsCreateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesApmAvailability +func (t *SLOsCreateSloRequest_Indicator) MergeSLOsIndicatorPropertiesApmAvailability(v SLOsIndicatorPropertiesApmAvailability) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesApmLatency returns the union data inside the SLOsCreateSloRequest_Indicator as a SLOsIndicatorPropertiesApmLatency +func (t SLOsCreateSloRequest_Indicator) AsSLOsIndicatorPropertiesApmLatency() (SLOsIndicatorPropertiesApmLatency, error) { + var body SLOsIndicatorPropertiesApmLatency + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesApmLatency overwrites any union data inside the SLOsCreateSloRequest_Indicator as the provided SLOsIndicatorPropertiesApmLatency +func (t *SLOsCreateSloRequest_Indicator) FromSLOsIndicatorPropertiesApmLatency(v SLOsIndicatorPropertiesApmLatency) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesApmLatency performs a merge with any union data inside the SLOsCreateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesApmLatency +func (t *SLOsCreateSloRequest_Indicator) MergeSLOsIndicatorPropertiesApmLatency(v SLOsIndicatorPropertiesApmLatency) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesCustomMetric returns the union data inside the SLOsCreateSloRequest_Indicator as a SLOsIndicatorPropertiesCustomMetric +func (t SLOsCreateSloRequest_Indicator) AsSLOsIndicatorPropertiesCustomMetric() (SLOsIndicatorPropertiesCustomMetric, error) { + var body SLOsIndicatorPropertiesCustomMetric + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomMetric overwrites any union data inside the SLOsCreateSloRequest_Indicator as the provided SLOsIndicatorPropertiesCustomMetric +func (t *SLOsCreateSloRequest_Indicator) FromSLOsIndicatorPropertiesCustomMetric(v SLOsIndicatorPropertiesCustomMetric) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomMetric performs a merge with any union data inside the SLOsCreateSloRequest_Indicator, using the provided 
SLOsIndicatorPropertiesCustomMetric +func (t *SLOsCreateSloRequest_Indicator) MergeSLOsIndicatorPropertiesCustomMetric(v SLOsIndicatorPropertiesCustomMetric) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesHistogram returns the union data inside the SLOsCreateSloRequest_Indicator as a SLOsIndicatorPropertiesHistogram +func (t SLOsCreateSloRequest_Indicator) AsSLOsIndicatorPropertiesHistogram() (SLOsIndicatorPropertiesHistogram, error) { + var body SLOsIndicatorPropertiesHistogram + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesHistogram overwrites any union data inside the SLOsCreateSloRequest_Indicator as the provided SLOsIndicatorPropertiesHistogram +func (t *SLOsCreateSloRequest_Indicator) FromSLOsIndicatorPropertiesHistogram(v SLOsIndicatorPropertiesHistogram) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesHistogram performs a merge with any union data inside the SLOsCreateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesHistogram +func (t *SLOsCreateSloRequest_Indicator) MergeSLOsIndicatorPropertiesHistogram(v SLOsIndicatorPropertiesHistogram) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesTimesliceMetric returns the union data inside the SLOsCreateSloRequest_Indicator as a SLOsIndicatorPropertiesTimesliceMetric +func (t SLOsCreateSloRequest_Indicator) AsSLOsIndicatorPropertiesTimesliceMetric() (SLOsIndicatorPropertiesTimesliceMetric, error) { + var body SLOsIndicatorPropertiesTimesliceMetric + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesTimesliceMetric overwrites any union data inside the SLOsCreateSloRequest_Indicator as the provided SLOsIndicatorPropertiesTimesliceMetric +func (t *SLOsCreateSloRequest_Indicator) FromSLOsIndicatorPropertiesTimesliceMetric(v SLOsIndicatorPropertiesTimesliceMetric) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesTimesliceMetric performs a merge with any union data inside the SLOsCreateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesTimesliceMetric +func (t *SLOsCreateSloRequest_Indicator) MergeSLOsIndicatorPropertiesTimesliceMetric(v SLOsIndicatorPropertiesTimesliceMetric) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsCreateSloRequest_Indicator) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsCreateSloRequest_Indicator) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsFindSloDefinitionsResponse0 returns the union data inside the SLOsFindSloDefinitionsResponse as a SLOsFindSloDefinitionsResponse0 +func (t SLOsFindSloDefinitionsResponse) AsSLOsFindSloDefinitionsResponse0() (SLOsFindSloDefinitionsResponse0, error) { + var body SLOsFindSloDefinitionsResponse0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsFindSloDefinitionsResponse0 overwrites any union data inside the SLOsFindSloDefinitionsResponse as the provided SLOsFindSloDefinitionsResponse0 +func (t *SLOsFindSloDefinitionsResponse) 
FromSLOsFindSloDefinitionsResponse0(v SLOsFindSloDefinitionsResponse0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsFindSloDefinitionsResponse0 performs a merge with any union data inside the SLOsFindSloDefinitionsResponse, using the provided SLOsFindSloDefinitionsResponse0 +func (t *SLOsFindSloDefinitionsResponse) MergeSLOsFindSloDefinitionsResponse0(v SLOsFindSloDefinitionsResponse0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsFindSloDefinitionsResponse1 returns the union data inside the SLOsFindSloDefinitionsResponse as a SLOsFindSloDefinitionsResponse1 +func (t SLOsFindSloDefinitionsResponse) AsSLOsFindSloDefinitionsResponse1() (SLOsFindSloDefinitionsResponse1, error) { + var body SLOsFindSloDefinitionsResponse1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsFindSloDefinitionsResponse1 overwrites any union data inside the SLOsFindSloDefinitionsResponse as the provided SLOsFindSloDefinitionsResponse1 +func (t *SLOsFindSloDefinitionsResponse) FromSLOsFindSloDefinitionsResponse1(v SLOsFindSloDefinitionsResponse1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsFindSloDefinitionsResponse1 performs a merge with any union data inside the SLOsFindSloDefinitionsResponse, using the provided SLOsFindSloDefinitionsResponse1 +func (t *SLOsFindSloDefinitionsResponse) MergeSLOsFindSloDefinitionsResponse1(v SLOsFindSloDefinitionsResponse1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsFindSloDefinitionsResponse) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsFindSloDefinitionsResponse) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsGroupBy0 returns the union data inside the SLOsGroupBy as a SLOsGroupBy0 +func (t SLOsGroupBy) AsSLOsGroupBy0() (SLOsGroupBy0, error) { + var body SLOsGroupBy0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsGroupBy0 overwrites any union data inside the SLOsGroupBy as the provided SLOsGroupBy0 +func (t *SLOsGroupBy) FromSLOsGroupBy0(v SLOsGroupBy0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsGroupBy0 performs a merge with any union data inside the SLOsGroupBy, using the provided SLOsGroupBy0 +func (t *SLOsGroupBy) MergeSLOsGroupBy0(v SLOsGroupBy0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsGroupBy1 returns the union data inside the SLOsGroupBy as a SLOsGroupBy1 +func (t SLOsGroupBy) AsSLOsGroupBy1() (SLOsGroupBy1, error) { + var body SLOsGroupBy1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsGroupBy1 overwrites any union data inside the SLOsGroupBy as the provided SLOsGroupBy1 +func (t *SLOsGroupBy) FromSLOsGroupBy1(v SLOsGroupBy1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsGroupBy1 performs a merge with any union data inside the SLOsGroupBy, using the provided SLOsGroupBy1 +func (t *SLOsGroupBy) MergeSLOsGroupBy1(v SLOsGroupBy1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + 
+func (t SLOsGroupBy) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsGroupBy) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0 returns the union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item as a SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0 +func (t SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item) AsSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0() (SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0, error) { + var body SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0 overwrites any union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item as the provided SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0 +func (t *SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item) FromSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0(v SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0 performs a merge with any union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item, using the provided SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0 +func (t *SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item) MergeSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0(v SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1 returns the union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item as a SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1 +func (t SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item) AsSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1() (SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1, error) { + var body SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1 overwrites any union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item as the provided SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1 +func (t *SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item) FromSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1(v SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1 performs a merge with any union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item, using the provided SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1 +func (t *SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item) MergeSLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1(v SLOsIndicatorPropertiesCustomMetricParamsGoodMetrics1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item) MarshalJSON() ([]byte, error) { + b, err 
:= t.union.MarshalJSON() + return b, err +} + +func (t *SLOsIndicatorPropertiesCustomMetric_Params_Good_Metrics_Item) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0 returns the union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item as a SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0 +func (t SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item) AsSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0() (SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0, error) { + var body SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0 overwrites any union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item as the provided SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0 +func (t *SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item) FromSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0(v SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0 performs a merge with any union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item, using the provided SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0 +func (t *SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item) MergeSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0(v SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1 returns the union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item as a SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1 +func (t SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item) AsSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1() (SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1, error) { + var body SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1 overwrites any union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item as the provided SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1 +func (t *SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item) FromSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1(v SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1 performs a merge with any union data inside the SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item, using the provided SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1 +func (t *SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item) MergeSLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1(v SLOsIndicatorPropertiesCustomMetricParamsTotalMetrics1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item) MarshalJSON() 
([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsIndicatorPropertiesCustomMetric_Params_Total_Metrics_Item) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsTimesliceMetricBasicMetricWithField returns the union data inside the SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item as a SLOsTimesliceMetricBasicMetricWithField +func (t SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) AsSLOsTimesliceMetricBasicMetricWithField() (SLOsTimesliceMetricBasicMetricWithField, error) { + var body SLOsTimesliceMetricBasicMetricWithField + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsTimesliceMetricBasicMetricWithField overwrites any union data inside the SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item as the provided SLOsTimesliceMetricBasicMetricWithField +func (t *SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) FromSLOsTimesliceMetricBasicMetricWithField(v SLOsTimesliceMetricBasicMetricWithField) error { + v.Aggregation = "max" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsTimesliceMetricBasicMetricWithField performs a merge with any union data inside the SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item, using the provided SLOsTimesliceMetricBasicMetricWithField +func (t *SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) MergeSLOsTimesliceMetricBasicMetricWithField(v SLOsTimesliceMetricBasicMetricWithField) error { + v.Aggregation = "max" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsTimesliceMetricPercentileMetric returns the union data inside the SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item as a SLOsTimesliceMetricPercentileMetric +func (t SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) AsSLOsTimesliceMetricPercentileMetric() (SLOsTimesliceMetricPercentileMetric, error) { + var body SLOsTimesliceMetricPercentileMetric + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsTimesliceMetricPercentileMetric overwrites any union data inside the SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item as the provided SLOsTimesliceMetricPercentileMetric +func (t *SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) FromSLOsTimesliceMetricPercentileMetric(v SLOsTimesliceMetricPercentileMetric) error { + v.Aggregation = "percentile" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsTimesliceMetricPercentileMetric performs a merge with any union data inside the SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item, using the provided SLOsTimesliceMetricPercentileMetric +func (t *SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) MergeSLOsTimesliceMetricPercentileMetric(v SLOsTimesliceMetricPercentileMetric) error { + v.Aggregation = "percentile" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsTimesliceMetricDocCountMetric returns the union data inside the SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item as a SLOsTimesliceMetricDocCountMetric +func (t SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) AsSLOsTimesliceMetricDocCountMetric() (SLOsTimesliceMetricDocCountMetric, error) { + var 
body SLOsTimesliceMetricDocCountMetric + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsTimesliceMetricDocCountMetric overwrites any union data inside the SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item as the provided SLOsTimesliceMetricDocCountMetric +func (t *SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) FromSLOsTimesliceMetricDocCountMetric(v SLOsTimesliceMetricDocCountMetric) error { + v.Aggregation = "doc_count" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsTimesliceMetricDocCountMetric performs a merge with any union data inside the SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item, using the provided SLOsTimesliceMetricDocCountMetric +func (t *SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) MergeSLOsTimesliceMetricDocCountMetric(v SLOsTimesliceMetricDocCountMetric) error { + v.Aggregation = "doc_count" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"aggregation"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err +} + +func (t SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch discriminator { + case "doc_count": + return t.AsSLOsTimesliceMetricDocCountMetric() + case "max": + return t.AsSLOsTimesliceMetricBasicMetricWithField() + case "percentile": + return t.AsSLOsTimesliceMetricPercentileMetric() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } +} + +func (t SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsIndicatorPropertiesTimesliceMetric_Params_Metric_Metrics_Item) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsKqlWithFilters0 returns the union data inside the SLOsKqlWithFilters as a SLOsKqlWithFilters0 +func (t SLOsKqlWithFilters) AsSLOsKqlWithFilters0() (SLOsKqlWithFilters0, error) { + var body SLOsKqlWithFilters0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsKqlWithFilters0 overwrites any union data inside the SLOsKqlWithFilters as the provided SLOsKqlWithFilters0 +func (t *SLOsKqlWithFilters) FromSLOsKqlWithFilters0(v SLOsKqlWithFilters0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsKqlWithFilters0 performs a merge with any union data inside the SLOsKqlWithFilters, using the provided SLOsKqlWithFilters0 +func (t *SLOsKqlWithFilters) MergeSLOsKqlWithFilters0(v SLOsKqlWithFilters0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsKqlWithFilters1 returns the union data inside the SLOsKqlWithFilters as a SLOsKqlWithFilters1 +func (t SLOsKqlWithFilters) AsSLOsKqlWithFilters1() (SLOsKqlWithFilters1, error) { + var body SLOsKqlWithFilters1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsKqlWithFilters1 overwrites any union data inside the SLOsKqlWithFilters as the provided 
SLOsKqlWithFilters1 +func (t *SLOsKqlWithFilters) FromSLOsKqlWithFilters1(v SLOsKqlWithFilters1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsKqlWithFilters1 performs a merge with any union data inside the SLOsKqlWithFilters, using the provided SLOsKqlWithFilters1 +func (t *SLOsKqlWithFilters) MergeSLOsKqlWithFilters1(v SLOsKqlWithFilters1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsKqlWithFilters) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsKqlWithFilters) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsKqlWithFiltersGood0 returns the union data inside the SLOsKqlWithFiltersGood as a SLOsKqlWithFiltersGood0 +func (t SLOsKqlWithFiltersGood) AsSLOsKqlWithFiltersGood0() (SLOsKqlWithFiltersGood0, error) { + var body SLOsKqlWithFiltersGood0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsKqlWithFiltersGood0 overwrites any union data inside the SLOsKqlWithFiltersGood as the provided SLOsKqlWithFiltersGood0 +func (t *SLOsKqlWithFiltersGood) FromSLOsKqlWithFiltersGood0(v SLOsKqlWithFiltersGood0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsKqlWithFiltersGood0 performs a merge with any union data inside the SLOsKqlWithFiltersGood, using the provided SLOsKqlWithFiltersGood0 +func (t *SLOsKqlWithFiltersGood) MergeSLOsKqlWithFiltersGood0(v SLOsKqlWithFiltersGood0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsKqlWithFiltersGood1 returns the union data inside the SLOsKqlWithFiltersGood as a SLOsKqlWithFiltersGood1 +func (t SLOsKqlWithFiltersGood) AsSLOsKqlWithFiltersGood1() (SLOsKqlWithFiltersGood1, error) { + var body SLOsKqlWithFiltersGood1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsKqlWithFiltersGood1 overwrites any union data inside the SLOsKqlWithFiltersGood as the provided SLOsKqlWithFiltersGood1 +func (t *SLOsKqlWithFiltersGood) FromSLOsKqlWithFiltersGood1(v SLOsKqlWithFiltersGood1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsKqlWithFiltersGood1 performs a merge with any union data inside the SLOsKqlWithFiltersGood, using the provided SLOsKqlWithFiltersGood1 +func (t *SLOsKqlWithFiltersGood) MergeSLOsKqlWithFiltersGood1(v SLOsKqlWithFiltersGood1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsKqlWithFiltersGood) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsKqlWithFiltersGood) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsKqlWithFiltersTotal0 returns the union data inside the SLOsKqlWithFiltersTotal as a SLOsKqlWithFiltersTotal0 +func (t SLOsKqlWithFiltersTotal) AsSLOsKqlWithFiltersTotal0() (SLOsKqlWithFiltersTotal0, error) { + var body SLOsKqlWithFiltersTotal0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsKqlWithFiltersTotal0 overwrites any union data inside the SLOsKqlWithFiltersTotal as the provided SLOsKqlWithFiltersTotal0 +func (t *SLOsKqlWithFiltersTotal) FromSLOsKqlWithFiltersTotal0(v SLOsKqlWithFiltersTotal0) error { + b, 
err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsKqlWithFiltersTotal0 performs a merge with any union data inside the SLOsKqlWithFiltersTotal, using the provided SLOsKqlWithFiltersTotal0 +func (t *SLOsKqlWithFiltersTotal) MergeSLOsKqlWithFiltersTotal0(v SLOsKqlWithFiltersTotal0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsKqlWithFiltersTotal1 returns the union data inside the SLOsKqlWithFiltersTotal as a SLOsKqlWithFiltersTotal1 +func (t SLOsKqlWithFiltersTotal) AsSLOsKqlWithFiltersTotal1() (SLOsKqlWithFiltersTotal1, error) { + var body SLOsKqlWithFiltersTotal1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsKqlWithFiltersTotal1 overwrites any union data inside the SLOsKqlWithFiltersTotal as the provided SLOsKqlWithFiltersTotal1 +func (t *SLOsKqlWithFiltersTotal) FromSLOsKqlWithFiltersTotal1(v SLOsKqlWithFiltersTotal1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsKqlWithFiltersTotal1 performs a merge with any union data inside the SLOsKqlWithFiltersTotal, using the provided SLOsKqlWithFiltersTotal1 +func (t *SLOsKqlWithFiltersTotal) MergeSLOsKqlWithFiltersTotal1(v SLOsKqlWithFiltersTotal1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsKqlWithFiltersTotal) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsKqlWithFiltersTotal) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsIndicatorPropertiesCustomKql returns the union data inside the SLOsSloDefinitionResponse_Indicator as a SLOsIndicatorPropertiesCustomKql +func (t SLOsSloDefinitionResponse_Indicator) AsSLOsIndicatorPropertiesCustomKql() (SLOsIndicatorPropertiesCustomKql, error) { + var body SLOsIndicatorPropertiesCustomKql + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomKql overwrites any union data inside the SLOsSloDefinitionResponse_Indicator as the provided SLOsIndicatorPropertiesCustomKql +func (t *SLOsSloDefinitionResponse_Indicator) FromSLOsIndicatorPropertiesCustomKql(v SLOsIndicatorPropertiesCustomKql) error { + v.Type = "sli.kql.custom" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomKql performs a merge with any union data inside the SLOsSloDefinitionResponse_Indicator, using the provided SLOsIndicatorPropertiesCustomKql +func (t *SLOsSloDefinitionResponse_Indicator) MergeSLOsIndicatorPropertiesCustomKql(v SLOsIndicatorPropertiesCustomKql) error { + v.Type = "sli.kql.custom" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesApmAvailability returns the union data inside the SLOsSloDefinitionResponse_Indicator as a SLOsIndicatorPropertiesApmAvailability +func (t SLOsSloDefinitionResponse_Indicator) AsSLOsIndicatorPropertiesApmAvailability() (SLOsIndicatorPropertiesApmAvailability, error) { + var body SLOsIndicatorPropertiesApmAvailability + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesApmAvailability overwrites any union data inside the SLOsSloDefinitionResponse_Indicator as the provided SLOsIndicatorPropertiesApmAvailability +func (t 
*SLOsSloDefinitionResponse_Indicator) FromSLOsIndicatorPropertiesApmAvailability(v SLOsIndicatorPropertiesApmAvailability) error { + v.Type = "sli.apm.transactionErrorRate" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesApmAvailability performs a merge with any union data inside the SLOsSloDefinitionResponse_Indicator, using the provided SLOsIndicatorPropertiesApmAvailability +func (t *SLOsSloDefinitionResponse_Indicator) MergeSLOsIndicatorPropertiesApmAvailability(v SLOsIndicatorPropertiesApmAvailability) error { + v.Type = "sli.apm.transactionErrorRate" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesApmLatency returns the union data inside the SLOsSloDefinitionResponse_Indicator as a SLOsIndicatorPropertiesApmLatency +func (t SLOsSloDefinitionResponse_Indicator) AsSLOsIndicatorPropertiesApmLatency() (SLOsIndicatorPropertiesApmLatency, error) { + var body SLOsIndicatorPropertiesApmLatency + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesApmLatency overwrites any union data inside the SLOsSloDefinitionResponse_Indicator as the provided SLOsIndicatorPropertiesApmLatency +func (t *SLOsSloDefinitionResponse_Indicator) FromSLOsIndicatorPropertiesApmLatency(v SLOsIndicatorPropertiesApmLatency) error { + v.Type = "sli.apm.transactionDuration" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesApmLatency performs a merge with any union data inside the SLOsSloDefinitionResponse_Indicator, using the provided SLOsIndicatorPropertiesApmLatency +func (t *SLOsSloDefinitionResponse_Indicator) MergeSLOsIndicatorPropertiesApmLatency(v SLOsIndicatorPropertiesApmLatency) error { + v.Type = "sli.apm.transactionDuration" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesCustomMetric returns the union data inside the SLOsSloDefinitionResponse_Indicator as a SLOsIndicatorPropertiesCustomMetric +func (t SLOsSloDefinitionResponse_Indicator) AsSLOsIndicatorPropertiesCustomMetric() (SLOsIndicatorPropertiesCustomMetric, error) { + var body SLOsIndicatorPropertiesCustomMetric + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomMetric overwrites any union data inside the SLOsSloDefinitionResponse_Indicator as the provided SLOsIndicatorPropertiesCustomMetric +func (t *SLOsSloDefinitionResponse_Indicator) FromSLOsIndicatorPropertiesCustomMetric(v SLOsIndicatorPropertiesCustomMetric) error { + v.Type = "sli.metric.custom" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomMetric performs a merge with any union data inside the SLOsSloDefinitionResponse_Indicator, using the provided SLOsIndicatorPropertiesCustomMetric +func (t *SLOsSloDefinitionResponse_Indicator) MergeSLOsIndicatorPropertiesCustomMetric(v SLOsIndicatorPropertiesCustomMetric) error { + v.Type = "sli.metric.custom" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesHistogram returns the union data inside the SLOsSloDefinitionResponse_Indicator as a SLOsIndicatorPropertiesHistogram +func (t SLOsSloDefinitionResponse_Indicator) 
AsSLOsIndicatorPropertiesHistogram() (SLOsIndicatorPropertiesHistogram, error) { + var body SLOsIndicatorPropertiesHistogram + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesHistogram overwrites any union data inside the SLOsSloDefinitionResponse_Indicator as the provided SLOsIndicatorPropertiesHistogram +func (t *SLOsSloDefinitionResponse_Indicator) FromSLOsIndicatorPropertiesHistogram(v SLOsIndicatorPropertiesHistogram) error { + v.Type = "sli.histogram.custom" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesHistogram performs a merge with any union data inside the SLOsSloDefinitionResponse_Indicator, using the provided SLOsIndicatorPropertiesHistogram +func (t *SLOsSloDefinitionResponse_Indicator) MergeSLOsIndicatorPropertiesHistogram(v SLOsIndicatorPropertiesHistogram) error { + v.Type = "sli.histogram.custom" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesTimesliceMetric returns the union data inside the SLOsSloDefinitionResponse_Indicator as a SLOsIndicatorPropertiesTimesliceMetric +func (t SLOsSloDefinitionResponse_Indicator) AsSLOsIndicatorPropertiesTimesliceMetric() (SLOsIndicatorPropertiesTimesliceMetric, error) { + var body SLOsIndicatorPropertiesTimesliceMetric + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesTimesliceMetric overwrites any union data inside the SLOsSloDefinitionResponse_Indicator as the provided SLOsIndicatorPropertiesTimesliceMetric +func (t *SLOsSloDefinitionResponse_Indicator) FromSLOsIndicatorPropertiesTimesliceMetric(v SLOsIndicatorPropertiesTimesliceMetric) error { + v.Type = "sli.metric.timeslice" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesTimesliceMetric performs a merge with any union data inside the SLOsSloDefinitionResponse_Indicator, using the provided SLOsIndicatorPropertiesTimesliceMetric +func (t *SLOsSloDefinitionResponse_Indicator) MergeSLOsIndicatorPropertiesTimesliceMetric(v SLOsIndicatorPropertiesTimesliceMetric) error { + v.Type = "sli.metric.timeslice" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsSloDefinitionResponse_Indicator) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"type"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err +} + +func (t SLOsSloDefinitionResponse_Indicator) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch discriminator { + case "sli.apm.transactionDuration": + return t.AsSLOsIndicatorPropertiesApmLatency() + case "sli.apm.transactionErrorRate": + return t.AsSLOsIndicatorPropertiesApmAvailability() + case "sli.histogram.custom": + return t.AsSLOsIndicatorPropertiesHistogram() + case "sli.kql.custom": + return t.AsSLOsIndicatorPropertiesCustomKql() + case "sli.metric.custom": + return t.AsSLOsIndicatorPropertiesCustomMetric() + case "sli.metric.timeslice": + return t.AsSLOsIndicatorPropertiesTimesliceMetric() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } +} + +func (t SLOsSloDefinitionResponse_Indicator) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + 
return b, err +} + +func (t *SLOsSloDefinitionResponse_Indicator) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsIndicatorPropertiesCustomKql returns the union data inside the SLOsSloWithSummaryResponse_Indicator as a SLOsIndicatorPropertiesCustomKql +func (t SLOsSloWithSummaryResponse_Indicator) AsSLOsIndicatorPropertiesCustomKql() (SLOsIndicatorPropertiesCustomKql, error) { + var body SLOsIndicatorPropertiesCustomKql + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomKql overwrites any union data inside the SLOsSloWithSummaryResponse_Indicator as the provided SLOsIndicatorPropertiesCustomKql +func (t *SLOsSloWithSummaryResponse_Indicator) FromSLOsIndicatorPropertiesCustomKql(v SLOsIndicatorPropertiesCustomKql) error { + v.Type = "sli.kql.custom" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomKql performs a merge with any union data inside the SLOsSloWithSummaryResponse_Indicator, using the provided SLOsIndicatorPropertiesCustomKql +func (t *SLOsSloWithSummaryResponse_Indicator) MergeSLOsIndicatorPropertiesCustomKql(v SLOsIndicatorPropertiesCustomKql) error { + v.Type = "sli.kql.custom" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesApmAvailability returns the union data inside the SLOsSloWithSummaryResponse_Indicator as a SLOsIndicatorPropertiesApmAvailability +func (t SLOsSloWithSummaryResponse_Indicator) AsSLOsIndicatorPropertiesApmAvailability() (SLOsIndicatorPropertiesApmAvailability, error) { + var body SLOsIndicatorPropertiesApmAvailability + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesApmAvailability overwrites any union data inside the SLOsSloWithSummaryResponse_Indicator as the provided SLOsIndicatorPropertiesApmAvailability +func (t *SLOsSloWithSummaryResponse_Indicator) FromSLOsIndicatorPropertiesApmAvailability(v SLOsIndicatorPropertiesApmAvailability) error { + v.Type = "sli.apm.transactionErrorRate" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesApmAvailability performs a merge with any union data inside the SLOsSloWithSummaryResponse_Indicator, using the provided SLOsIndicatorPropertiesApmAvailability +func (t *SLOsSloWithSummaryResponse_Indicator) MergeSLOsIndicatorPropertiesApmAvailability(v SLOsIndicatorPropertiesApmAvailability) error { + v.Type = "sli.apm.transactionErrorRate" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesApmLatency returns the union data inside the SLOsSloWithSummaryResponse_Indicator as a SLOsIndicatorPropertiesApmLatency +func (t SLOsSloWithSummaryResponse_Indicator) AsSLOsIndicatorPropertiesApmLatency() (SLOsIndicatorPropertiesApmLatency, error) { + var body SLOsIndicatorPropertiesApmLatency + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesApmLatency overwrites any union data inside the SLOsSloWithSummaryResponse_Indicator as the provided SLOsIndicatorPropertiesApmLatency +func (t *SLOsSloWithSummaryResponse_Indicator) FromSLOsIndicatorPropertiesApmLatency(v SLOsIndicatorPropertiesApmLatency) error { + v.Type = "sli.apm.transactionDuration" + b, err := json.Marshal(v) + t.union = b + return err +} + +// 
MergeSLOsIndicatorPropertiesApmLatency performs a merge with any union data inside the SLOsSloWithSummaryResponse_Indicator, using the provided SLOsIndicatorPropertiesApmLatency +func (t *SLOsSloWithSummaryResponse_Indicator) MergeSLOsIndicatorPropertiesApmLatency(v SLOsIndicatorPropertiesApmLatency) error { + v.Type = "sli.apm.transactionDuration" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesCustomMetric returns the union data inside the SLOsSloWithSummaryResponse_Indicator as a SLOsIndicatorPropertiesCustomMetric +func (t SLOsSloWithSummaryResponse_Indicator) AsSLOsIndicatorPropertiesCustomMetric() (SLOsIndicatorPropertiesCustomMetric, error) { + var body SLOsIndicatorPropertiesCustomMetric + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomMetric overwrites any union data inside the SLOsSloWithSummaryResponse_Indicator as the provided SLOsIndicatorPropertiesCustomMetric +func (t *SLOsSloWithSummaryResponse_Indicator) FromSLOsIndicatorPropertiesCustomMetric(v SLOsIndicatorPropertiesCustomMetric) error { + v.Type = "sli.metric.custom" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomMetric performs a merge with any union data inside the SLOsSloWithSummaryResponse_Indicator, using the provided SLOsIndicatorPropertiesCustomMetric +func (t *SLOsSloWithSummaryResponse_Indicator) MergeSLOsIndicatorPropertiesCustomMetric(v SLOsIndicatorPropertiesCustomMetric) error { + v.Type = "sli.metric.custom" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesHistogram returns the union data inside the SLOsSloWithSummaryResponse_Indicator as a SLOsIndicatorPropertiesHistogram +func (t SLOsSloWithSummaryResponse_Indicator) AsSLOsIndicatorPropertiesHistogram() (SLOsIndicatorPropertiesHistogram, error) { + var body SLOsIndicatorPropertiesHistogram + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesHistogram overwrites any union data inside the SLOsSloWithSummaryResponse_Indicator as the provided SLOsIndicatorPropertiesHistogram +func (t *SLOsSloWithSummaryResponse_Indicator) FromSLOsIndicatorPropertiesHistogram(v SLOsIndicatorPropertiesHistogram) error { + v.Type = "sli.histogram.custom" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesHistogram performs a merge with any union data inside the SLOsSloWithSummaryResponse_Indicator, using the provided SLOsIndicatorPropertiesHistogram +func (t *SLOsSloWithSummaryResponse_Indicator) MergeSLOsIndicatorPropertiesHistogram(v SLOsIndicatorPropertiesHistogram) error { + v.Type = "sli.histogram.custom" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesTimesliceMetric returns the union data inside the SLOsSloWithSummaryResponse_Indicator as a SLOsIndicatorPropertiesTimesliceMetric +func (t SLOsSloWithSummaryResponse_Indicator) AsSLOsIndicatorPropertiesTimesliceMetric() (SLOsIndicatorPropertiesTimesliceMetric, error) { + var body SLOsIndicatorPropertiesTimesliceMetric + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesTimesliceMetric overwrites any union data inside 
the SLOsSloWithSummaryResponse_Indicator as the provided SLOsIndicatorPropertiesTimesliceMetric +func (t *SLOsSloWithSummaryResponse_Indicator) FromSLOsIndicatorPropertiesTimesliceMetric(v SLOsIndicatorPropertiesTimesliceMetric) error { + v.Type = "sli.metric.timeslice" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesTimesliceMetric performs a merge with any union data inside the SLOsSloWithSummaryResponse_Indicator, using the provided SLOsIndicatorPropertiesTimesliceMetric +func (t *SLOsSloWithSummaryResponse_Indicator) MergeSLOsIndicatorPropertiesTimesliceMetric(v SLOsIndicatorPropertiesTimesliceMetric) error { + v.Type = "sli.metric.timeslice" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsSloWithSummaryResponse_Indicator) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"type"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err +} + +func (t SLOsSloWithSummaryResponse_Indicator) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch discriminator { + case "sli.apm.transactionDuration": + return t.AsSLOsIndicatorPropertiesApmLatency() + case "sli.apm.transactionErrorRate": + return t.AsSLOsIndicatorPropertiesApmAvailability() + case "sli.histogram.custom": + return t.AsSLOsIndicatorPropertiesHistogram() + case "sli.kql.custom": + return t.AsSLOsIndicatorPropertiesCustomKql() + case "sli.metric.custom": + return t.AsSLOsIndicatorPropertiesCustomMetric() + case "sli.metric.timeslice": + return t.AsSLOsIndicatorPropertiesTimesliceMetric() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } +} + +func (t SLOsSloWithSummaryResponse_Indicator) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsSloWithSummaryResponse_Indicator) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSLOsIndicatorPropertiesCustomKql returns the union data inside the SLOsUpdateSloRequest_Indicator as a SLOsIndicatorPropertiesCustomKql +func (t SLOsUpdateSloRequest_Indicator) AsSLOsIndicatorPropertiesCustomKql() (SLOsIndicatorPropertiesCustomKql, error) { + var body SLOsIndicatorPropertiesCustomKql + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomKql overwrites any union data inside the SLOsUpdateSloRequest_Indicator as the provided SLOsIndicatorPropertiesCustomKql +func (t *SLOsUpdateSloRequest_Indicator) FromSLOsIndicatorPropertiesCustomKql(v SLOsIndicatorPropertiesCustomKql) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomKql performs a merge with any union data inside the SLOsUpdateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesCustomKql +func (t *SLOsUpdateSloRequest_Indicator) MergeSLOsIndicatorPropertiesCustomKql(v SLOsIndicatorPropertiesCustomKql) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesApmAvailability returns the union data inside the SLOsUpdateSloRequest_Indicator as a SLOsIndicatorPropertiesApmAvailability +func (t SLOsUpdateSloRequest_Indicator) AsSLOsIndicatorPropertiesApmAvailability() 
(SLOsIndicatorPropertiesApmAvailability, error) { + var body SLOsIndicatorPropertiesApmAvailability + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesApmAvailability overwrites any union data inside the SLOsUpdateSloRequest_Indicator as the provided SLOsIndicatorPropertiesApmAvailability +func (t *SLOsUpdateSloRequest_Indicator) FromSLOsIndicatorPropertiesApmAvailability(v SLOsIndicatorPropertiesApmAvailability) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesApmAvailability performs a merge with any union data inside the SLOsUpdateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesApmAvailability +func (t *SLOsUpdateSloRequest_Indicator) MergeSLOsIndicatorPropertiesApmAvailability(v SLOsIndicatorPropertiesApmAvailability) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesApmLatency returns the union data inside the SLOsUpdateSloRequest_Indicator as a SLOsIndicatorPropertiesApmLatency +func (t SLOsUpdateSloRequest_Indicator) AsSLOsIndicatorPropertiesApmLatency() (SLOsIndicatorPropertiesApmLatency, error) { + var body SLOsIndicatorPropertiesApmLatency + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesApmLatency overwrites any union data inside the SLOsUpdateSloRequest_Indicator as the provided SLOsIndicatorPropertiesApmLatency +func (t *SLOsUpdateSloRequest_Indicator) FromSLOsIndicatorPropertiesApmLatency(v SLOsIndicatorPropertiesApmLatency) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesApmLatency performs a merge with any union data inside the SLOsUpdateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesApmLatency +func (t *SLOsUpdateSloRequest_Indicator) MergeSLOsIndicatorPropertiesApmLatency(v SLOsIndicatorPropertiesApmLatency) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesCustomMetric returns the union data inside the SLOsUpdateSloRequest_Indicator as a SLOsIndicatorPropertiesCustomMetric +func (t SLOsUpdateSloRequest_Indicator) AsSLOsIndicatorPropertiesCustomMetric() (SLOsIndicatorPropertiesCustomMetric, error) { + var body SLOsIndicatorPropertiesCustomMetric + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesCustomMetric overwrites any union data inside the SLOsUpdateSloRequest_Indicator as the provided SLOsIndicatorPropertiesCustomMetric +func (t *SLOsUpdateSloRequest_Indicator) FromSLOsIndicatorPropertiesCustomMetric(v SLOsIndicatorPropertiesCustomMetric) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesCustomMetric performs a merge with any union data inside the SLOsUpdateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesCustomMetric +func (t *SLOsUpdateSloRequest_Indicator) MergeSLOsIndicatorPropertiesCustomMetric(v SLOsIndicatorPropertiesCustomMetric) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesHistogram returns the union data inside the SLOsUpdateSloRequest_Indicator as a SLOsIndicatorPropertiesHistogram +func (t SLOsUpdateSloRequest_Indicator) 
AsSLOsIndicatorPropertiesHistogram() (SLOsIndicatorPropertiesHistogram, error) { + var body SLOsIndicatorPropertiesHistogram + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesHistogram overwrites any union data inside the SLOsUpdateSloRequest_Indicator as the provided SLOsIndicatorPropertiesHistogram +func (t *SLOsUpdateSloRequest_Indicator) FromSLOsIndicatorPropertiesHistogram(v SLOsIndicatorPropertiesHistogram) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesHistogram performs a merge with any union data inside the SLOsUpdateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesHistogram +func (t *SLOsUpdateSloRequest_Indicator) MergeSLOsIndicatorPropertiesHistogram(v SLOsIndicatorPropertiesHistogram) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSLOsIndicatorPropertiesTimesliceMetric returns the union data inside the SLOsUpdateSloRequest_Indicator as a SLOsIndicatorPropertiesTimesliceMetric +func (t SLOsUpdateSloRequest_Indicator) AsSLOsIndicatorPropertiesTimesliceMetric() (SLOsIndicatorPropertiesTimesliceMetric, error) { + var body SLOsIndicatorPropertiesTimesliceMetric + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSLOsIndicatorPropertiesTimesliceMetric overwrites any union data inside the SLOsUpdateSloRequest_Indicator as the provided SLOsIndicatorPropertiesTimesliceMetric +func (t *SLOsUpdateSloRequest_Indicator) FromSLOsIndicatorPropertiesTimesliceMetric(v SLOsIndicatorPropertiesTimesliceMetric) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSLOsIndicatorPropertiesTimesliceMetric performs a merge with any union data inside the SLOsUpdateSloRequest_Indicator, using the provided SLOsIndicatorPropertiesTimesliceMetric +func (t *SLOsUpdateSloRequest_Indicator) MergeSLOsIndicatorPropertiesTimesliceMetric(v SLOsIndicatorPropertiesTimesliceMetric) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SLOsUpdateSloRequest_Indicator) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SLOsUpdateSloRequest_Indicator) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityAIAssistantAPIKnowledgeBaseEntryContentReference returns the union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as a SecurityAIAssistantAPIKnowledgeBaseEntryContentReference +func (t SecurityAIAssistantAPIContentReferences_AdditionalProperties) AsSecurityAIAssistantAPIKnowledgeBaseEntryContentReference() (SecurityAIAssistantAPIKnowledgeBaseEntryContentReference, error) { + var body SecurityAIAssistantAPIKnowledgeBaseEntryContentReference + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIKnowledgeBaseEntryContentReference overwrites any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as the provided SecurityAIAssistantAPIKnowledgeBaseEntryContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) FromSecurityAIAssistantAPIKnowledgeBaseEntryContentReference(v SecurityAIAssistantAPIKnowledgeBaseEntryContentReference) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// 
MergeSecurityAIAssistantAPIKnowledgeBaseEntryContentReference performs a merge with any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties, using the provided SecurityAIAssistantAPIKnowledgeBaseEntryContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) MergeSecurityAIAssistantAPIKnowledgeBaseEntryContentReference(v SecurityAIAssistantAPIKnowledgeBaseEntryContentReference) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityAIAssistantAPISecurityAlertContentReference returns the union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as a SecurityAIAssistantAPISecurityAlertContentReference +func (t SecurityAIAssistantAPIContentReferences_AdditionalProperties) AsSecurityAIAssistantAPISecurityAlertContentReference() (SecurityAIAssistantAPISecurityAlertContentReference, error) { + var body SecurityAIAssistantAPISecurityAlertContentReference + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPISecurityAlertContentReference overwrites any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as the provided SecurityAIAssistantAPISecurityAlertContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) FromSecurityAIAssistantAPISecurityAlertContentReference(v SecurityAIAssistantAPISecurityAlertContentReference) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPISecurityAlertContentReference performs a merge with any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties, using the provided SecurityAIAssistantAPISecurityAlertContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) MergeSecurityAIAssistantAPISecurityAlertContentReference(v SecurityAIAssistantAPISecurityAlertContentReference) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityAIAssistantAPISecurityAlertsPageContentReference returns the union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as a SecurityAIAssistantAPISecurityAlertsPageContentReference +func (t SecurityAIAssistantAPIContentReferences_AdditionalProperties) AsSecurityAIAssistantAPISecurityAlertsPageContentReference() (SecurityAIAssistantAPISecurityAlertsPageContentReference, error) { + var body SecurityAIAssistantAPISecurityAlertsPageContentReference + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPISecurityAlertsPageContentReference overwrites any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as the provided SecurityAIAssistantAPISecurityAlertsPageContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) FromSecurityAIAssistantAPISecurityAlertsPageContentReference(v SecurityAIAssistantAPISecurityAlertsPageContentReference) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPISecurityAlertsPageContentReference performs a merge with any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties, using the provided SecurityAIAssistantAPISecurityAlertsPageContentReference +func (t 
*SecurityAIAssistantAPIContentReferences_AdditionalProperties) MergeSecurityAIAssistantAPISecurityAlertsPageContentReference(v SecurityAIAssistantAPISecurityAlertsPageContentReference) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityAIAssistantAPIProductDocumentationContentReference returns the union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as a SecurityAIAssistantAPIProductDocumentationContentReference +func (t SecurityAIAssistantAPIContentReferences_AdditionalProperties) AsSecurityAIAssistantAPIProductDocumentationContentReference() (SecurityAIAssistantAPIProductDocumentationContentReference, error) { + var body SecurityAIAssistantAPIProductDocumentationContentReference + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIProductDocumentationContentReference overwrites any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as the provided SecurityAIAssistantAPIProductDocumentationContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) FromSecurityAIAssistantAPIProductDocumentationContentReference(v SecurityAIAssistantAPIProductDocumentationContentReference) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIProductDocumentationContentReference performs a merge with any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties, using the provided SecurityAIAssistantAPIProductDocumentationContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) MergeSecurityAIAssistantAPIProductDocumentationContentReference(v SecurityAIAssistantAPIProductDocumentationContentReference) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityAIAssistantAPIEsqlContentReference returns the union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as a SecurityAIAssistantAPIEsqlContentReference +func (t SecurityAIAssistantAPIContentReferences_AdditionalProperties) AsSecurityAIAssistantAPIEsqlContentReference() (SecurityAIAssistantAPIEsqlContentReference, error) { + var body SecurityAIAssistantAPIEsqlContentReference + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIEsqlContentReference overwrites any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as the provided SecurityAIAssistantAPIEsqlContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) FromSecurityAIAssistantAPIEsqlContentReference(v SecurityAIAssistantAPIEsqlContentReference) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIEsqlContentReference performs a merge with any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties, using the provided SecurityAIAssistantAPIEsqlContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) MergeSecurityAIAssistantAPIEsqlContentReference(v SecurityAIAssistantAPIEsqlContentReference) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityAIAssistantAPIHrefContentReference returns the 
union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as a SecurityAIAssistantAPIHrefContentReference +func (t SecurityAIAssistantAPIContentReferences_AdditionalProperties) AsSecurityAIAssistantAPIHrefContentReference() (SecurityAIAssistantAPIHrefContentReference, error) { + var body SecurityAIAssistantAPIHrefContentReference + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIHrefContentReference overwrites any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties as the provided SecurityAIAssistantAPIHrefContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) FromSecurityAIAssistantAPIHrefContentReference(v SecurityAIAssistantAPIHrefContentReference) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIHrefContentReference performs a merge with any union data inside the SecurityAIAssistantAPIContentReferences_AdditionalProperties, using the provided SecurityAIAssistantAPIHrefContentReference +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) MergeSecurityAIAssistantAPIHrefContentReference(v SecurityAIAssistantAPIHrefContentReference) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityAIAssistantAPIContentReferences_AdditionalProperties) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityAIAssistantAPIContentReferences_AdditionalProperties) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityAIAssistantAPIDocumentEntryCreateFields returns the union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps as a SecurityAIAssistantAPIDocumentEntryCreateFields +func (t SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps) AsSecurityAIAssistantAPIDocumentEntryCreateFields() (SecurityAIAssistantAPIDocumentEntryCreateFields, error) { + var body SecurityAIAssistantAPIDocumentEntryCreateFields + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIDocumentEntryCreateFields overwrites any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps as the provided SecurityAIAssistantAPIDocumentEntryCreateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps) FromSecurityAIAssistantAPIDocumentEntryCreateFields(v SecurityAIAssistantAPIDocumentEntryCreateFields) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIDocumentEntryCreateFields performs a merge with any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps, using the provided SecurityAIAssistantAPIDocumentEntryCreateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps) MergeSecurityAIAssistantAPIDocumentEntryCreateFields(v SecurityAIAssistantAPIDocumentEntryCreateFields) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityAIAssistantAPIIndexEntryCreateFields returns the union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps as a SecurityAIAssistantAPIIndexEntryCreateFields +func (t SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps) AsSecurityAIAssistantAPIIndexEntryCreateFields() (SecurityAIAssistantAPIIndexEntryCreateFields, 
error) { + var body SecurityAIAssistantAPIIndexEntryCreateFields + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIIndexEntryCreateFields overwrites any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps as the provided SecurityAIAssistantAPIIndexEntryCreateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps) FromSecurityAIAssistantAPIIndexEntryCreateFields(v SecurityAIAssistantAPIIndexEntryCreateFields) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIIndexEntryCreateFields performs a merge with any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps, using the provided SecurityAIAssistantAPIIndexEntryCreateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps) MergeSecurityAIAssistantAPIIndexEntryCreateFields(v SecurityAIAssistantAPIIndexEntryCreateFields) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryCreateProps) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityAIAssistantAPIDocumentEntry returns the union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryResponse as a SecurityAIAssistantAPIDocumentEntry +func (t SecurityAIAssistantAPIKnowledgeBaseEntryResponse) AsSecurityAIAssistantAPIDocumentEntry() (SecurityAIAssistantAPIDocumentEntry, error) { + var body SecurityAIAssistantAPIDocumentEntry + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIDocumentEntry overwrites any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryResponse as the provided SecurityAIAssistantAPIDocumentEntry +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryResponse) FromSecurityAIAssistantAPIDocumentEntry(v SecurityAIAssistantAPIDocumentEntry) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIDocumentEntry performs a merge with any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryResponse, using the provided SecurityAIAssistantAPIDocumentEntry +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryResponse) MergeSecurityAIAssistantAPIDocumentEntry(v SecurityAIAssistantAPIDocumentEntry) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityAIAssistantAPIIndexEntry returns the union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryResponse as a SecurityAIAssistantAPIIndexEntry +func (t SecurityAIAssistantAPIKnowledgeBaseEntryResponse) AsSecurityAIAssistantAPIIndexEntry() (SecurityAIAssistantAPIIndexEntry, error) { + var body SecurityAIAssistantAPIIndexEntry + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIIndexEntry overwrites any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryResponse as the provided SecurityAIAssistantAPIIndexEntry +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryResponse) FromSecurityAIAssistantAPIIndexEntry(v SecurityAIAssistantAPIIndexEntry) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIIndexEntry performs a merge with any 
union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryResponse, using the provided SecurityAIAssistantAPIIndexEntry +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryResponse) MergeSecurityAIAssistantAPIIndexEntry(v SecurityAIAssistantAPIIndexEntry) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityAIAssistantAPIKnowledgeBaseEntryResponse) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryResponse) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityAIAssistantAPIDocumentEntryUpdateFields returns the union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps as a SecurityAIAssistantAPIDocumentEntryUpdateFields +func (t SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps) AsSecurityAIAssistantAPIDocumentEntryUpdateFields() (SecurityAIAssistantAPIDocumentEntryUpdateFields, error) { + var body SecurityAIAssistantAPIDocumentEntryUpdateFields + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIDocumentEntryUpdateFields overwrites any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps as the provided SecurityAIAssistantAPIDocumentEntryUpdateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps) FromSecurityAIAssistantAPIDocumentEntryUpdateFields(v SecurityAIAssistantAPIDocumentEntryUpdateFields) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIDocumentEntryUpdateFields performs a merge with any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps, using the provided SecurityAIAssistantAPIDocumentEntryUpdateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps) MergeSecurityAIAssistantAPIDocumentEntryUpdateFields(v SecurityAIAssistantAPIDocumentEntryUpdateFields) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityAIAssistantAPIIndexEntryUpdateFields returns the union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps as a SecurityAIAssistantAPIIndexEntryUpdateFields +func (t SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps) AsSecurityAIAssistantAPIIndexEntryUpdateFields() (SecurityAIAssistantAPIIndexEntryUpdateFields, error) { + var body SecurityAIAssistantAPIIndexEntryUpdateFields + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIIndexEntryUpdateFields overwrites any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps as the provided SecurityAIAssistantAPIIndexEntryUpdateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps) FromSecurityAIAssistantAPIIndexEntryUpdateFields(v SecurityAIAssistantAPIIndexEntryUpdateFields) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIIndexEntryUpdateFields performs a merge with any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps, using the provided SecurityAIAssistantAPIIndexEntryUpdateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps) MergeSecurityAIAssistantAPIIndexEntryUpdateFields(v SecurityAIAssistantAPIIndexEntryUpdateFields) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, 
err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryUpdateProps) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityAIAssistantAPIDocumentEntryCreateFields returns the union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps as a SecurityAIAssistantAPIDocumentEntryCreateFields +func (t SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps) AsSecurityAIAssistantAPIDocumentEntryCreateFields() (SecurityAIAssistantAPIDocumentEntryCreateFields, error) { + var body SecurityAIAssistantAPIDocumentEntryCreateFields + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIDocumentEntryCreateFields overwrites any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps as the provided SecurityAIAssistantAPIDocumentEntryCreateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps) FromSecurityAIAssistantAPIDocumentEntryCreateFields(v SecurityAIAssistantAPIDocumentEntryCreateFields) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIDocumentEntryCreateFields performs a merge with any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps, using the provided SecurityAIAssistantAPIDocumentEntryCreateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps) MergeSecurityAIAssistantAPIDocumentEntryCreateFields(v SecurityAIAssistantAPIDocumentEntryCreateFields) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityAIAssistantAPIIndexEntryCreateFields returns the union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps as a SecurityAIAssistantAPIIndexEntryCreateFields +func (t SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps) AsSecurityAIAssistantAPIIndexEntryCreateFields() (SecurityAIAssistantAPIIndexEntryCreateFields, error) { + var body SecurityAIAssistantAPIIndexEntryCreateFields + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityAIAssistantAPIIndexEntryCreateFields overwrites any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps as the provided SecurityAIAssistantAPIIndexEntryCreateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps) FromSecurityAIAssistantAPIIndexEntryCreateFields(v SecurityAIAssistantAPIIndexEntryCreateFields) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityAIAssistantAPIIndexEntryCreateFields performs a merge with any union data inside the SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps, using the provided SecurityAIAssistantAPIIndexEntryCreateFields +func (t *SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps) MergeSecurityAIAssistantAPIIndexEntryCreateFields(v SecurityAIAssistantAPIIndexEntryCreateFields) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t 
*SecurityAIAssistantAPIKnowledgeBaseEntryUpdateRouteProps) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIAlertsSortCombinations returns the union data inside the SecurityDetectionsAPIAlertsSort as a SecurityDetectionsAPIAlertsSortCombinations +func (t SecurityDetectionsAPIAlertsSort) AsSecurityDetectionsAPIAlertsSortCombinations() (SecurityDetectionsAPIAlertsSortCombinations, error) { + var body SecurityDetectionsAPIAlertsSortCombinations + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIAlertsSortCombinations overwrites any union data inside the SecurityDetectionsAPIAlertsSort as the provided SecurityDetectionsAPIAlertsSortCombinations +func (t *SecurityDetectionsAPIAlertsSort) FromSecurityDetectionsAPIAlertsSortCombinations(v SecurityDetectionsAPIAlertsSortCombinations) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIAlertsSortCombinations performs a merge with any union data inside the SecurityDetectionsAPIAlertsSort, using the provided SecurityDetectionsAPIAlertsSortCombinations +func (t *SecurityDetectionsAPIAlertsSort) MergeSecurityDetectionsAPIAlertsSortCombinations(v SecurityDetectionsAPIAlertsSortCombinations) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIAlertsSort1 returns the union data inside the SecurityDetectionsAPIAlertsSort as a SecurityDetectionsAPIAlertsSort1 +func (t SecurityDetectionsAPIAlertsSort) AsSecurityDetectionsAPIAlertsSort1() (SecurityDetectionsAPIAlertsSort1, error) { + var body SecurityDetectionsAPIAlertsSort1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIAlertsSort1 overwrites any union data inside the SecurityDetectionsAPIAlertsSort as the provided SecurityDetectionsAPIAlertsSort1 +func (t *SecurityDetectionsAPIAlertsSort) FromSecurityDetectionsAPIAlertsSort1(v SecurityDetectionsAPIAlertsSort1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIAlertsSort1 performs a merge with any union data inside the SecurityDetectionsAPIAlertsSort, using the provided SecurityDetectionsAPIAlertsSort1 +func (t *SecurityDetectionsAPIAlertsSort) MergeSecurityDetectionsAPIAlertsSort1(v SecurityDetectionsAPIAlertsSort1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIAlertsSort) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIAlertsSort) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIAlertsSortCombinations0 returns the union data inside the SecurityDetectionsAPIAlertsSortCombinations as a SecurityDetectionsAPIAlertsSortCombinations0 +func (t SecurityDetectionsAPIAlertsSortCombinations) AsSecurityDetectionsAPIAlertsSortCombinations0() (SecurityDetectionsAPIAlertsSortCombinations0, error) { + var body SecurityDetectionsAPIAlertsSortCombinations0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIAlertsSortCombinations0 overwrites any union data inside the SecurityDetectionsAPIAlertsSortCombinations as the provided SecurityDetectionsAPIAlertsSortCombinations0 +func (t 
*SecurityDetectionsAPIAlertsSortCombinations) FromSecurityDetectionsAPIAlertsSortCombinations0(v SecurityDetectionsAPIAlertsSortCombinations0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIAlertsSortCombinations0 performs a merge with any union data inside the SecurityDetectionsAPIAlertsSortCombinations, using the provided SecurityDetectionsAPIAlertsSortCombinations0 +func (t *SecurityDetectionsAPIAlertsSortCombinations) MergeSecurityDetectionsAPIAlertsSortCombinations0(v SecurityDetectionsAPIAlertsSortCombinations0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIAlertsSortCombinations1 returns the union data inside the SecurityDetectionsAPIAlertsSortCombinations as a SecurityDetectionsAPIAlertsSortCombinations1 +func (t SecurityDetectionsAPIAlertsSortCombinations) AsSecurityDetectionsAPIAlertsSortCombinations1() (SecurityDetectionsAPIAlertsSortCombinations1, error) { + var body SecurityDetectionsAPIAlertsSortCombinations1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIAlertsSortCombinations1 overwrites any union data inside the SecurityDetectionsAPIAlertsSortCombinations as the provided SecurityDetectionsAPIAlertsSortCombinations1 +func (t *SecurityDetectionsAPIAlertsSortCombinations) FromSecurityDetectionsAPIAlertsSortCombinations1(v SecurityDetectionsAPIAlertsSortCombinations1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIAlertsSortCombinations1 performs a merge with any union data inside the SecurityDetectionsAPIAlertsSortCombinations, using the provided SecurityDetectionsAPIAlertsSortCombinations1 +func (t *SecurityDetectionsAPIAlertsSortCombinations) MergeSecurityDetectionsAPIAlertsSortCombinations1(v SecurityDetectionsAPIAlertsSortCombinations1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIAlertsSortCombinations) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIAlertsSortCombinations) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIBulkActionEditPayloadTags returns the union data inside the SecurityDetectionsAPIBulkActionEditPayload as a SecurityDetectionsAPIBulkActionEditPayloadTags +func (t SecurityDetectionsAPIBulkActionEditPayload) AsSecurityDetectionsAPIBulkActionEditPayloadTags() (SecurityDetectionsAPIBulkActionEditPayloadTags, error) { + var body SecurityDetectionsAPIBulkActionEditPayloadTags + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkActionEditPayloadTags overwrites any union data inside the SecurityDetectionsAPIBulkActionEditPayload as the provided SecurityDetectionsAPIBulkActionEditPayloadTags +func (t *SecurityDetectionsAPIBulkActionEditPayload) FromSecurityDetectionsAPIBulkActionEditPayloadTags(v SecurityDetectionsAPIBulkActionEditPayloadTags) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkActionEditPayloadTags performs a merge with any union data inside the SecurityDetectionsAPIBulkActionEditPayload, using the provided SecurityDetectionsAPIBulkActionEditPayloadTags +func (t 
*SecurityDetectionsAPIBulkActionEditPayload) MergeSecurityDetectionsAPIBulkActionEditPayloadTags(v SecurityDetectionsAPIBulkActionEditPayloadTags) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIBulkActionEditPayloadIndexPatterns returns the union data inside the SecurityDetectionsAPIBulkActionEditPayload as a SecurityDetectionsAPIBulkActionEditPayloadIndexPatterns +func (t SecurityDetectionsAPIBulkActionEditPayload) AsSecurityDetectionsAPIBulkActionEditPayloadIndexPatterns() (SecurityDetectionsAPIBulkActionEditPayloadIndexPatterns, error) { + var body SecurityDetectionsAPIBulkActionEditPayloadIndexPatterns + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkActionEditPayloadIndexPatterns overwrites any union data inside the SecurityDetectionsAPIBulkActionEditPayload as the provided SecurityDetectionsAPIBulkActionEditPayloadIndexPatterns +func (t *SecurityDetectionsAPIBulkActionEditPayload) FromSecurityDetectionsAPIBulkActionEditPayloadIndexPatterns(v SecurityDetectionsAPIBulkActionEditPayloadIndexPatterns) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkActionEditPayloadIndexPatterns performs a merge with any union data inside the SecurityDetectionsAPIBulkActionEditPayload, using the provided SecurityDetectionsAPIBulkActionEditPayloadIndexPatterns +func (t *SecurityDetectionsAPIBulkActionEditPayload) MergeSecurityDetectionsAPIBulkActionEditPayloadIndexPatterns(v SecurityDetectionsAPIBulkActionEditPayloadIndexPatterns) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIBulkActionEditPayloadInvestigationFields returns the union data inside the SecurityDetectionsAPIBulkActionEditPayload as a SecurityDetectionsAPIBulkActionEditPayloadInvestigationFields +func (t SecurityDetectionsAPIBulkActionEditPayload) AsSecurityDetectionsAPIBulkActionEditPayloadInvestigationFields() (SecurityDetectionsAPIBulkActionEditPayloadInvestigationFields, error) { + var body SecurityDetectionsAPIBulkActionEditPayloadInvestigationFields + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkActionEditPayloadInvestigationFields overwrites any union data inside the SecurityDetectionsAPIBulkActionEditPayload as the provided SecurityDetectionsAPIBulkActionEditPayloadInvestigationFields +func (t *SecurityDetectionsAPIBulkActionEditPayload) FromSecurityDetectionsAPIBulkActionEditPayloadInvestigationFields(v SecurityDetectionsAPIBulkActionEditPayloadInvestigationFields) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkActionEditPayloadInvestigationFields performs a merge with any union data inside the SecurityDetectionsAPIBulkActionEditPayload, using the provided SecurityDetectionsAPIBulkActionEditPayloadInvestigationFields +func (t *SecurityDetectionsAPIBulkActionEditPayload) MergeSecurityDetectionsAPIBulkActionEditPayloadInvestigationFields(v SecurityDetectionsAPIBulkActionEditPayloadInvestigationFields) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIBulkActionEditPayloadTimeline returns the union data inside the 
SecurityDetectionsAPIBulkActionEditPayload as a SecurityDetectionsAPIBulkActionEditPayloadTimeline +func (t SecurityDetectionsAPIBulkActionEditPayload) AsSecurityDetectionsAPIBulkActionEditPayloadTimeline() (SecurityDetectionsAPIBulkActionEditPayloadTimeline, error) { + var body SecurityDetectionsAPIBulkActionEditPayloadTimeline + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkActionEditPayloadTimeline overwrites any union data inside the SecurityDetectionsAPIBulkActionEditPayload as the provided SecurityDetectionsAPIBulkActionEditPayloadTimeline +func (t *SecurityDetectionsAPIBulkActionEditPayload) FromSecurityDetectionsAPIBulkActionEditPayloadTimeline(v SecurityDetectionsAPIBulkActionEditPayloadTimeline) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkActionEditPayloadTimeline performs a merge with any union data inside the SecurityDetectionsAPIBulkActionEditPayload, using the provided SecurityDetectionsAPIBulkActionEditPayloadTimeline +func (t *SecurityDetectionsAPIBulkActionEditPayload) MergeSecurityDetectionsAPIBulkActionEditPayloadTimeline(v SecurityDetectionsAPIBulkActionEditPayloadTimeline) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIBulkActionEditPayloadRuleActions returns the union data inside the SecurityDetectionsAPIBulkActionEditPayload as a SecurityDetectionsAPIBulkActionEditPayloadRuleActions +func (t SecurityDetectionsAPIBulkActionEditPayload) AsSecurityDetectionsAPIBulkActionEditPayloadRuleActions() (SecurityDetectionsAPIBulkActionEditPayloadRuleActions, error) { + var body SecurityDetectionsAPIBulkActionEditPayloadRuleActions + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkActionEditPayloadRuleActions overwrites any union data inside the SecurityDetectionsAPIBulkActionEditPayload as the provided SecurityDetectionsAPIBulkActionEditPayloadRuleActions +func (t *SecurityDetectionsAPIBulkActionEditPayload) FromSecurityDetectionsAPIBulkActionEditPayloadRuleActions(v SecurityDetectionsAPIBulkActionEditPayloadRuleActions) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkActionEditPayloadRuleActions performs a merge with any union data inside the SecurityDetectionsAPIBulkActionEditPayload, using the provided SecurityDetectionsAPIBulkActionEditPayloadRuleActions +func (t *SecurityDetectionsAPIBulkActionEditPayload) MergeSecurityDetectionsAPIBulkActionEditPayloadRuleActions(v SecurityDetectionsAPIBulkActionEditPayloadRuleActions) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIBulkActionEditPayloadSchedule returns the union data inside the SecurityDetectionsAPIBulkActionEditPayload as a SecurityDetectionsAPIBulkActionEditPayloadSchedule +func (t SecurityDetectionsAPIBulkActionEditPayload) AsSecurityDetectionsAPIBulkActionEditPayloadSchedule() (SecurityDetectionsAPIBulkActionEditPayloadSchedule, error) { + var body SecurityDetectionsAPIBulkActionEditPayloadSchedule + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkActionEditPayloadSchedule overwrites any union data inside the SecurityDetectionsAPIBulkActionEditPayload as the provided 
SecurityDetectionsAPIBulkActionEditPayloadSchedule +func (t *SecurityDetectionsAPIBulkActionEditPayload) FromSecurityDetectionsAPIBulkActionEditPayloadSchedule(v SecurityDetectionsAPIBulkActionEditPayloadSchedule) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkActionEditPayloadSchedule performs a merge with any union data inside the SecurityDetectionsAPIBulkActionEditPayload, using the provided SecurityDetectionsAPIBulkActionEditPayloadSchedule +func (t *SecurityDetectionsAPIBulkActionEditPayload) MergeSecurityDetectionsAPIBulkActionEditPayloadSchedule(v SecurityDetectionsAPIBulkActionEditPayloadSchedule) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIBulkActionEditPayloadAlertSuppression returns the union data inside the SecurityDetectionsAPIBulkActionEditPayload as a SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression +func (t SecurityDetectionsAPIBulkActionEditPayload) AsSecurityDetectionsAPIBulkActionEditPayloadAlertSuppression() (SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression, error) { + var body SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkActionEditPayloadAlertSuppression overwrites any union data inside the SecurityDetectionsAPIBulkActionEditPayload as the provided SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression +func (t *SecurityDetectionsAPIBulkActionEditPayload) FromSecurityDetectionsAPIBulkActionEditPayloadAlertSuppression(v SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkActionEditPayloadAlertSuppression performs a merge with any union data inside the SecurityDetectionsAPIBulkActionEditPayload, using the provided SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression +func (t *SecurityDetectionsAPIBulkActionEditPayload) MergeSecurityDetectionsAPIBulkActionEditPayloadAlertSuppression(v SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIBulkActionEditPayload) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIBulkActionEditPayload) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression returns the union data inside the SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression as a SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression +func (t SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) AsSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression() (SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression, error) { + var body SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression overwrites any union data inside the SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression as the provided SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression +func (t 
*SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) FromSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression(v SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression performs a merge with any union data inside the SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression, using the provided SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression +func (t *SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) MergeSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression(v SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppression) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold returns the union data inside the SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression as a SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold +func (t SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) AsSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold() (SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold, error) { + var body SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold overwrites any union data inside the SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression as the provided SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold +func (t *SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) FromSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold(v SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold performs a merge with any union data inside the SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression, using the provided SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold +func (t *SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) MergeSecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold(v SecurityDetectionsAPIBulkActionEditPayloadSetAlertSuppressionForThreshold) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression returns the union data inside the SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression as a SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression +func (t SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) AsSecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression() (SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression, error) { + var body SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression overwrites any union data inside the SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression as the provided 
SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression +func (t *SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) FromSecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression(v SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression performs a merge with any union data inside the SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression, using the provided SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression +func (t *SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) MergeSecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression(v SecurityDetectionsAPIBulkActionEditPayloadDeleteAlertSuppression) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIBulkActionEditPayloadAlertSuppression) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIBulkEditSkipReason returns the union data inside the SecurityDetectionsAPIBulkActionSkipResult_SkipReason as a SecurityDetectionsAPIBulkEditSkipReason +func (t SecurityDetectionsAPIBulkActionSkipResult_SkipReason) AsSecurityDetectionsAPIBulkEditSkipReason() (SecurityDetectionsAPIBulkEditSkipReason, error) { + var body SecurityDetectionsAPIBulkEditSkipReason + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkEditSkipReason overwrites any union data inside the SecurityDetectionsAPIBulkActionSkipResult_SkipReason as the provided SecurityDetectionsAPIBulkEditSkipReason +func (t *SecurityDetectionsAPIBulkActionSkipResult_SkipReason) FromSecurityDetectionsAPIBulkEditSkipReason(v SecurityDetectionsAPIBulkEditSkipReason) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkEditSkipReason performs a merge with any union data inside the SecurityDetectionsAPIBulkActionSkipResult_SkipReason, using the provided SecurityDetectionsAPIBulkEditSkipReason +func (t *SecurityDetectionsAPIBulkActionSkipResult_SkipReason) MergeSecurityDetectionsAPIBulkEditSkipReason(v SecurityDetectionsAPIBulkEditSkipReason) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIBulkGapsFillingSkipReason returns the union data inside the SecurityDetectionsAPIBulkActionSkipResult_SkipReason as a SecurityDetectionsAPIBulkGapsFillingSkipReason +func (t SecurityDetectionsAPIBulkActionSkipResult_SkipReason) AsSecurityDetectionsAPIBulkGapsFillingSkipReason() (SecurityDetectionsAPIBulkGapsFillingSkipReason, error) { + var body SecurityDetectionsAPIBulkGapsFillingSkipReason + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIBulkGapsFillingSkipReason overwrites any union data inside the SecurityDetectionsAPIBulkActionSkipResult_SkipReason as the provided SecurityDetectionsAPIBulkGapsFillingSkipReason +func (t *SecurityDetectionsAPIBulkActionSkipResult_SkipReason) FromSecurityDetectionsAPIBulkGapsFillingSkipReason(v SecurityDetectionsAPIBulkGapsFillingSkipReason) error { + b, err := 
json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIBulkGapsFillingSkipReason performs a merge with any union data inside the SecurityDetectionsAPIBulkActionSkipResult_SkipReason, using the provided SecurityDetectionsAPIBulkGapsFillingSkipReason +func (t *SecurityDetectionsAPIBulkActionSkipResult_SkipReason) MergeSecurityDetectionsAPIBulkGapsFillingSkipReason(v SecurityDetectionsAPIBulkGapsFillingSkipReason) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIBulkActionSkipResult_SkipReason) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIBulkActionSkipResult_SkipReason) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIEcsMappingValue0 returns the union data inside the SecurityDetectionsAPIEcsMapping_Value as a SecurityDetectionsAPIEcsMappingValue0 +func (t SecurityDetectionsAPIEcsMapping_Value) AsSecurityDetectionsAPIEcsMappingValue0() (SecurityDetectionsAPIEcsMappingValue0, error) { + var body SecurityDetectionsAPIEcsMappingValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEcsMappingValue0 overwrites any union data inside the SecurityDetectionsAPIEcsMapping_Value as the provided SecurityDetectionsAPIEcsMappingValue0 +func (t *SecurityDetectionsAPIEcsMapping_Value) FromSecurityDetectionsAPIEcsMappingValue0(v SecurityDetectionsAPIEcsMappingValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEcsMappingValue0 performs a merge with any union data inside the SecurityDetectionsAPIEcsMapping_Value, using the provided SecurityDetectionsAPIEcsMappingValue0 +func (t *SecurityDetectionsAPIEcsMapping_Value) MergeSecurityDetectionsAPIEcsMappingValue0(v SecurityDetectionsAPIEcsMappingValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIEcsMappingValue1 returns the union data inside the SecurityDetectionsAPIEcsMapping_Value as a SecurityDetectionsAPIEcsMappingValue1 +func (t SecurityDetectionsAPIEcsMapping_Value) AsSecurityDetectionsAPIEcsMappingValue1() (SecurityDetectionsAPIEcsMappingValue1, error) { + var body SecurityDetectionsAPIEcsMappingValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEcsMappingValue1 overwrites any union data inside the SecurityDetectionsAPIEcsMapping_Value as the provided SecurityDetectionsAPIEcsMappingValue1 +func (t *SecurityDetectionsAPIEcsMapping_Value) FromSecurityDetectionsAPIEcsMappingValue1(v SecurityDetectionsAPIEcsMappingValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEcsMappingValue1 performs a merge with any union data inside the SecurityDetectionsAPIEcsMapping_Value, using the provided SecurityDetectionsAPIEcsMappingValue1 +func (t *SecurityDetectionsAPIEcsMapping_Value) MergeSecurityDetectionsAPIEcsMappingValue1(v SecurityDetectionsAPIEcsMappingValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIEcsMapping_Value) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, 
err +} + +func (t *SecurityDetectionsAPIEcsMapping_Value) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIDefaultParams returns the union data inside the SecurityDetectionsAPIEndpointResponseAction_Params as a SecurityDetectionsAPIDefaultParams +func (t SecurityDetectionsAPIEndpointResponseAction_Params) AsSecurityDetectionsAPIDefaultParams() (SecurityDetectionsAPIDefaultParams, error) { + var body SecurityDetectionsAPIDefaultParams + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIDefaultParams overwrites any union data inside the SecurityDetectionsAPIEndpointResponseAction_Params as the provided SecurityDetectionsAPIDefaultParams +func (t *SecurityDetectionsAPIEndpointResponseAction_Params) FromSecurityDetectionsAPIDefaultParams(v SecurityDetectionsAPIDefaultParams) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIDefaultParams performs a merge with any union data inside the SecurityDetectionsAPIEndpointResponseAction_Params, using the provided SecurityDetectionsAPIDefaultParams +func (t *SecurityDetectionsAPIEndpointResponseAction_Params) MergeSecurityDetectionsAPIDefaultParams(v SecurityDetectionsAPIDefaultParams) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIProcessesParams returns the union data inside the SecurityDetectionsAPIEndpointResponseAction_Params as a SecurityDetectionsAPIProcessesParams +func (t SecurityDetectionsAPIEndpointResponseAction_Params) AsSecurityDetectionsAPIProcessesParams() (SecurityDetectionsAPIProcessesParams, error) { + var body SecurityDetectionsAPIProcessesParams + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIProcessesParams overwrites any union data inside the SecurityDetectionsAPIEndpointResponseAction_Params as the provided SecurityDetectionsAPIProcessesParams +func (t *SecurityDetectionsAPIEndpointResponseAction_Params) FromSecurityDetectionsAPIProcessesParams(v SecurityDetectionsAPIProcessesParams) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIProcessesParams performs a merge with any union data inside the SecurityDetectionsAPIEndpointResponseAction_Params, using the provided SecurityDetectionsAPIProcessesParams +func (t *SecurityDetectionsAPIEndpointResponseAction_Params) MergeSecurityDetectionsAPIProcessesParams(v SecurityDetectionsAPIProcessesParams) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIEndpointResponseAction_Params) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIEndpointResponseAction_Params) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIMachineLearningJobId0 returns the union data inside the SecurityDetectionsAPIMachineLearningJobId as a SecurityDetectionsAPIMachineLearningJobId0 +func (t SecurityDetectionsAPIMachineLearningJobId) AsSecurityDetectionsAPIMachineLearningJobId0() (SecurityDetectionsAPIMachineLearningJobId0, error) { + var body SecurityDetectionsAPIMachineLearningJobId0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// 
FromSecurityDetectionsAPIMachineLearningJobId0 overwrites any union data inside the SecurityDetectionsAPIMachineLearningJobId as the provided SecurityDetectionsAPIMachineLearningJobId0 +func (t *SecurityDetectionsAPIMachineLearningJobId) FromSecurityDetectionsAPIMachineLearningJobId0(v SecurityDetectionsAPIMachineLearningJobId0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIMachineLearningJobId0 performs a merge with any union data inside the SecurityDetectionsAPIMachineLearningJobId, using the provided SecurityDetectionsAPIMachineLearningJobId0 +func (t *SecurityDetectionsAPIMachineLearningJobId) MergeSecurityDetectionsAPIMachineLearningJobId0(v SecurityDetectionsAPIMachineLearningJobId0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIMachineLearningJobId1 returns the union data inside the SecurityDetectionsAPIMachineLearningJobId as a SecurityDetectionsAPIMachineLearningJobId1 +func (t SecurityDetectionsAPIMachineLearningJobId) AsSecurityDetectionsAPIMachineLearningJobId1() (SecurityDetectionsAPIMachineLearningJobId1, error) { + var body SecurityDetectionsAPIMachineLearningJobId1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIMachineLearningJobId1 overwrites any union data inside the SecurityDetectionsAPIMachineLearningJobId as the provided SecurityDetectionsAPIMachineLearningJobId1 +func (t *SecurityDetectionsAPIMachineLearningJobId) FromSecurityDetectionsAPIMachineLearningJobId1(v SecurityDetectionsAPIMachineLearningJobId1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIMachineLearningJobId1 performs a merge with any union data inside the SecurityDetectionsAPIMachineLearningJobId, using the provided SecurityDetectionsAPIMachineLearningJobId1 +func (t *SecurityDetectionsAPIMachineLearningJobId) MergeSecurityDetectionsAPIMachineLearningJobId1(v SecurityDetectionsAPIMachineLearningJobId1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIMachineLearningJobId) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIMachineLearningJobId) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIOsqueryResponseAction returns the union data inside the SecurityDetectionsAPIResponseAction as a SecurityDetectionsAPIOsqueryResponseAction +func (t SecurityDetectionsAPIResponseAction) AsSecurityDetectionsAPIOsqueryResponseAction() (SecurityDetectionsAPIOsqueryResponseAction, error) { + var body SecurityDetectionsAPIOsqueryResponseAction + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIOsqueryResponseAction overwrites any union data inside the SecurityDetectionsAPIResponseAction as the provided SecurityDetectionsAPIOsqueryResponseAction +func (t *SecurityDetectionsAPIResponseAction) FromSecurityDetectionsAPIOsqueryResponseAction(v SecurityDetectionsAPIOsqueryResponseAction) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIOsqueryResponseAction performs a merge with any union data inside the SecurityDetectionsAPIResponseAction, using the provided 
SecurityDetectionsAPIOsqueryResponseAction +func (t *SecurityDetectionsAPIResponseAction) MergeSecurityDetectionsAPIOsqueryResponseAction(v SecurityDetectionsAPIOsqueryResponseAction) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIEndpointResponseAction returns the union data inside the SecurityDetectionsAPIResponseAction as a SecurityDetectionsAPIEndpointResponseAction +func (t SecurityDetectionsAPIResponseAction) AsSecurityDetectionsAPIEndpointResponseAction() (SecurityDetectionsAPIEndpointResponseAction, error) { + var body SecurityDetectionsAPIEndpointResponseAction + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEndpointResponseAction overwrites any union data inside the SecurityDetectionsAPIResponseAction as the provided SecurityDetectionsAPIEndpointResponseAction +func (t *SecurityDetectionsAPIResponseAction) FromSecurityDetectionsAPIEndpointResponseAction(v SecurityDetectionsAPIEndpointResponseAction) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEndpointResponseAction performs a merge with any union data inside the SecurityDetectionsAPIResponseAction, using the provided SecurityDetectionsAPIEndpointResponseAction +func (t *SecurityDetectionsAPIResponseAction) MergeSecurityDetectionsAPIEndpointResponseAction(v SecurityDetectionsAPIEndpointResponseAction) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIResponseAction) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIResponseAction) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIRuleActionThrottle0 returns the union data inside the SecurityDetectionsAPIRuleActionThrottle as a SecurityDetectionsAPIRuleActionThrottle0 +func (t SecurityDetectionsAPIRuleActionThrottle) AsSecurityDetectionsAPIRuleActionThrottle0() (SecurityDetectionsAPIRuleActionThrottle0, error) { + var body SecurityDetectionsAPIRuleActionThrottle0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIRuleActionThrottle0 overwrites any union data inside the SecurityDetectionsAPIRuleActionThrottle as the provided SecurityDetectionsAPIRuleActionThrottle0 +func (t *SecurityDetectionsAPIRuleActionThrottle) FromSecurityDetectionsAPIRuleActionThrottle0(v SecurityDetectionsAPIRuleActionThrottle0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIRuleActionThrottle0 performs a merge with any union data inside the SecurityDetectionsAPIRuleActionThrottle, using the provided SecurityDetectionsAPIRuleActionThrottle0 +func (t *SecurityDetectionsAPIRuleActionThrottle) MergeSecurityDetectionsAPIRuleActionThrottle0(v SecurityDetectionsAPIRuleActionThrottle0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIRuleActionThrottle1 returns the union data inside the SecurityDetectionsAPIRuleActionThrottle as a SecurityDetectionsAPIRuleActionThrottle1 +func (t SecurityDetectionsAPIRuleActionThrottle) AsSecurityDetectionsAPIRuleActionThrottle1() 
(SecurityDetectionsAPIRuleActionThrottle1, error) { + var body SecurityDetectionsAPIRuleActionThrottle1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIRuleActionThrottle1 overwrites any union data inside the SecurityDetectionsAPIRuleActionThrottle as the provided SecurityDetectionsAPIRuleActionThrottle1 +func (t *SecurityDetectionsAPIRuleActionThrottle) FromSecurityDetectionsAPIRuleActionThrottle1(v SecurityDetectionsAPIRuleActionThrottle1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIRuleActionThrottle1 performs a merge with any union data inside the SecurityDetectionsAPIRuleActionThrottle, using the provided SecurityDetectionsAPIRuleActionThrottle1 +func (t *SecurityDetectionsAPIRuleActionThrottle) MergeSecurityDetectionsAPIRuleActionThrottle1(v SecurityDetectionsAPIRuleActionThrottle1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIRuleActionThrottle) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIRuleActionThrottle) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIEqlRuleCreateProps returns the union data inside the SecurityDetectionsAPIRuleCreateProps as a SecurityDetectionsAPIEqlRuleCreateProps +func (t SecurityDetectionsAPIRuleCreateProps) AsSecurityDetectionsAPIEqlRuleCreateProps() (SecurityDetectionsAPIEqlRuleCreateProps, error) { + var body SecurityDetectionsAPIEqlRuleCreateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEqlRuleCreateProps overwrites any union data inside the SecurityDetectionsAPIRuleCreateProps as the provided SecurityDetectionsAPIEqlRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) FromSecurityDetectionsAPIEqlRuleCreateProps(v SecurityDetectionsAPIEqlRuleCreateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEqlRuleCreateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleCreateProps, using the provided SecurityDetectionsAPIEqlRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) MergeSecurityDetectionsAPIEqlRuleCreateProps(v SecurityDetectionsAPIEqlRuleCreateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIQueryRuleCreateProps returns the union data inside the SecurityDetectionsAPIRuleCreateProps as a SecurityDetectionsAPIQueryRuleCreateProps +func (t SecurityDetectionsAPIRuleCreateProps) AsSecurityDetectionsAPIQueryRuleCreateProps() (SecurityDetectionsAPIQueryRuleCreateProps, error) { + var body SecurityDetectionsAPIQueryRuleCreateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIQueryRuleCreateProps overwrites any union data inside the SecurityDetectionsAPIRuleCreateProps as the provided SecurityDetectionsAPIQueryRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) FromSecurityDetectionsAPIQueryRuleCreateProps(v SecurityDetectionsAPIQueryRuleCreateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIQueryRuleCreateProps performs a merge with any union data inside the 
SecurityDetectionsAPIRuleCreateProps, using the provided SecurityDetectionsAPIQueryRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) MergeSecurityDetectionsAPIQueryRuleCreateProps(v SecurityDetectionsAPIQueryRuleCreateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPISavedQueryRuleCreateProps returns the union data inside the SecurityDetectionsAPIRuleCreateProps as a SecurityDetectionsAPISavedQueryRuleCreateProps +func (t SecurityDetectionsAPIRuleCreateProps) AsSecurityDetectionsAPISavedQueryRuleCreateProps() (SecurityDetectionsAPISavedQueryRuleCreateProps, error) { + var body SecurityDetectionsAPISavedQueryRuleCreateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPISavedQueryRuleCreateProps overwrites any union data inside the SecurityDetectionsAPIRuleCreateProps as the provided SecurityDetectionsAPISavedQueryRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) FromSecurityDetectionsAPISavedQueryRuleCreateProps(v SecurityDetectionsAPISavedQueryRuleCreateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPISavedQueryRuleCreateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleCreateProps, using the provided SecurityDetectionsAPISavedQueryRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) MergeSecurityDetectionsAPISavedQueryRuleCreateProps(v SecurityDetectionsAPISavedQueryRuleCreateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIThresholdRuleCreateProps returns the union data inside the SecurityDetectionsAPIRuleCreateProps as a SecurityDetectionsAPIThresholdRuleCreateProps +func (t SecurityDetectionsAPIRuleCreateProps) AsSecurityDetectionsAPIThresholdRuleCreateProps() (SecurityDetectionsAPIThresholdRuleCreateProps, error) { + var body SecurityDetectionsAPIThresholdRuleCreateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIThresholdRuleCreateProps overwrites any union data inside the SecurityDetectionsAPIRuleCreateProps as the provided SecurityDetectionsAPIThresholdRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) FromSecurityDetectionsAPIThresholdRuleCreateProps(v SecurityDetectionsAPIThresholdRuleCreateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIThresholdRuleCreateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleCreateProps, using the provided SecurityDetectionsAPIThresholdRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) MergeSecurityDetectionsAPIThresholdRuleCreateProps(v SecurityDetectionsAPIThresholdRuleCreateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIThreatMatchRuleCreateProps returns the union data inside the SecurityDetectionsAPIRuleCreateProps as a SecurityDetectionsAPIThreatMatchRuleCreateProps +func (t SecurityDetectionsAPIRuleCreateProps) AsSecurityDetectionsAPIThreatMatchRuleCreateProps() (SecurityDetectionsAPIThreatMatchRuleCreateProps, error) { + var body SecurityDetectionsAPIThreatMatchRuleCreateProps + err := 
json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIThreatMatchRuleCreateProps overwrites any union data inside the SecurityDetectionsAPIRuleCreateProps as the provided SecurityDetectionsAPIThreatMatchRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) FromSecurityDetectionsAPIThreatMatchRuleCreateProps(v SecurityDetectionsAPIThreatMatchRuleCreateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIThreatMatchRuleCreateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleCreateProps, using the provided SecurityDetectionsAPIThreatMatchRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) MergeSecurityDetectionsAPIThreatMatchRuleCreateProps(v SecurityDetectionsAPIThreatMatchRuleCreateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIMachineLearningRuleCreateProps returns the union data inside the SecurityDetectionsAPIRuleCreateProps as a SecurityDetectionsAPIMachineLearningRuleCreateProps +func (t SecurityDetectionsAPIRuleCreateProps) AsSecurityDetectionsAPIMachineLearningRuleCreateProps() (SecurityDetectionsAPIMachineLearningRuleCreateProps, error) { + var body SecurityDetectionsAPIMachineLearningRuleCreateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIMachineLearningRuleCreateProps overwrites any union data inside the SecurityDetectionsAPIRuleCreateProps as the provided SecurityDetectionsAPIMachineLearningRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) FromSecurityDetectionsAPIMachineLearningRuleCreateProps(v SecurityDetectionsAPIMachineLearningRuleCreateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIMachineLearningRuleCreateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleCreateProps, using the provided SecurityDetectionsAPIMachineLearningRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) MergeSecurityDetectionsAPIMachineLearningRuleCreateProps(v SecurityDetectionsAPIMachineLearningRuleCreateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPINewTermsRuleCreateProps returns the union data inside the SecurityDetectionsAPIRuleCreateProps as a SecurityDetectionsAPINewTermsRuleCreateProps +func (t SecurityDetectionsAPIRuleCreateProps) AsSecurityDetectionsAPINewTermsRuleCreateProps() (SecurityDetectionsAPINewTermsRuleCreateProps, error) { + var body SecurityDetectionsAPINewTermsRuleCreateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPINewTermsRuleCreateProps overwrites any union data inside the SecurityDetectionsAPIRuleCreateProps as the provided SecurityDetectionsAPINewTermsRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) FromSecurityDetectionsAPINewTermsRuleCreateProps(v SecurityDetectionsAPINewTermsRuleCreateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPINewTermsRuleCreateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleCreateProps, using the provided SecurityDetectionsAPINewTermsRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) 
MergeSecurityDetectionsAPINewTermsRuleCreateProps(v SecurityDetectionsAPINewTermsRuleCreateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIEsqlRuleCreateProps returns the union data inside the SecurityDetectionsAPIRuleCreateProps as a SecurityDetectionsAPIEsqlRuleCreateProps +func (t SecurityDetectionsAPIRuleCreateProps) AsSecurityDetectionsAPIEsqlRuleCreateProps() (SecurityDetectionsAPIEsqlRuleCreateProps, error) { + var body SecurityDetectionsAPIEsqlRuleCreateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEsqlRuleCreateProps overwrites any union data inside the SecurityDetectionsAPIRuleCreateProps as the provided SecurityDetectionsAPIEsqlRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) FromSecurityDetectionsAPIEsqlRuleCreateProps(v SecurityDetectionsAPIEsqlRuleCreateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEsqlRuleCreateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleCreateProps, using the provided SecurityDetectionsAPIEsqlRuleCreateProps +func (t *SecurityDetectionsAPIRuleCreateProps) MergeSecurityDetectionsAPIEsqlRuleCreateProps(v SecurityDetectionsAPIEsqlRuleCreateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIRuleCreateProps) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIRuleCreateProps) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIEqlRulePatchProps returns the union data inside the SecurityDetectionsAPIRulePatchProps as a SecurityDetectionsAPIEqlRulePatchProps +func (t SecurityDetectionsAPIRulePatchProps) AsSecurityDetectionsAPIEqlRulePatchProps() (SecurityDetectionsAPIEqlRulePatchProps, error) { + var body SecurityDetectionsAPIEqlRulePatchProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEqlRulePatchProps overwrites any union data inside the SecurityDetectionsAPIRulePatchProps as the provided SecurityDetectionsAPIEqlRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) FromSecurityDetectionsAPIEqlRulePatchProps(v SecurityDetectionsAPIEqlRulePatchProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEqlRulePatchProps performs a merge with any union data inside the SecurityDetectionsAPIRulePatchProps, using the provided SecurityDetectionsAPIEqlRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) MergeSecurityDetectionsAPIEqlRulePatchProps(v SecurityDetectionsAPIEqlRulePatchProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIQueryRulePatchProps returns the union data inside the SecurityDetectionsAPIRulePatchProps as a SecurityDetectionsAPIQueryRulePatchProps +func (t SecurityDetectionsAPIRulePatchProps) AsSecurityDetectionsAPIQueryRulePatchProps() (SecurityDetectionsAPIQueryRulePatchProps, error) { + var body SecurityDetectionsAPIQueryRulePatchProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// 
FromSecurityDetectionsAPIQueryRulePatchProps overwrites any union data inside the SecurityDetectionsAPIRulePatchProps as the provided SecurityDetectionsAPIQueryRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) FromSecurityDetectionsAPIQueryRulePatchProps(v SecurityDetectionsAPIQueryRulePatchProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIQueryRulePatchProps performs a merge with any union data inside the SecurityDetectionsAPIRulePatchProps, using the provided SecurityDetectionsAPIQueryRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) MergeSecurityDetectionsAPIQueryRulePatchProps(v SecurityDetectionsAPIQueryRulePatchProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPISavedQueryRulePatchProps returns the union data inside the SecurityDetectionsAPIRulePatchProps as a SecurityDetectionsAPISavedQueryRulePatchProps +func (t SecurityDetectionsAPIRulePatchProps) AsSecurityDetectionsAPISavedQueryRulePatchProps() (SecurityDetectionsAPISavedQueryRulePatchProps, error) { + var body SecurityDetectionsAPISavedQueryRulePatchProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPISavedQueryRulePatchProps overwrites any union data inside the SecurityDetectionsAPIRulePatchProps as the provided SecurityDetectionsAPISavedQueryRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) FromSecurityDetectionsAPISavedQueryRulePatchProps(v SecurityDetectionsAPISavedQueryRulePatchProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPISavedQueryRulePatchProps performs a merge with any union data inside the SecurityDetectionsAPIRulePatchProps, using the provided SecurityDetectionsAPISavedQueryRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) MergeSecurityDetectionsAPISavedQueryRulePatchProps(v SecurityDetectionsAPISavedQueryRulePatchProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIThresholdRulePatchProps returns the union data inside the SecurityDetectionsAPIRulePatchProps as a SecurityDetectionsAPIThresholdRulePatchProps +func (t SecurityDetectionsAPIRulePatchProps) AsSecurityDetectionsAPIThresholdRulePatchProps() (SecurityDetectionsAPIThresholdRulePatchProps, error) { + var body SecurityDetectionsAPIThresholdRulePatchProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIThresholdRulePatchProps overwrites any union data inside the SecurityDetectionsAPIRulePatchProps as the provided SecurityDetectionsAPIThresholdRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) FromSecurityDetectionsAPIThresholdRulePatchProps(v SecurityDetectionsAPIThresholdRulePatchProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIThresholdRulePatchProps performs a merge with any union data inside the SecurityDetectionsAPIRulePatchProps, using the provided SecurityDetectionsAPIThresholdRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) MergeSecurityDetectionsAPIThresholdRulePatchProps(v SecurityDetectionsAPIThresholdRulePatchProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged 
+ return err +} + +// AsSecurityDetectionsAPIThreatMatchRulePatchProps returns the union data inside the SecurityDetectionsAPIRulePatchProps as a SecurityDetectionsAPIThreatMatchRulePatchProps +func (t SecurityDetectionsAPIRulePatchProps) AsSecurityDetectionsAPIThreatMatchRulePatchProps() (SecurityDetectionsAPIThreatMatchRulePatchProps, error) { + var body SecurityDetectionsAPIThreatMatchRulePatchProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIThreatMatchRulePatchProps overwrites any union data inside the SecurityDetectionsAPIRulePatchProps as the provided SecurityDetectionsAPIThreatMatchRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) FromSecurityDetectionsAPIThreatMatchRulePatchProps(v SecurityDetectionsAPIThreatMatchRulePatchProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIThreatMatchRulePatchProps performs a merge with any union data inside the SecurityDetectionsAPIRulePatchProps, using the provided SecurityDetectionsAPIThreatMatchRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) MergeSecurityDetectionsAPIThreatMatchRulePatchProps(v SecurityDetectionsAPIThreatMatchRulePatchProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIMachineLearningRulePatchProps returns the union data inside the SecurityDetectionsAPIRulePatchProps as a SecurityDetectionsAPIMachineLearningRulePatchProps +func (t SecurityDetectionsAPIRulePatchProps) AsSecurityDetectionsAPIMachineLearningRulePatchProps() (SecurityDetectionsAPIMachineLearningRulePatchProps, error) { + var body SecurityDetectionsAPIMachineLearningRulePatchProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIMachineLearningRulePatchProps overwrites any union data inside the SecurityDetectionsAPIRulePatchProps as the provided SecurityDetectionsAPIMachineLearningRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) FromSecurityDetectionsAPIMachineLearningRulePatchProps(v SecurityDetectionsAPIMachineLearningRulePatchProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIMachineLearningRulePatchProps performs a merge with any union data inside the SecurityDetectionsAPIRulePatchProps, using the provided SecurityDetectionsAPIMachineLearningRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) MergeSecurityDetectionsAPIMachineLearningRulePatchProps(v SecurityDetectionsAPIMachineLearningRulePatchProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPINewTermsRulePatchProps returns the union data inside the SecurityDetectionsAPIRulePatchProps as a SecurityDetectionsAPINewTermsRulePatchProps +func (t SecurityDetectionsAPIRulePatchProps) AsSecurityDetectionsAPINewTermsRulePatchProps() (SecurityDetectionsAPINewTermsRulePatchProps, error) { + var body SecurityDetectionsAPINewTermsRulePatchProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPINewTermsRulePatchProps overwrites any union data inside the SecurityDetectionsAPIRulePatchProps as the provided SecurityDetectionsAPINewTermsRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) FromSecurityDetectionsAPINewTermsRulePatchProps(v 
SecurityDetectionsAPINewTermsRulePatchProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPINewTermsRulePatchProps performs a merge with any union data inside the SecurityDetectionsAPIRulePatchProps, using the provided SecurityDetectionsAPINewTermsRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) MergeSecurityDetectionsAPINewTermsRulePatchProps(v SecurityDetectionsAPINewTermsRulePatchProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIEsqlRulePatchProps returns the union data inside the SecurityDetectionsAPIRulePatchProps as a SecurityDetectionsAPIEsqlRulePatchProps +func (t SecurityDetectionsAPIRulePatchProps) AsSecurityDetectionsAPIEsqlRulePatchProps() (SecurityDetectionsAPIEsqlRulePatchProps, error) { + var body SecurityDetectionsAPIEsqlRulePatchProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEsqlRulePatchProps overwrites any union data inside the SecurityDetectionsAPIRulePatchProps as the provided SecurityDetectionsAPIEsqlRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) FromSecurityDetectionsAPIEsqlRulePatchProps(v SecurityDetectionsAPIEsqlRulePatchProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEsqlRulePatchProps performs a merge with any union data inside the SecurityDetectionsAPIRulePatchProps, using the provided SecurityDetectionsAPIEsqlRulePatchProps +func (t *SecurityDetectionsAPIRulePatchProps) MergeSecurityDetectionsAPIEsqlRulePatchProps(v SecurityDetectionsAPIEsqlRulePatchProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIRulePatchProps) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIRulePatchProps) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIEqlRule returns the union data inside the SecurityDetectionsAPIRuleResponse as a SecurityDetectionsAPIEqlRule +func (t SecurityDetectionsAPIRuleResponse) AsSecurityDetectionsAPIEqlRule() (SecurityDetectionsAPIEqlRule, error) { + var body SecurityDetectionsAPIEqlRule + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEqlRule overwrites any union data inside the SecurityDetectionsAPIRuleResponse as the provided SecurityDetectionsAPIEqlRule +func (t *SecurityDetectionsAPIRuleResponse) FromSecurityDetectionsAPIEqlRule(v SecurityDetectionsAPIEqlRule) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEqlRule performs a merge with any union data inside the SecurityDetectionsAPIRuleResponse, using the provided SecurityDetectionsAPIEqlRule +func (t *SecurityDetectionsAPIRuleResponse) MergeSecurityDetectionsAPIEqlRule(v SecurityDetectionsAPIEqlRule) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIQueryRule returns the union data inside the SecurityDetectionsAPIRuleResponse as a SecurityDetectionsAPIQueryRule +func (t SecurityDetectionsAPIRuleResponse) AsSecurityDetectionsAPIQueryRule() (SecurityDetectionsAPIQueryRule, error) { + var 
body SecurityDetectionsAPIQueryRule + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIQueryRule overwrites any union data inside the SecurityDetectionsAPIRuleResponse as the provided SecurityDetectionsAPIQueryRule +func (t *SecurityDetectionsAPIRuleResponse) FromSecurityDetectionsAPIQueryRule(v SecurityDetectionsAPIQueryRule) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIQueryRule performs a merge with any union data inside the SecurityDetectionsAPIRuleResponse, using the provided SecurityDetectionsAPIQueryRule +func (t *SecurityDetectionsAPIRuleResponse) MergeSecurityDetectionsAPIQueryRule(v SecurityDetectionsAPIQueryRule) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPISavedQueryRule returns the union data inside the SecurityDetectionsAPIRuleResponse as a SecurityDetectionsAPISavedQueryRule +func (t SecurityDetectionsAPIRuleResponse) AsSecurityDetectionsAPISavedQueryRule() (SecurityDetectionsAPISavedQueryRule, error) { + var body SecurityDetectionsAPISavedQueryRule + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPISavedQueryRule overwrites any union data inside the SecurityDetectionsAPIRuleResponse as the provided SecurityDetectionsAPISavedQueryRule +func (t *SecurityDetectionsAPIRuleResponse) FromSecurityDetectionsAPISavedQueryRule(v SecurityDetectionsAPISavedQueryRule) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPISavedQueryRule performs a merge with any union data inside the SecurityDetectionsAPIRuleResponse, using the provided SecurityDetectionsAPISavedQueryRule +func (t *SecurityDetectionsAPIRuleResponse) MergeSecurityDetectionsAPISavedQueryRule(v SecurityDetectionsAPISavedQueryRule) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIThresholdRule returns the union data inside the SecurityDetectionsAPIRuleResponse as a SecurityDetectionsAPIThresholdRule +func (t SecurityDetectionsAPIRuleResponse) AsSecurityDetectionsAPIThresholdRule() (SecurityDetectionsAPIThresholdRule, error) { + var body SecurityDetectionsAPIThresholdRule + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIThresholdRule overwrites any union data inside the SecurityDetectionsAPIRuleResponse as the provided SecurityDetectionsAPIThresholdRule +func (t *SecurityDetectionsAPIRuleResponse) FromSecurityDetectionsAPIThresholdRule(v SecurityDetectionsAPIThresholdRule) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIThresholdRule performs a merge with any union data inside the SecurityDetectionsAPIRuleResponse, using the provided SecurityDetectionsAPIThresholdRule +func (t *SecurityDetectionsAPIRuleResponse) MergeSecurityDetectionsAPIThresholdRule(v SecurityDetectionsAPIThresholdRule) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIThreatMatchRule returns the union data inside the SecurityDetectionsAPIRuleResponse as a SecurityDetectionsAPIThreatMatchRule +func (t SecurityDetectionsAPIRuleResponse) AsSecurityDetectionsAPIThreatMatchRule() 
(SecurityDetectionsAPIThreatMatchRule, error) { + var body SecurityDetectionsAPIThreatMatchRule + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIThreatMatchRule overwrites any union data inside the SecurityDetectionsAPIRuleResponse as the provided SecurityDetectionsAPIThreatMatchRule +func (t *SecurityDetectionsAPIRuleResponse) FromSecurityDetectionsAPIThreatMatchRule(v SecurityDetectionsAPIThreatMatchRule) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIThreatMatchRule performs a merge with any union data inside the SecurityDetectionsAPIRuleResponse, using the provided SecurityDetectionsAPIThreatMatchRule +func (t *SecurityDetectionsAPIRuleResponse) MergeSecurityDetectionsAPIThreatMatchRule(v SecurityDetectionsAPIThreatMatchRule) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIMachineLearningRule returns the union data inside the SecurityDetectionsAPIRuleResponse as a SecurityDetectionsAPIMachineLearningRule +func (t SecurityDetectionsAPIRuleResponse) AsSecurityDetectionsAPIMachineLearningRule() (SecurityDetectionsAPIMachineLearningRule, error) { + var body SecurityDetectionsAPIMachineLearningRule + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIMachineLearningRule overwrites any union data inside the SecurityDetectionsAPIRuleResponse as the provided SecurityDetectionsAPIMachineLearningRule +func (t *SecurityDetectionsAPIRuleResponse) FromSecurityDetectionsAPIMachineLearningRule(v SecurityDetectionsAPIMachineLearningRule) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIMachineLearningRule performs a merge with any union data inside the SecurityDetectionsAPIRuleResponse, using the provided SecurityDetectionsAPIMachineLearningRule +func (t *SecurityDetectionsAPIRuleResponse) MergeSecurityDetectionsAPIMachineLearningRule(v SecurityDetectionsAPIMachineLearningRule) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPINewTermsRule returns the union data inside the SecurityDetectionsAPIRuleResponse as a SecurityDetectionsAPINewTermsRule +func (t SecurityDetectionsAPIRuleResponse) AsSecurityDetectionsAPINewTermsRule() (SecurityDetectionsAPINewTermsRule, error) { + var body SecurityDetectionsAPINewTermsRule + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPINewTermsRule overwrites any union data inside the SecurityDetectionsAPIRuleResponse as the provided SecurityDetectionsAPINewTermsRule +func (t *SecurityDetectionsAPIRuleResponse) FromSecurityDetectionsAPINewTermsRule(v SecurityDetectionsAPINewTermsRule) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPINewTermsRule performs a merge with any union data inside the SecurityDetectionsAPIRuleResponse, using the provided SecurityDetectionsAPINewTermsRule +func (t *SecurityDetectionsAPIRuleResponse) MergeSecurityDetectionsAPINewTermsRule(v SecurityDetectionsAPINewTermsRule) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIEsqlRule returns the union data inside the 
SecurityDetectionsAPIRuleResponse as a SecurityDetectionsAPIEsqlRule +func (t SecurityDetectionsAPIRuleResponse) AsSecurityDetectionsAPIEsqlRule() (SecurityDetectionsAPIEsqlRule, error) { + var body SecurityDetectionsAPIEsqlRule + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEsqlRule overwrites any union data inside the SecurityDetectionsAPIRuleResponse as the provided SecurityDetectionsAPIEsqlRule +func (t *SecurityDetectionsAPIRuleResponse) FromSecurityDetectionsAPIEsqlRule(v SecurityDetectionsAPIEsqlRule) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEsqlRule performs a merge with any union data inside the SecurityDetectionsAPIRuleResponse, using the provided SecurityDetectionsAPIEsqlRule +func (t *SecurityDetectionsAPIRuleResponse) MergeSecurityDetectionsAPIEsqlRule(v SecurityDetectionsAPIEsqlRule) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIRuleResponse) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIRuleResponse) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIExternalRuleSource returns the union data inside the SecurityDetectionsAPIRuleSource as a SecurityDetectionsAPIExternalRuleSource +func (t SecurityDetectionsAPIRuleSource) AsSecurityDetectionsAPIExternalRuleSource() (SecurityDetectionsAPIExternalRuleSource, error) { + var body SecurityDetectionsAPIExternalRuleSource + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIExternalRuleSource overwrites any union data inside the SecurityDetectionsAPIRuleSource as the provided SecurityDetectionsAPIExternalRuleSource +func (t *SecurityDetectionsAPIRuleSource) FromSecurityDetectionsAPIExternalRuleSource(v SecurityDetectionsAPIExternalRuleSource) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIExternalRuleSource performs a merge with any union data inside the SecurityDetectionsAPIRuleSource, using the provided SecurityDetectionsAPIExternalRuleSource +func (t *SecurityDetectionsAPIRuleSource) MergeSecurityDetectionsAPIExternalRuleSource(v SecurityDetectionsAPIExternalRuleSource) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIInternalRuleSource returns the union data inside the SecurityDetectionsAPIRuleSource as a SecurityDetectionsAPIInternalRuleSource +func (t SecurityDetectionsAPIRuleSource) AsSecurityDetectionsAPIInternalRuleSource() (SecurityDetectionsAPIInternalRuleSource, error) { + var body SecurityDetectionsAPIInternalRuleSource + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIInternalRuleSource overwrites any union data inside the SecurityDetectionsAPIRuleSource as the provided SecurityDetectionsAPIInternalRuleSource +func (t *SecurityDetectionsAPIRuleSource) FromSecurityDetectionsAPIInternalRuleSource(v SecurityDetectionsAPIInternalRuleSource) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIInternalRuleSource performs a merge with any union data inside the SecurityDetectionsAPIRuleSource, using the provided SecurityDetectionsAPIInternalRuleSource 
+func (t *SecurityDetectionsAPIRuleSource) MergeSecurityDetectionsAPIInternalRuleSource(v SecurityDetectionsAPIInternalRuleSource) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIRuleSource) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIRuleSource) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIEqlRuleUpdateProps returns the union data inside the SecurityDetectionsAPIRuleUpdateProps as a SecurityDetectionsAPIEqlRuleUpdateProps +func (t SecurityDetectionsAPIRuleUpdateProps) AsSecurityDetectionsAPIEqlRuleUpdateProps() (SecurityDetectionsAPIEqlRuleUpdateProps, error) { + var body SecurityDetectionsAPIEqlRuleUpdateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEqlRuleUpdateProps overwrites any union data inside the SecurityDetectionsAPIRuleUpdateProps as the provided SecurityDetectionsAPIEqlRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) FromSecurityDetectionsAPIEqlRuleUpdateProps(v SecurityDetectionsAPIEqlRuleUpdateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEqlRuleUpdateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleUpdateProps, using the provided SecurityDetectionsAPIEqlRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) MergeSecurityDetectionsAPIEqlRuleUpdateProps(v SecurityDetectionsAPIEqlRuleUpdateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIQueryRuleUpdateProps returns the union data inside the SecurityDetectionsAPIRuleUpdateProps as a SecurityDetectionsAPIQueryRuleUpdateProps +func (t SecurityDetectionsAPIRuleUpdateProps) AsSecurityDetectionsAPIQueryRuleUpdateProps() (SecurityDetectionsAPIQueryRuleUpdateProps, error) { + var body SecurityDetectionsAPIQueryRuleUpdateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIQueryRuleUpdateProps overwrites any union data inside the SecurityDetectionsAPIRuleUpdateProps as the provided SecurityDetectionsAPIQueryRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) FromSecurityDetectionsAPIQueryRuleUpdateProps(v SecurityDetectionsAPIQueryRuleUpdateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIQueryRuleUpdateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleUpdateProps, using the provided SecurityDetectionsAPIQueryRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) MergeSecurityDetectionsAPIQueryRuleUpdateProps(v SecurityDetectionsAPIQueryRuleUpdateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPISavedQueryRuleUpdateProps returns the union data inside the SecurityDetectionsAPIRuleUpdateProps as a SecurityDetectionsAPISavedQueryRuleUpdateProps +func (t SecurityDetectionsAPIRuleUpdateProps) AsSecurityDetectionsAPISavedQueryRuleUpdateProps() (SecurityDetectionsAPISavedQueryRuleUpdateProps, error) { + var body SecurityDetectionsAPISavedQueryRuleUpdateProps + err := 
json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPISavedQueryRuleUpdateProps overwrites any union data inside the SecurityDetectionsAPIRuleUpdateProps as the provided SecurityDetectionsAPISavedQueryRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) FromSecurityDetectionsAPISavedQueryRuleUpdateProps(v SecurityDetectionsAPISavedQueryRuleUpdateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPISavedQueryRuleUpdateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleUpdateProps, using the provided SecurityDetectionsAPISavedQueryRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) MergeSecurityDetectionsAPISavedQueryRuleUpdateProps(v SecurityDetectionsAPISavedQueryRuleUpdateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIThresholdRuleUpdateProps returns the union data inside the SecurityDetectionsAPIRuleUpdateProps as a SecurityDetectionsAPIThresholdRuleUpdateProps +func (t SecurityDetectionsAPIRuleUpdateProps) AsSecurityDetectionsAPIThresholdRuleUpdateProps() (SecurityDetectionsAPIThresholdRuleUpdateProps, error) { + var body SecurityDetectionsAPIThresholdRuleUpdateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIThresholdRuleUpdateProps overwrites any union data inside the SecurityDetectionsAPIRuleUpdateProps as the provided SecurityDetectionsAPIThresholdRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) FromSecurityDetectionsAPIThresholdRuleUpdateProps(v SecurityDetectionsAPIThresholdRuleUpdateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIThresholdRuleUpdateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleUpdateProps, using the provided SecurityDetectionsAPIThresholdRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) MergeSecurityDetectionsAPIThresholdRuleUpdateProps(v SecurityDetectionsAPIThresholdRuleUpdateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIThreatMatchRuleUpdateProps returns the union data inside the SecurityDetectionsAPIRuleUpdateProps as a SecurityDetectionsAPIThreatMatchRuleUpdateProps +func (t SecurityDetectionsAPIRuleUpdateProps) AsSecurityDetectionsAPIThreatMatchRuleUpdateProps() (SecurityDetectionsAPIThreatMatchRuleUpdateProps, error) { + var body SecurityDetectionsAPIThreatMatchRuleUpdateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIThreatMatchRuleUpdateProps overwrites any union data inside the SecurityDetectionsAPIRuleUpdateProps as the provided SecurityDetectionsAPIThreatMatchRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) FromSecurityDetectionsAPIThreatMatchRuleUpdateProps(v SecurityDetectionsAPIThreatMatchRuleUpdateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIThreatMatchRuleUpdateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleUpdateProps, using the provided SecurityDetectionsAPIThreatMatchRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) MergeSecurityDetectionsAPIThreatMatchRuleUpdateProps(v 
SecurityDetectionsAPIThreatMatchRuleUpdateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIMachineLearningRuleUpdateProps returns the union data inside the SecurityDetectionsAPIRuleUpdateProps as a SecurityDetectionsAPIMachineLearningRuleUpdateProps +func (t SecurityDetectionsAPIRuleUpdateProps) AsSecurityDetectionsAPIMachineLearningRuleUpdateProps() (SecurityDetectionsAPIMachineLearningRuleUpdateProps, error) { + var body SecurityDetectionsAPIMachineLearningRuleUpdateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIMachineLearningRuleUpdateProps overwrites any union data inside the SecurityDetectionsAPIRuleUpdateProps as the provided SecurityDetectionsAPIMachineLearningRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) FromSecurityDetectionsAPIMachineLearningRuleUpdateProps(v SecurityDetectionsAPIMachineLearningRuleUpdateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIMachineLearningRuleUpdateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleUpdateProps, using the provided SecurityDetectionsAPIMachineLearningRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) MergeSecurityDetectionsAPIMachineLearningRuleUpdateProps(v SecurityDetectionsAPIMachineLearningRuleUpdateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPINewTermsRuleUpdateProps returns the union data inside the SecurityDetectionsAPIRuleUpdateProps as a SecurityDetectionsAPINewTermsRuleUpdateProps +func (t SecurityDetectionsAPIRuleUpdateProps) AsSecurityDetectionsAPINewTermsRuleUpdateProps() (SecurityDetectionsAPINewTermsRuleUpdateProps, error) { + var body SecurityDetectionsAPINewTermsRuleUpdateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPINewTermsRuleUpdateProps overwrites any union data inside the SecurityDetectionsAPIRuleUpdateProps as the provided SecurityDetectionsAPINewTermsRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) FromSecurityDetectionsAPINewTermsRuleUpdateProps(v SecurityDetectionsAPINewTermsRuleUpdateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPINewTermsRuleUpdateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleUpdateProps, using the provided SecurityDetectionsAPINewTermsRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) MergeSecurityDetectionsAPINewTermsRuleUpdateProps(v SecurityDetectionsAPINewTermsRuleUpdateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIEsqlRuleUpdateProps returns the union data inside the SecurityDetectionsAPIRuleUpdateProps as a SecurityDetectionsAPIEsqlRuleUpdateProps +func (t SecurityDetectionsAPIRuleUpdateProps) AsSecurityDetectionsAPIEsqlRuleUpdateProps() (SecurityDetectionsAPIEsqlRuleUpdateProps, error) { + var body SecurityDetectionsAPIEsqlRuleUpdateProps + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIEsqlRuleUpdateProps overwrites any union data inside the SecurityDetectionsAPIRuleUpdateProps as the 
provided SecurityDetectionsAPIEsqlRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) FromSecurityDetectionsAPIEsqlRuleUpdateProps(v SecurityDetectionsAPIEsqlRuleUpdateProps) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIEsqlRuleUpdateProps performs a merge with any union data inside the SecurityDetectionsAPIRuleUpdateProps, using the provided SecurityDetectionsAPIEsqlRuleUpdateProps +func (t *SecurityDetectionsAPIRuleUpdateProps) MergeSecurityDetectionsAPIEsqlRuleUpdateProps(v SecurityDetectionsAPIEsqlRuleUpdateProps) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIRuleUpdateProps) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIRuleUpdateProps) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityDetectionsAPIThresholdField0 returns the union data inside the SecurityDetectionsAPIThresholdField as a SecurityDetectionsAPIThresholdField0 +func (t SecurityDetectionsAPIThresholdField) AsSecurityDetectionsAPIThresholdField0() (SecurityDetectionsAPIThresholdField0, error) { + var body SecurityDetectionsAPIThresholdField0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIThresholdField0 overwrites any union data inside the SecurityDetectionsAPIThresholdField as the provided SecurityDetectionsAPIThresholdField0 +func (t *SecurityDetectionsAPIThresholdField) FromSecurityDetectionsAPIThresholdField0(v SecurityDetectionsAPIThresholdField0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIThresholdField0 performs a merge with any union data inside the SecurityDetectionsAPIThresholdField, using the provided SecurityDetectionsAPIThresholdField0 +func (t *SecurityDetectionsAPIThresholdField) MergeSecurityDetectionsAPIThresholdField0(v SecurityDetectionsAPIThresholdField0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityDetectionsAPIThresholdField1 returns the union data inside the SecurityDetectionsAPIThresholdField as a SecurityDetectionsAPIThresholdField1 +func (t SecurityDetectionsAPIThresholdField) AsSecurityDetectionsAPIThresholdField1() (SecurityDetectionsAPIThresholdField1, error) { + var body SecurityDetectionsAPIThresholdField1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityDetectionsAPIThresholdField1 overwrites any union data inside the SecurityDetectionsAPIThresholdField as the provided SecurityDetectionsAPIThresholdField1 +func (t *SecurityDetectionsAPIThresholdField) FromSecurityDetectionsAPIThresholdField1(v SecurityDetectionsAPIThresholdField1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityDetectionsAPIThresholdField1 performs a merge with any union data inside the SecurityDetectionsAPIThresholdField, using the provided SecurityDetectionsAPIThresholdField1 +func (t *SecurityDetectionsAPIThresholdField) MergeSecurityDetectionsAPIThresholdField1(v SecurityDetectionsAPIThresholdField1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityDetectionsAPIThresholdField) 
MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityDetectionsAPIThresholdField) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEndpointExceptionsAPIExceptionList returns the union data inside the SecurityEndpointExceptionsAPIEndpointList as a SecurityEndpointExceptionsAPIExceptionList +func (t SecurityEndpointExceptionsAPIEndpointList) AsSecurityEndpointExceptionsAPIExceptionList() (SecurityEndpointExceptionsAPIExceptionList, error) { + var body SecurityEndpointExceptionsAPIExceptionList + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIExceptionList overwrites any union data inside the SecurityEndpointExceptionsAPIEndpointList as the provided SecurityEndpointExceptionsAPIExceptionList +func (t *SecurityEndpointExceptionsAPIEndpointList) FromSecurityEndpointExceptionsAPIExceptionList(v SecurityEndpointExceptionsAPIExceptionList) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIExceptionList performs a merge with any union data inside the SecurityEndpointExceptionsAPIEndpointList, using the provided SecurityEndpointExceptionsAPIExceptionList +func (t *SecurityEndpointExceptionsAPIEndpointList) MergeSecurityEndpointExceptionsAPIExceptionList(v SecurityEndpointExceptionsAPIExceptionList) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointExceptionsAPIEndpointList1 returns the union data inside the SecurityEndpointExceptionsAPIEndpointList as a SecurityEndpointExceptionsAPIEndpointList1 +func (t SecurityEndpointExceptionsAPIEndpointList) AsSecurityEndpointExceptionsAPIEndpointList1() (SecurityEndpointExceptionsAPIEndpointList1, error) { + var body SecurityEndpointExceptionsAPIEndpointList1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIEndpointList1 overwrites any union data inside the SecurityEndpointExceptionsAPIEndpointList as the provided SecurityEndpointExceptionsAPIEndpointList1 +func (t *SecurityEndpointExceptionsAPIEndpointList) FromSecurityEndpointExceptionsAPIEndpointList1(v SecurityEndpointExceptionsAPIEndpointList1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIEndpointList1 performs a merge with any union data inside the SecurityEndpointExceptionsAPIEndpointList, using the provided SecurityEndpointExceptionsAPIEndpointList1 +func (t *SecurityEndpointExceptionsAPIEndpointList) MergeSecurityEndpointExceptionsAPIEndpointList1(v SecurityEndpointExceptionsAPIEndpointList1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEndpointExceptionsAPIEndpointList) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointExceptionsAPIEndpointList) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEndpointExceptionsAPIExceptionListItemEntryMatch returns the union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as a SecurityEndpointExceptionsAPIExceptionListItemEntryMatch +func (t SecurityEndpointExceptionsAPIExceptionListItemEntry) AsSecurityEndpointExceptionsAPIExceptionListItemEntryMatch() 
(SecurityEndpointExceptionsAPIExceptionListItemEntryMatch, error) { + var body SecurityEndpointExceptionsAPIExceptionListItemEntryMatch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIExceptionListItemEntryMatch overwrites any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as the provided SecurityEndpointExceptionsAPIExceptionListItemEntryMatch +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) FromSecurityEndpointExceptionsAPIExceptionListItemEntryMatch(v SecurityEndpointExceptionsAPIExceptionListItemEntryMatch) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIExceptionListItemEntryMatch performs a merge with any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry, using the provided SecurityEndpointExceptionsAPIExceptionListItemEntryMatch +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) MergeSecurityEndpointExceptionsAPIExceptionListItemEntryMatch(v SecurityEndpointExceptionsAPIExceptionListItemEntryMatch) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny returns the union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as a SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny +func (t SecurityEndpointExceptionsAPIExceptionListItemEntry) AsSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny() (SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny, error) { + var body SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny overwrites any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as the provided SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) FromSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny(v SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny performs a merge with any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry, using the provided SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) MergeSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny(v SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointExceptionsAPIExceptionListItemEntryList returns the union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as a SecurityEndpointExceptionsAPIExceptionListItemEntryList +func (t SecurityEndpointExceptionsAPIExceptionListItemEntry) AsSecurityEndpointExceptionsAPIExceptionListItemEntryList() (SecurityEndpointExceptionsAPIExceptionListItemEntryList, error) { + var body SecurityEndpointExceptionsAPIExceptionListItemEntryList + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIExceptionListItemEntryList overwrites any union data inside the 
SecurityEndpointExceptionsAPIExceptionListItemEntry as the provided SecurityEndpointExceptionsAPIExceptionListItemEntryList +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) FromSecurityEndpointExceptionsAPIExceptionListItemEntryList(v SecurityEndpointExceptionsAPIExceptionListItemEntryList) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIExceptionListItemEntryList performs a merge with any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry, using the provided SecurityEndpointExceptionsAPIExceptionListItemEntryList +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) MergeSecurityEndpointExceptionsAPIExceptionListItemEntryList(v SecurityEndpointExceptionsAPIExceptionListItemEntryList) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointExceptionsAPIExceptionListItemEntryExists returns the union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as a SecurityEndpointExceptionsAPIExceptionListItemEntryExists +func (t SecurityEndpointExceptionsAPIExceptionListItemEntry) AsSecurityEndpointExceptionsAPIExceptionListItemEntryExists() (SecurityEndpointExceptionsAPIExceptionListItemEntryExists, error) { + var body SecurityEndpointExceptionsAPIExceptionListItemEntryExists + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIExceptionListItemEntryExists overwrites any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as the provided SecurityEndpointExceptionsAPIExceptionListItemEntryExists +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) FromSecurityEndpointExceptionsAPIExceptionListItemEntryExists(v SecurityEndpointExceptionsAPIExceptionListItemEntryExists) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIExceptionListItemEntryExists performs a merge with any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry, using the provided SecurityEndpointExceptionsAPIExceptionListItemEntryExists +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) MergeSecurityEndpointExceptionsAPIExceptionListItemEntryExists(v SecurityEndpointExceptionsAPIExceptionListItemEntryExists) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointExceptionsAPIExceptionListItemEntryNested returns the union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as a SecurityEndpointExceptionsAPIExceptionListItemEntryNested +func (t SecurityEndpointExceptionsAPIExceptionListItemEntry) AsSecurityEndpointExceptionsAPIExceptionListItemEntryNested() (SecurityEndpointExceptionsAPIExceptionListItemEntryNested, error) { + var body SecurityEndpointExceptionsAPIExceptionListItemEntryNested + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIExceptionListItemEntryNested overwrites any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as the provided SecurityEndpointExceptionsAPIExceptionListItemEntryNested +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) FromSecurityEndpointExceptionsAPIExceptionListItemEntryNested(v SecurityEndpointExceptionsAPIExceptionListItemEntryNested) error { + b, err := 
json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIExceptionListItemEntryNested performs a merge with any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry, using the provided SecurityEndpointExceptionsAPIExceptionListItemEntryNested +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) MergeSecurityEndpointExceptionsAPIExceptionListItemEntryNested(v SecurityEndpointExceptionsAPIExceptionListItemEntryNested) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard returns the union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as a SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard +func (t SecurityEndpointExceptionsAPIExceptionListItemEntry) AsSecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard() (SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard, error) { + var body SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard overwrites any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry as the provided SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) FromSecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard(v SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard performs a merge with any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntry, using the provided SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) MergeSecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard(v SecurityEndpointExceptionsAPIExceptionListItemEntryMatchWildcard) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEndpointExceptionsAPIExceptionListItemEntry) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntry) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEndpointExceptionsAPIExceptionListItemEntryMatch returns the union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem as a SecurityEndpointExceptionsAPIExceptionListItemEntryMatch +func (t SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) AsSecurityEndpointExceptionsAPIExceptionListItemEntryMatch() (SecurityEndpointExceptionsAPIExceptionListItemEntryMatch, error) { + var body SecurityEndpointExceptionsAPIExceptionListItemEntryMatch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIExceptionListItemEntryMatch overwrites any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem as the provided SecurityEndpointExceptionsAPIExceptionListItemEntryMatch +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) 
FromSecurityEndpointExceptionsAPIExceptionListItemEntryMatch(v SecurityEndpointExceptionsAPIExceptionListItemEntryMatch) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIExceptionListItemEntryMatch performs a merge with any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem, using the provided SecurityEndpointExceptionsAPIExceptionListItemEntryMatch +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) MergeSecurityEndpointExceptionsAPIExceptionListItemEntryMatch(v SecurityEndpointExceptionsAPIExceptionListItemEntryMatch) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny returns the union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem as a SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny +func (t SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) AsSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny() (SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny, error) { + var body SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny overwrites any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem as the provided SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) FromSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny(v SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny performs a merge with any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem, using the provided SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) MergeSecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny(v SecurityEndpointExceptionsAPIExceptionListItemEntryMatchAny) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointExceptionsAPIExceptionListItemEntryExists returns the union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem as a SecurityEndpointExceptionsAPIExceptionListItemEntryExists +func (t SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) AsSecurityEndpointExceptionsAPIExceptionListItemEntryExists() (SecurityEndpointExceptionsAPIExceptionListItemEntryExists, error) { + var body SecurityEndpointExceptionsAPIExceptionListItemEntryExists + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointExceptionsAPIExceptionListItemEntryExists overwrites any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem as the provided SecurityEndpointExceptionsAPIExceptionListItemEntryExists +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) FromSecurityEndpointExceptionsAPIExceptionListItemEntryExists(v SecurityEndpointExceptionsAPIExceptionListItemEntryExists) error { + 
b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointExceptionsAPIExceptionListItemEntryExists performs a merge with any union data inside the SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem, using the provided SecurityEndpointExceptionsAPIExceptionListItemEntryExists +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) MergeSecurityEndpointExceptionsAPIExceptionListItemEntryExists(v SecurityEndpointExceptionsAPIExceptionListItemEntryExists) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointExceptionsAPIExceptionListItemEntryNestedEntryItem) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEndpointManagementAPIAgentIds0 returns the union data inside the SecurityEndpointManagementAPIAgentIds as a SecurityEndpointManagementAPIAgentIds0 +func (t SecurityEndpointManagementAPIAgentIds) AsSecurityEndpointManagementAPIAgentIds0() (SecurityEndpointManagementAPIAgentIds0, error) { + var body SecurityEndpointManagementAPIAgentIds0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIAgentIds0 overwrites any union data inside the SecurityEndpointManagementAPIAgentIds as the provided SecurityEndpointManagementAPIAgentIds0 +func (t *SecurityEndpointManagementAPIAgentIds) FromSecurityEndpointManagementAPIAgentIds0(v SecurityEndpointManagementAPIAgentIds0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIAgentIds0 performs a merge with any union data inside the SecurityEndpointManagementAPIAgentIds, using the provided SecurityEndpointManagementAPIAgentIds0 +func (t *SecurityEndpointManagementAPIAgentIds) MergeSecurityEndpointManagementAPIAgentIds0(v SecurityEndpointManagementAPIAgentIds0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPIAgentIds1 returns the union data inside the SecurityEndpointManagementAPIAgentIds as a SecurityEndpointManagementAPIAgentIds1 +func (t SecurityEndpointManagementAPIAgentIds) AsSecurityEndpointManagementAPIAgentIds1() (SecurityEndpointManagementAPIAgentIds1, error) { + var body SecurityEndpointManagementAPIAgentIds1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIAgentIds1 overwrites any union data inside the SecurityEndpointManagementAPIAgentIds as the provided SecurityEndpointManagementAPIAgentIds1 +func (t *SecurityEndpointManagementAPIAgentIds) FromSecurityEndpointManagementAPIAgentIds1(v SecurityEndpointManagementAPIAgentIds1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIAgentIds1 performs a merge with any union data inside the SecurityEndpointManagementAPIAgentIds, using the provided SecurityEndpointManagementAPIAgentIds1 +func (t *SecurityEndpointManagementAPIAgentIds) MergeSecurityEndpointManagementAPIAgentIds1(v SecurityEndpointManagementAPIAgentIds1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return 
err +} + +func (t SecurityEndpointManagementAPIAgentIds) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointManagementAPIAgentIds) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0 returns the union data inside the SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters as a SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0 +func (t SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) AsSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0() (SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0, error) { + var body SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0 overwrites any union data inside the SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters as the provided SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0 +func (t *SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) FromSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0(v SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0 performs a merge with any union data inside the SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters, using the provided SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0 +func (t *SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) MergeSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0(v SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1 returns the union data inside the SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters as a SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1 +func (t SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) AsSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1() (SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1, error) { + var body SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1 overwrites any union data inside the SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters as the provided SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1 +func (t *SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) FromSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1(v SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1 performs a merge with any union data inside the SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters, using the provided 
SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1 +func (t *SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) MergeSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1(v SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2 returns the union data inside the SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters as a SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2 +func (t SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) AsSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2() (SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2, error) { + var body SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2 overwrites any union data inside the SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters as the provided SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2 +func (t *SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) FromSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2(v SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2 performs a merge with any union data inside the SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters, using the provided SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2 +func (t *SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) MergeSecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2(v SecurityEndpointManagementAPIKillProcessRouteRequestBodyParameters2) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointManagementAPIKillProcessRouteRequestBody_Parameters) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEndpointManagementAPIPendingActionsSchema0 returns the union data inside the SecurityEndpointManagementAPIPendingActionsSchema as a SecurityEndpointManagementAPIPendingActionsSchema0 +func (t SecurityEndpointManagementAPIPendingActionsSchema) AsSecurityEndpointManagementAPIPendingActionsSchema0() (SecurityEndpointManagementAPIPendingActionsSchema0, error) { + var body SecurityEndpointManagementAPIPendingActionsSchema0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIPendingActionsSchema0 overwrites any union data inside the SecurityEndpointManagementAPIPendingActionsSchema as the provided SecurityEndpointManagementAPIPendingActionsSchema0 +func (t *SecurityEndpointManagementAPIPendingActionsSchema) FromSecurityEndpointManagementAPIPendingActionsSchema0(v SecurityEndpointManagementAPIPendingActionsSchema0) error { + b, err := json.Marshal(v) + t.union = b + return 
err +} + +// MergeSecurityEndpointManagementAPIPendingActionsSchema0 performs a merge with any union data inside the SecurityEndpointManagementAPIPendingActionsSchema, using the provided SecurityEndpointManagementAPIPendingActionsSchema0 +func (t *SecurityEndpointManagementAPIPendingActionsSchema) MergeSecurityEndpointManagementAPIPendingActionsSchema0(v SecurityEndpointManagementAPIPendingActionsSchema0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPIPendingActionsSchema1 returns the union data inside the SecurityEndpointManagementAPIPendingActionsSchema as a SecurityEndpointManagementAPIPendingActionsSchema1 +func (t SecurityEndpointManagementAPIPendingActionsSchema) AsSecurityEndpointManagementAPIPendingActionsSchema1() (SecurityEndpointManagementAPIPendingActionsSchema1, error) { + var body SecurityEndpointManagementAPIPendingActionsSchema1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIPendingActionsSchema1 overwrites any union data inside the SecurityEndpointManagementAPIPendingActionsSchema as the provided SecurityEndpointManagementAPIPendingActionsSchema1 +func (t *SecurityEndpointManagementAPIPendingActionsSchema) FromSecurityEndpointManagementAPIPendingActionsSchema1(v SecurityEndpointManagementAPIPendingActionsSchema1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIPendingActionsSchema1 performs a merge with any union data inside the SecurityEndpointManagementAPIPendingActionsSchema, using the provided SecurityEndpointManagementAPIPendingActionsSchema1 +func (t *SecurityEndpointManagementAPIPendingActionsSchema) MergeSecurityEndpointManagementAPIPendingActionsSchema1(v SecurityEndpointManagementAPIPendingActionsSchema1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEndpointManagementAPIPendingActionsSchema) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointManagementAPIPendingActionsSchema) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0 returns the union data inside the SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content as a SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0 +func (t SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content) AsSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0() (SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0, error) { + var body SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0 overwrites any union data inside the SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content as the provided SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0 +func (t *SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content) 
FromSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0(v SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0 performs a merge with any union data inside the SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content, using the provided SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0 +func (t *SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content) MergeSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0(v SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1 returns the union data inside the SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content as a SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1 +func (t SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content) AsSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1() (SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1, error) { + var body SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1 overwrites any union data inside the SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content as the provided SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1 +func (t *SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content) FromSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1(v SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1 performs a merge with any union data inside the SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content, using the provided SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1 +func (t *SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content) MergeSecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1(v SecurityEndpointManagementAPIResponseActionCreateSuccessResponseDataOutputsContent1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointManagementAPIResponseActionCreateSuccessResponse_Data_Outputs_Content) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// 
AsSecurityEndpointManagementAPIRawScriptParameters returns the union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters as a SecurityEndpointManagementAPIRawScriptParameters +func (t SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) AsSecurityEndpointManagementAPIRawScriptParameters() (SecurityEndpointManagementAPIRawScriptParameters, error) { + var body SecurityEndpointManagementAPIRawScriptParameters + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIRawScriptParameters overwrites any union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters as the provided SecurityEndpointManagementAPIRawScriptParameters +func (t *SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) FromSecurityEndpointManagementAPIRawScriptParameters(v SecurityEndpointManagementAPIRawScriptParameters) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIRawScriptParameters performs a merge with any union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters, using the provided SecurityEndpointManagementAPIRawScriptParameters +func (t *SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) MergeSecurityEndpointManagementAPIRawScriptParameters(v SecurityEndpointManagementAPIRawScriptParameters) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPIHostPathScriptParameters returns the union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters as a SecurityEndpointManagementAPIHostPathScriptParameters +func (t SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) AsSecurityEndpointManagementAPIHostPathScriptParameters() (SecurityEndpointManagementAPIHostPathScriptParameters, error) { + var body SecurityEndpointManagementAPIHostPathScriptParameters + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIHostPathScriptParameters overwrites any union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters as the provided SecurityEndpointManagementAPIHostPathScriptParameters +func (t *SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) FromSecurityEndpointManagementAPIHostPathScriptParameters(v SecurityEndpointManagementAPIHostPathScriptParameters) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIHostPathScriptParameters performs a merge with any union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters, using the provided SecurityEndpointManagementAPIHostPathScriptParameters +func (t *SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) MergeSecurityEndpointManagementAPIHostPathScriptParameters(v SecurityEndpointManagementAPIHostPathScriptParameters) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPICloudFileScriptParameters returns the union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters as a SecurityEndpointManagementAPICloudFileScriptParameters +func (t SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) 
AsSecurityEndpointManagementAPICloudFileScriptParameters() (SecurityEndpointManagementAPICloudFileScriptParameters, error) { + var body SecurityEndpointManagementAPICloudFileScriptParameters + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPICloudFileScriptParameters overwrites any union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters as the provided SecurityEndpointManagementAPICloudFileScriptParameters +func (t *SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) FromSecurityEndpointManagementAPICloudFileScriptParameters(v SecurityEndpointManagementAPICloudFileScriptParameters) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPICloudFileScriptParameters performs a merge with any union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters, using the provided SecurityEndpointManagementAPICloudFileScriptParameters +func (t *SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) MergeSecurityEndpointManagementAPICloudFileScriptParameters(v SecurityEndpointManagementAPICloudFileScriptParameters) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPISentinelOneRunScriptParameters returns the union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters as a SecurityEndpointManagementAPISentinelOneRunScriptParameters +func (t SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) AsSecurityEndpointManagementAPISentinelOneRunScriptParameters() (SecurityEndpointManagementAPISentinelOneRunScriptParameters, error) { + var body SecurityEndpointManagementAPISentinelOneRunScriptParameters + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPISentinelOneRunScriptParameters overwrites any union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters as the provided SecurityEndpointManagementAPISentinelOneRunScriptParameters +func (t *SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) FromSecurityEndpointManagementAPISentinelOneRunScriptParameters(v SecurityEndpointManagementAPISentinelOneRunScriptParameters) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPISentinelOneRunScriptParameters performs a merge with any union data inside the SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters, using the provided SecurityEndpointManagementAPISentinelOneRunScriptParameters +func (t *SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) MergeSecurityEndpointManagementAPISentinelOneRunScriptParameters(v SecurityEndpointManagementAPISentinelOneRunScriptParameters) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointManagementAPIRunScriptRouteRequestBody_Parameters) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0 returns the union data inside the 
SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters as a SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0 +func (t SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters) AsSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0() (SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0, error) { + var body SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0 overwrites any union data inside the SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters as the provided SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0 +func (t *SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters) FromSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0(v SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0 performs a merge with any union data inside the SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters, using the provided SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0 +func (t *SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters) MergeSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0(v SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1 returns the union data inside the SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters as a SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1 +func (t SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters) AsSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1() (SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1, error) { + var body SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1 overwrites any union data inside the SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters as the provided SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1 +func (t *SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters) FromSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1(v SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1 performs a merge with any union data inside the SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters, using the provided SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1 +func (t *SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters) MergeSecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1(v SecurityEndpointManagementAPISuspendProcessRouteRequestBodyParameters1) error { + b, err := 
json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointManagementAPISuspendProcessRouteRequestBody_Parameters) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEndpointManagementAPIUserIds0 returns the union data inside the SecurityEndpointManagementAPIUserIds as a SecurityEndpointManagementAPIUserIds0 +func (t SecurityEndpointManagementAPIUserIds) AsSecurityEndpointManagementAPIUserIds0() (SecurityEndpointManagementAPIUserIds0, error) { + var body SecurityEndpointManagementAPIUserIds0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIUserIds0 overwrites any union data inside the SecurityEndpointManagementAPIUserIds as the provided SecurityEndpointManagementAPIUserIds0 +func (t *SecurityEndpointManagementAPIUserIds) FromSecurityEndpointManagementAPIUserIds0(v SecurityEndpointManagementAPIUserIds0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIUserIds0 performs a merge with any union data inside the SecurityEndpointManagementAPIUserIds, using the provided SecurityEndpointManagementAPIUserIds0 +func (t *SecurityEndpointManagementAPIUserIds) MergeSecurityEndpointManagementAPIUserIds0(v SecurityEndpointManagementAPIUserIds0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPIUserIds1 returns the union data inside the SecurityEndpointManagementAPIUserIds as a SecurityEndpointManagementAPIUserIds1 +func (t SecurityEndpointManagementAPIUserIds) AsSecurityEndpointManagementAPIUserIds1() (SecurityEndpointManagementAPIUserIds1, error) { + var body SecurityEndpointManagementAPIUserIds1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIUserIds1 overwrites any union data inside the SecurityEndpointManagementAPIUserIds as the provided SecurityEndpointManagementAPIUserIds1 +func (t *SecurityEndpointManagementAPIUserIds) FromSecurityEndpointManagementAPIUserIds1(v SecurityEndpointManagementAPIUserIds1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIUserIds1 performs a merge with any union data inside the SecurityEndpointManagementAPIUserIds, using the provided SecurityEndpointManagementAPIUserIds1 +func (t *SecurityEndpointManagementAPIUserIds) MergeSecurityEndpointManagementAPIUserIds1(v SecurityEndpointManagementAPIUserIds1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEndpointManagementAPIUserIds) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointManagementAPIUserIds) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEndpointManagementAPIWithOutputs0 returns the union data inside the SecurityEndpointManagementAPIWithOutputs as a SecurityEndpointManagementAPIWithOutputs0 +func (t SecurityEndpointManagementAPIWithOutputs) AsSecurityEndpointManagementAPIWithOutputs0() 
(SecurityEndpointManagementAPIWithOutputs0, error) { + var body SecurityEndpointManagementAPIWithOutputs0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIWithOutputs0 overwrites any union data inside the SecurityEndpointManagementAPIWithOutputs as the provided SecurityEndpointManagementAPIWithOutputs0 +func (t *SecurityEndpointManagementAPIWithOutputs) FromSecurityEndpointManagementAPIWithOutputs0(v SecurityEndpointManagementAPIWithOutputs0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIWithOutputs0 performs a merge with any union data inside the SecurityEndpointManagementAPIWithOutputs, using the provided SecurityEndpointManagementAPIWithOutputs0 +func (t *SecurityEndpointManagementAPIWithOutputs) MergeSecurityEndpointManagementAPIWithOutputs0(v SecurityEndpointManagementAPIWithOutputs0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEndpointManagementAPIWithOutputs1 returns the union data inside the SecurityEndpointManagementAPIWithOutputs as a SecurityEndpointManagementAPIWithOutputs1 +func (t SecurityEndpointManagementAPIWithOutputs) AsSecurityEndpointManagementAPIWithOutputs1() (SecurityEndpointManagementAPIWithOutputs1, error) { + var body SecurityEndpointManagementAPIWithOutputs1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEndpointManagementAPIWithOutputs1 overwrites any union data inside the SecurityEndpointManagementAPIWithOutputs as the provided SecurityEndpointManagementAPIWithOutputs1 +func (t *SecurityEndpointManagementAPIWithOutputs) FromSecurityEndpointManagementAPIWithOutputs1(v SecurityEndpointManagementAPIWithOutputs1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEndpointManagementAPIWithOutputs1 performs a merge with any union data inside the SecurityEndpointManagementAPIWithOutputs, using the provided SecurityEndpointManagementAPIWithOutputs1 +func (t *SecurityEndpointManagementAPIWithOutputs) MergeSecurityEndpointManagementAPIWithOutputs1(v SecurityEndpointManagementAPIWithOutputs1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEndpointManagementAPIWithOutputs) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEndpointManagementAPIWithOutputs) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityEntityAnalyticsAPIUserEntity returns the union data inside the SecurityEntityAnalyticsAPIEntity as a SecurityEntityAnalyticsAPIUserEntity +func (t SecurityEntityAnalyticsAPIEntity) AsSecurityEntityAnalyticsAPIUserEntity() (SecurityEntityAnalyticsAPIUserEntity, error) { + var body SecurityEntityAnalyticsAPIUserEntity + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEntityAnalyticsAPIUserEntity overwrites any union data inside the SecurityEntityAnalyticsAPIEntity as the provided SecurityEntityAnalyticsAPIUserEntity +func (t *SecurityEntityAnalyticsAPIEntity) FromSecurityEntityAnalyticsAPIUserEntity(v SecurityEntityAnalyticsAPIUserEntity) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEntityAnalyticsAPIUserEntity performs a merge with any union data inside the 
SecurityEntityAnalyticsAPIEntity, using the provided SecurityEntityAnalyticsAPIUserEntity +func (t *SecurityEntityAnalyticsAPIEntity) MergeSecurityEntityAnalyticsAPIUserEntity(v SecurityEntityAnalyticsAPIUserEntity) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEntityAnalyticsAPIHostEntity returns the union data inside the SecurityEntityAnalyticsAPIEntity as a SecurityEntityAnalyticsAPIHostEntity +func (t SecurityEntityAnalyticsAPIEntity) AsSecurityEntityAnalyticsAPIHostEntity() (SecurityEntityAnalyticsAPIHostEntity, error) { + var body SecurityEntityAnalyticsAPIHostEntity + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEntityAnalyticsAPIHostEntity overwrites any union data inside the SecurityEntityAnalyticsAPIEntity as the provided SecurityEntityAnalyticsAPIHostEntity +func (t *SecurityEntityAnalyticsAPIEntity) FromSecurityEntityAnalyticsAPIHostEntity(v SecurityEntityAnalyticsAPIHostEntity) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEntityAnalyticsAPIHostEntity performs a merge with any union data inside the SecurityEntityAnalyticsAPIEntity, using the provided SecurityEntityAnalyticsAPIHostEntity +func (t *SecurityEntityAnalyticsAPIEntity) MergeSecurityEntityAnalyticsAPIHostEntity(v SecurityEntityAnalyticsAPIHostEntity) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEntityAnalyticsAPIServiceEntity returns the union data inside the SecurityEntityAnalyticsAPIEntity as a SecurityEntityAnalyticsAPIServiceEntity +func (t SecurityEntityAnalyticsAPIEntity) AsSecurityEntityAnalyticsAPIServiceEntity() (SecurityEntityAnalyticsAPIServiceEntity, error) { + var body SecurityEntityAnalyticsAPIServiceEntity + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEntityAnalyticsAPIServiceEntity overwrites any union data inside the SecurityEntityAnalyticsAPIEntity as the provided SecurityEntityAnalyticsAPIServiceEntity +func (t *SecurityEntityAnalyticsAPIEntity) FromSecurityEntityAnalyticsAPIServiceEntity(v SecurityEntityAnalyticsAPIServiceEntity) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEntityAnalyticsAPIServiceEntity performs a merge with any union data inside the SecurityEntityAnalyticsAPIEntity, using the provided SecurityEntityAnalyticsAPIServiceEntity +func (t *SecurityEntityAnalyticsAPIEntity) MergeSecurityEntityAnalyticsAPIServiceEntity(v SecurityEntityAnalyticsAPIServiceEntity) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityEntityAnalyticsAPIGenericEntity returns the union data inside the SecurityEntityAnalyticsAPIEntity as a SecurityEntityAnalyticsAPIGenericEntity +func (t SecurityEntityAnalyticsAPIEntity) AsSecurityEntityAnalyticsAPIGenericEntity() (SecurityEntityAnalyticsAPIGenericEntity, error) { + var body SecurityEntityAnalyticsAPIGenericEntity + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityEntityAnalyticsAPIGenericEntity overwrites any union data inside the SecurityEntityAnalyticsAPIEntity as the provided SecurityEntityAnalyticsAPIGenericEntity +func (t *SecurityEntityAnalyticsAPIEntity) FromSecurityEntityAnalyticsAPIGenericEntity(v 
SecurityEntityAnalyticsAPIGenericEntity) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityEntityAnalyticsAPIGenericEntity performs a merge with any union data inside the SecurityEntityAnalyticsAPIEntity, using the provided SecurityEntityAnalyticsAPIGenericEntity +func (t *SecurityEntityAnalyticsAPIEntity) MergeSecurityEntityAnalyticsAPIGenericEntity(v SecurityEntityAnalyticsAPIGenericEntity) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityEntityAnalyticsAPIEntity) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityEntityAnalyticsAPIEntity) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityExceptionsAPIExceptionListItemEntryMatch returns the union data inside the SecurityExceptionsAPIExceptionListItemEntry as a SecurityExceptionsAPIExceptionListItemEntryMatch +func (t SecurityExceptionsAPIExceptionListItemEntry) AsSecurityExceptionsAPIExceptionListItemEntryMatch() (SecurityExceptionsAPIExceptionListItemEntryMatch, error) { + var body SecurityExceptionsAPIExceptionListItemEntryMatch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityExceptionsAPIExceptionListItemEntryMatch overwrites any union data inside the SecurityExceptionsAPIExceptionListItemEntry as the provided SecurityExceptionsAPIExceptionListItemEntryMatch +func (t *SecurityExceptionsAPIExceptionListItemEntry) FromSecurityExceptionsAPIExceptionListItemEntryMatch(v SecurityExceptionsAPIExceptionListItemEntryMatch) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityExceptionsAPIExceptionListItemEntryMatch performs a merge with any union data inside the SecurityExceptionsAPIExceptionListItemEntry, using the provided SecurityExceptionsAPIExceptionListItemEntryMatch +func (t *SecurityExceptionsAPIExceptionListItemEntry) MergeSecurityExceptionsAPIExceptionListItemEntryMatch(v SecurityExceptionsAPIExceptionListItemEntryMatch) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityExceptionsAPIExceptionListItemEntryMatchAny returns the union data inside the SecurityExceptionsAPIExceptionListItemEntry as a SecurityExceptionsAPIExceptionListItemEntryMatchAny +func (t SecurityExceptionsAPIExceptionListItemEntry) AsSecurityExceptionsAPIExceptionListItemEntryMatchAny() (SecurityExceptionsAPIExceptionListItemEntryMatchAny, error) { + var body SecurityExceptionsAPIExceptionListItemEntryMatchAny + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityExceptionsAPIExceptionListItemEntryMatchAny overwrites any union data inside the SecurityExceptionsAPIExceptionListItemEntry as the provided SecurityExceptionsAPIExceptionListItemEntryMatchAny +func (t *SecurityExceptionsAPIExceptionListItemEntry) FromSecurityExceptionsAPIExceptionListItemEntryMatchAny(v SecurityExceptionsAPIExceptionListItemEntryMatchAny) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityExceptionsAPIExceptionListItemEntryMatchAny performs a merge with any union data inside the SecurityExceptionsAPIExceptionListItemEntry, using the provided SecurityExceptionsAPIExceptionListItemEntryMatchAny +func (t *SecurityExceptionsAPIExceptionListItemEntry) 
MergeSecurityExceptionsAPIExceptionListItemEntryMatchAny(v SecurityExceptionsAPIExceptionListItemEntryMatchAny) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityExceptionsAPIExceptionListItemEntryList returns the union data inside the SecurityExceptionsAPIExceptionListItemEntry as a SecurityExceptionsAPIExceptionListItemEntryList +func (t SecurityExceptionsAPIExceptionListItemEntry) AsSecurityExceptionsAPIExceptionListItemEntryList() (SecurityExceptionsAPIExceptionListItemEntryList, error) { + var body SecurityExceptionsAPIExceptionListItemEntryList + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityExceptionsAPIExceptionListItemEntryList overwrites any union data inside the SecurityExceptionsAPIExceptionListItemEntry as the provided SecurityExceptionsAPIExceptionListItemEntryList +func (t *SecurityExceptionsAPIExceptionListItemEntry) FromSecurityExceptionsAPIExceptionListItemEntryList(v SecurityExceptionsAPIExceptionListItemEntryList) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityExceptionsAPIExceptionListItemEntryList performs a merge with any union data inside the SecurityExceptionsAPIExceptionListItemEntry, using the provided SecurityExceptionsAPIExceptionListItemEntryList +func (t *SecurityExceptionsAPIExceptionListItemEntry) MergeSecurityExceptionsAPIExceptionListItemEntryList(v SecurityExceptionsAPIExceptionListItemEntryList) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityExceptionsAPIExceptionListItemEntryExists returns the union data inside the SecurityExceptionsAPIExceptionListItemEntry as a SecurityExceptionsAPIExceptionListItemEntryExists +func (t SecurityExceptionsAPIExceptionListItemEntry) AsSecurityExceptionsAPIExceptionListItemEntryExists() (SecurityExceptionsAPIExceptionListItemEntryExists, error) { + var body SecurityExceptionsAPIExceptionListItemEntryExists + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityExceptionsAPIExceptionListItemEntryExists overwrites any union data inside the SecurityExceptionsAPIExceptionListItemEntry as the provided SecurityExceptionsAPIExceptionListItemEntryExists +func (t *SecurityExceptionsAPIExceptionListItemEntry) FromSecurityExceptionsAPIExceptionListItemEntryExists(v SecurityExceptionsAPIExceptionListItemEntryExists) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityExceptionsAPIExceptionListItemEntryExists performs a merge with any union data inside the SecurityExceptionsAPIExceptionListItemEntry, using the provided SecurityExceptionsAPIExceptionListItemEntryExists +func (t *SecurityExceptionsAPIExceptionListItemEntry) MergeSecurityExceptionsAPIExceptionListItemEntryExists(v SecurityExceptionsAPIExceptionListItemEntryExists) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityExceptionsAPIExceptionListItemEntryNested returns the union data inside the SecurityExceptionsAPIExceptionListItemEntry as a SecurityExceptionsAPIExceptionListItemEntryNested +func (t SecurityExceptionsAPIExceptionListItemEntry) AsSecurityExceptionsAPIExceptionListItemEntryNested() (SecurityExceptionsAPIExceptionListItemEntryNested, error) { + var body 
SecurityExceptionsAPIExceptionListItemEntryNested + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityExceptionsAPIExceptionListItemEntryNested overwrites any union data inside the SecurityExceptionsAPIExceptionListItemEntry as the provided SecurityExceptionsAPIExceptionListItemEntryNested +func (t *SecurityExceptionsAPIExceptionListItemEntry) FromSecurityExceptionsAPIExceptionListItemEntryNested(v SecurityExceptionsAPIExceptionListItemEntryNested) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityExceptionsAPIExceptionListItemEntryNested performs a merge with any union data inside the SecurityExceptionsAPIExceptionListItemEntry, using the provided SecurityExceptionsAPIExceptionListItemEntryNested +func (t *SecurityExceptionsAPIExceptionListItemEntry) MergeSecurityExceptionsAPIExceptionListItemEntryNested(v SecurityExceptionsAPIExceptionListItemEntryNested) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityExceptionsAPIExceptionListItemEntryMatchWildcard returns the union data inside the SecurityExceptionsAPIExceptionListItemEntry as a SecurityExceptionsAPIExceptionListItemEntryMatchWildcard +func (t SecurityExceptionsAPIExceptionListItemEntry) AsSecurityExceptionsAPIExceptionListItemEntryMatchWildcard() (SecurityExceptionsAPIExceptionListItemEntryMatchWildcard, error) { + var body SecurityExceptionsAPIExceptionListItemEntryMatchWildcard + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityExceptionsAPIExceptionListItemEntryMatchWildcard overwrites any union data inside the SecurityExceptionsAPIExceptionListItemEntry as the provided SecurityExceptionsAPIExceptionListItemEntryMatchWildcard +func (t *SecurityExceptionsAPIExceptionListItemEntry) FromSecurityExceptionsAPIExceptionListItemEntryMatchWildcard(v SecurityExceptionsAPIExceptionListItemEntryMatchWildcard) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityExceptionsAPIExceptionListItemEntryMatchWildcard performs a merge with any union data inside the SecurityExceptionsAPIExceptionListItemEntry, using the provided SecurityExceptionsAPIExceptionListItemEntryMatchWildcard +func (t *SecurityExceptionsAPIExceptionListItemEntry) MergeSecurityExceptionsAPIExceptionListItemEntryMatchWildcard(v SecurityExceptionsAPIExceptionListItemEntryMatchWildcard) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityExceptionsAPIExceptionListItemEntry) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityExceptionsAPIExceptionListItemEntry) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityExceptionsAPIExceptionListItemEntryMatch returns the union data inside the SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem as a SecurityExceptionsAPIExceptionListItemEntryMatch +func (t SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) AsSecurityExceptionsAPIExceptionListItemEntryMatch() (SecurityExceptionsAPIExceptionListItemEntryMatch, error) { + var body SecurityExceptionsAPIExceptionListItemEntryMatch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityExceptionsAPIExceptionListItemEntryMatch overwrites any union data inside the 
SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem as the provided SecurityExceptionsAPIExceptionListItemEntryMatch +func (t *SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) FromSecurityExceptionsAPIExceptionListItemEntryMatch(v SecurityExceptionsAPIExceptionListItemEntryMatch) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityExceptionsAPIExceptionListItemEntryMatch performs a merge with any union data inside the SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem, using the provided SecurityExceptionsAPIExceptionListItemEntryMatch +func (t *SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) MergeSecurityExceptionsAPIExceptionListItemEntryMatch(v SecurityExceptionsAPIExceptionListItemEntryMatch) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityExceptionsAPIExceptionListItemEntryMatchAny returns the union data inside the SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem as a SecurityExceptionsAPIExceptionListItemEntryMatchAny +func (t SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) AsSecurityExceptionsAPIExceptionListItemEntryMatchAny() (SecurityExceptionsAPIExceptionListItemEntryMatchAny, error) { + var body SecurityExceptionsAPIExceptionListItemEntryMatchAny + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityExceptionsAPIExceptionListItemEntryMatchAny overwrites any union data inside the SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem as the provided SecurityExceptionsAPIExceptionListItemEntryMatchAny +func (t *SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) FromSecurityExceptionsAPIExceptionListItemEntryMatchAny(v SecurityExceptionsAPIExceptionListItemEntryMatchAny) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityExceptionsAPIExceptionListItemEntryMatchAny performs a merge with any union data inside the SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem, using the provided SecurityExceptionsAPIExceptionListItemEntryMatchAny +func (t *SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) MergeSecurityExceptionsAPIExceptionListItemEntryMatchAny(v SecurityExceptionsAPIExceptionListItemEntryMatchAny) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityExceptionsAPIExceptionListItemEntryExists returns the union data inside the SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem as a SecurityExceptionsAPIExceptionListItemEntryExists +func (t SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) AsSecurityExceptionsAPIExceptionListItemEntryExists() (SecurityExceptionsAPIExceptionListItemEntryExists, error) { + var body SecurityExceptionsAPIExceptionListItemEntryExists + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityExceptionsAPIExceptionListItemEntryExists overwrites any union data inside the SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem as the provided SecurityExceptionsAPIExceptionListItemEntryExists +func (t *SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) FromSecurityExceptionsAPIExceptionListItemEntryExists(v SecurityExceptionsAPIExceptionListItemEntryExists) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityExceptionsAPIExceptionListItemEntryExists performs 
a merge with any union data inside the SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem, using the provided SecurityExceptionsAPIExceptionListItemEntryExists +func (t *SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) MergeSecurityExceptionsAPIExceptionListItemEntryExists(v SecurityExceptionsAPIExceptionListItemEntryExists) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityExceptionsAPIExceptionListItemEntryNestedEntryItem) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityOsqueryAPIECSMappingItemValue0 returns the union data inside the SecurityOsqueryAPIECSMappingItem_Value as a SecurityOsqueryAPIECSMappingItemValue0 +func (t SecurityOsqueryAPIECSMappingItem_Value) AsSecurityOsqueryAPIECSMappingItemValue0() (SecurityOsqueryAPIECSMappingItemValue0, error) { + var body SecurityOsqueryAPIECSMappingItemValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityOsqueryAPIECSMappingItemValue0 overwrites any union data inside the SecurityOsqueryAPIECSMappingItem_Value as the provided SecurityOsqueryAPIECSMappingItemValue0 +func (t *SecurityOsqueryAPIECSMappingItem_Value) FromSecurityOsqueryAPIECSMappingItemValue0(v SecurityOsqueryAPIECSMappingItemValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityOsqueryAPIECSMappingItemValue0 performs a merge with any union data inside the SecurityOsqueryAPIECSMappingItem_Value, using the provided SecurityOsqueryAPIECSMappingItemValue0 +func (t *SecurityOsqueryAPIECSMappingItem_Value) MergeSecurityOsqueryAPIECSMappingItemValue0(v SecurityOsqueryAPIECSMappingItemValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityOsqueryAPIECSMappingItemValue1 returns the union data inside the SecurityOsqueryAPIECSMappingItem_Value as a SecurityOsqueryAPIECSMappingItemValue1 +func (t SecurityOsqueryAPIECSMappingItem_Value) AsSecurityOsqueryAPIECSMappingItemValue1() (SecurityOsqueryAPIECSMappingItemValue1, error) { + var body SecurityOsqueryAPIECSMappingItemValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityOsqueryAPIECSMappingItemValue1 overwrites any union data inside the SecurityOsqueryAPIECSMappingItem_Value as the provided SecurityOsqueryAPIECSMappingItemValue1 +func (t *SecurityOsqueryAPIECSMappingItem_Value) FromSecurityOsqueryAPIECSMappingItemValue1(v SecurityOsqueryAPIECSMappingItemValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityOsqueryAPIECSMappingItemValue1 performs a merge with any union data inside the SecurityOsqueryAPIECSMappingItem_Value, using the provided SecurityOsqueryAPIECSMappingItemValue1 +func (t *SecurityOsqueryAPIECSMappingItem_Value) MergeSecurityOsqueryAPIECSMappingItemValue1(v SecurityOsqueryAPIECSMappingItemValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityOsqueryAPIECSMappingItem_Value) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t 
*SecurityOsqueryAPIECSMappingItem_Value) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPIDocumentIds0 returns the union data inside the SecurityTimelineAPIDocumentIds as a SecurityTimelineAPIDocumentIds0 +func (t SecurityTimelineAPIDocumentIds) AsSecurityTimelineAPIDocumentIds0() (SecurityTimelineAPIDocumentIds0, error) { + var body SecurityTimelineAPIDocumentIds0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIDocumentIds0 overwrites any union data inside the SecurityTimelineAPIDocumentIds as the provided SecurityTimelineAPIDocumentIds0 +func (t *SecurityTimelineAPIDocumentIds) FromSecurityTimelineAPIDocumentIds0(v SecurityTimelineAPIDocumentIds0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIDocumentIds0 performs a merge with any union data inside the SecurityTimelineAPIDocumentIds, using the provided SecurityTimelineAPIDocumentIds0 +func (t *SecurityTimelineAPIDocumentIds) MergeSecurityTimelineAPIDocumentIds0(v SecurityTimelineAPIDocumentIds0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPIDocumentIds1 returns the union data inside the SecurityTimelineAPIDocumentIds as a SecurityTimelineAPIDocumentIds1 +func (t SecurityTimelineAPIDocumentIds) AsSecurityTimelineAPIDocumentIds1() (SecurityTimelineAPIDocumentIds1, error) { + var body SecurityTimelineAPIDocumentIds1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIDocumentIds1 overwrites any union data inside the SecurityTimelineAPIDocumentIds as the provided SecurityTimelineAPIDocumentIds1 +func (t *SecurityTimelineAPIDocumentIds) FromSecurityTimelineAPIDocumentIds1(v SecurityTimelineAPIDocumentIds1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIDocumentIds1 performs a merge with any union data inside the SecurityTimelineAPIDocumentIds, using the provided SecurityTimelineAPIDocumentIds1 +func (t *SecurityTimelineAPIDocumentIds) MergeSecurityTimelineAPIDocumentIds1(v SecurityTimelineAPIDocumentIds1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPIDocumentIds) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPIDocumentIds) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPIImportTimelinesDateRangeEnd0 returns the union data inside the SecurityTimelineAPIImportTimelines_DateRange_End as a SecurityTimelineAPIImportTimelinesDateRangeEnd0 +func (t SecurityTimelineAPIImportTimelines_DateRange_End) AsSecurityTimelineAPIImportTimelinesDateRangeEnd0() (SecurityTimelineAPIImportTimelinesDateRangeEnd0, error) { + var body SecurityTimelineAPIImportTimelinesDateRangeEnd0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIImportTimelinesDateRangeEnd0 overwrites any union data inside the SecurityTimelineAPIImportTimelines_DateRange_End as the provided SecurityTimelineAPIImportTimelinesDateRangeEnd0 +func (t *SecurityTimelineAPIImportTimelines_DateRange_End) FromSecurityTimelineAPIImportTimelinesDateRangeEnd0(v SecurityTimelineAPIImportTimelinesDateRangeEnd0) error { + b, err := 
json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIImportTimelinesDateRangeEnd0 performs a merge with any union data inside the SecurityTimelineAPIImportTimelines_DateRange_End, using the provided SecurityTimelineAPIImportTimelinesDateRangeEnd0 +func (t *SecurityTimelineAPIImportTimelines_DateRange_End) MergeSecurityTimelineAPIImportTimelinesDateRangeEnd0(v SecurityTimelineAPIImportTimelinesDateRangeEnd0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPIImportTimelinesDateRangeEnd1 returns the union data inside the SecurityTimelineAPIImportTimelines_DateRange_End as a SecurityTimelineAPIImportTimelinesDateRangeEnd1 +func (t SecurityTimelineAPIImportTimelines_DateRange_End) AsSecurityTimelineAPIImportTimelinesDateRangeEnd1() (SecurityTimelineAPIImportTimelinesDateRangeEnd1, error) { + var body SecurityTimelineAPIImportTimelinesDateRangeEnd1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIImportTimelinesDateRangeEnd1 overwrites any union data inside the SecurityTimelineAPIImportTimelines_DateRange_End as the provided SecurityTimelineAPIImportTimelinesDateRangeEnd1 +func (t *SecurityTimelineAPIImportTimelines_DateRange_End) FromSecurityTimelineAPIImportTimelinesDateRangeEnd1(v SecurityTimelineAPIImportTimelinesDateRangeEnd1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIImportTimelinesDateRangeEnd1 performs a merge with any union data inside the SecurityTimelineAPIImportTimelines_DateRange_End, using the provided SecurityTimelineAPIImportTimelinesDateRangeEnd1 +func (t *SecurityTimelineAPIImportTimelines_DateRange_End) MergeSecurityTimelineAPIImportTimelinesDateRangeEnd1(v SecurityTimelineAPIImportTimelinesDateRangeEnd1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPIImportTimelines_DateRange_End) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPIImportTimelines_DateRange_End) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPIImportTimelinesDateRangeStart0 returns the union data inside the SecurityTimelineAPIImportTimelines_DateRange_Start as a SecurityTimelineAPIImportTimelinesDateRangeStart0 +func (t SecurityTimelineAPIImportTimelines_DateRange_Start) AsSecurityTimelineAPIImportTimelinesDateRangeStart0() (SecurityTimelineAPIImportTimelinesDateRangeStart0, error) { + var body SecurityTimelineAPIImportTimelinesDateRangeStart0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIImportTimelinesDateRangeStart0 overwrites any union data inside the SecurityTimelineAPIImportTimelines_DateRange_Start as the provided SecurityTimelineAPIImportTimelinesDateRangeStart0 +func (t *SecurityTimelineAPIImportTimelines_DateRange_Start) FromSecurityTimelineAPIImportTimelinesDateRangeStart0(v SecurityTimelineAPIImportTimelinesDateRangeStart0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIImportTimelinesDateRangeStart0 performs a merge with any union data inside the SecurityTimelineAPIImportTimelines_DateRange_Start, using the provided SecurityTimelineAPIImportTimelinesDateRangeStart0 +func (t 
*SecurityTimelineAPIImportTimelines_DateRange_Start) MergeSecurityTimelineAPIImportTimelinesDateRangeStart0(v SecurityTimelineAPIImportTimelinesDateRangeStart0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPIImportTimelinesDateRangeStart1 returns the union data inside the SecurityTimelineAPIImportTimelines_DateRange_Start as a SecurityTimelineAPIImportTimelinesDateRangeStart1 +func (t SecurityTimelineAPIImportTimelines_DateRange_Start) AsSecurityTimelineAPIImportTimelinesDateRangeStart1() (SecurityTimelineAPIImportTimelinesDateRangeStart1, error) { + var body SecurityTimelineAPIImportTimelinesDateRangeStart1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIImportTimelinesDateRangeStart1 overwrites any union data inside the SecurityTimelineAPIImportTimelines_DateRange_Start as the provided SecurityTimelineAPIImportTimelinesDateRangeStart1 +func (t *SecurityTimelineAPIImportTimelines_DateRange_Start) FromSecurityTimelineAPIImportTimelinesDateRangeStart1(v SecurityTimelineAPIImportTimelinesDateRangeStart1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIImportTimelinesDateRangeStart1 performs a merge with any union data inside the SecurityTimelineAPIImportTimelines_DateRange_Start, using the provided SecurityTimelineAPIImportTimelinesDateRangeStart1 +func (t *SecurityTimelineAPIImportTimelines_DateRange_Start) MergeSecurityTimelineAPIImportTimelinesDateRangeStart1(v SecurityTimelineAPIImportTimelinesDateRangeStart1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPIImportTimelines_DateRange_Start) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPIImportTimelines_DateRange_Start) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPIImportTimelinesEqlOptionsSize0 returns the union data inside the SecurityTimelineAPIImportTimelines_EqlOptions_Size as a SecurityTimelineAPIImportTimelinesEqlOptionsSize0 +func (t SecurityTimelineAPIImportTimelines_EqlOptions_Size) AsSecurityTimelineAPIImportTimelinesEqlOptionsSize0() (SecurityTimelineAPIImportTimelinesEqlOptionsSize0, error) { + var body SecurityTimelineAPIImportTimelinesEqlOptionsSize0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIImportTimelinesEqlOptionsSize0 overwrites any union data inside the SecurityTimelineAPIImportTimelines_EqlOptions_Size as the provided SecurityTimelineAPIImportTimelinesEqlOptionsSize0 +func (t *SecurityTimelineAPIImportTimelines_EqlOptions_Size) FromSecurityTimelineAPIImportTimelinesEqlOptionsSize0(v SecurityTimelineAPIImportTimelinesEqlOptionsSize0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIImportTimelinesEqlOptionsSize0 performs a merge with any union data inside the SecurityTimelineAPIImportTimelines_EqlOptions_Size, using the provided SecurityTimelineAPIImportTimelinesEqlOptionsSize0 +func (t *SecurityTimelineAPIImportTimelines_EqlOptions_Size) MergeSecurityTimelineAPIImportTimelinesEqlOptionsSize0(v SecurityTimelineAPIImportTimelinesEqlOptionsSize0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := 
runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPIImportTimelinesEqlOptionsSize1 returns the union data inside the SecurityTimelineAPIImportTimelines_EqlOptions_Size as a SecurityTimelineAPIImportTimelinesEqlOptionsSize1 +func (t SecurityTimelineAPIImportTimelines_EqlOptions_Size) AsSecurityTimelineAPIImportTimelinesEqlOptionsSize1() (SecurityTimelineAPIImportTimelinesEqlOptionsSize1, error) { + var body SecurityTimelineAPIImportTimelinesEqlOptionsSize1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIImportTimelinesEqlOptionsSize1 overwrites any union data inside the SecurityTimelineAPIImportTimelines_EqlOptions_Size as the provided SecurityTimelineAPIImportTimelinesEqlOptionsSize1 +func (t *SecurityTimelineAPIImportTimelines_EqlOptions_Size) FromSecurityTimelineAPIImportTimelinesEqlOptionsSize1(v SecurityTimelineAPIImportTimelinesEqlOptionsSize1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIImportTimelinesEqlOptionsSize1 performs a merge with any union data inside the SecurityTimelineAPIImportTimelines_EqlOptions_Size, using the provided SecurityTimelineAPIImportTimelinesEqlOptionsSize1 +func (t *SecurityTimelineAPIImportTimelines_EqlOptions_Size) MergeSecurityTimelineAPIImportTimelinesEqlOptionsSize1(v SecurityTimelineAPIImportTimelinesEqlOptionsSize1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPIImportTimelines_EqlOptions_Size) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPIImportTimelines_EqlOptions_Size) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPIPinnedEvent returns the union data inside the SecurityTimelineAPIPersistPinnedEventResponse as a SecurityTimelineAPIPinnedEvent +func (t SecurityTimelineAPIPersistPinnedEventResponse) AsSecurityTimelineAPIPinnedEvent() (SecurityTimelineAPIPinnedEvent, error) { + var body SecurityTimelineAPIPinnedEvent + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIPinnedEvent overwrites any union data inside the SecurityTimelineAPIPersistPinnedEventResponse as the provided SecurityTimelineAPIPinnedEvent +func (t *SecurityTimelineAPIPersistPinnedEventResponse) FromSecurityTimelineAPIPinnedEvent(v SecurityTimelineAPIPinnedEvent) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIPinnedEvent performs a merge with any union data inside the SecurityTimelineAPIPersistPinnedEventResponse, using the provided SecurityTimelineAPIPinnedEvent +func (t *SecurityTimelineAPIPersistPinnedEventResponse) MergeSecurityTimelineAPIPinnedEvent(v SecurityTimelineAPIPinnedEvent) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPIPersistPinnedEventResponse1 returns the union data inside the SecurityTimelineAPIPersistPinnedEventResponse as a SecurityTimelineAPIPersistPinnedEventResponse1 +func (t SecurityTimelineAPIPersistPinnedEventResponse) AsSecurityTimelineAPIPersistPinnedEventResponse1() (SecurityTimelineAPIPersistPinnedEventResponse1, error) { + var body SecurityTimelineAPIPersistPinnedEventResponse1 + err := json.Unmarshal(t.union, &body) + return body, err 
+} + +// FromSecurityTimelineAPIPersistPinnedEventResponse1 overwrites any union data inside the SecurityTimelineAPIPersistPinnedEventResponse as the provided SecurityTimelineAPIPersistPinnedEventResponse1 +func (t *SecurityTimelineAPIPersistPinnedEventResponse) FromSecurityTimelineAPIPersistPinnedEventResponse1(v SecurityTimelineAPIPersistPinnedEventResponse1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIPersistPinnedEventResponse1 performs a merge with any union data inside the SecurityTimelineAPIPersistPinnedEventResponse, using the provided SecurityTimelineAPIPersistPinnedEventResponse1 +func (t *SecurityTimelineAPIPersistPinnedEventResponse) MergeSecurityTimelineAPIPersistPinnedEventResponse1(v SecurityTimelineAPIPersistPinnedEventResponse1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPIPersistPinnedEventResponse) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPIPersistPinnedEventResponse) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPIQueryMatchResultValue0 returns the union data inside the SecurityTimelineAPIQueryMatchResult_Value as a SecurityTimelineAPIQueryMatchResultValue0 +func (t SecurityTimelineAPIQueryMatchResult_Value) AsSecurityTimelineAPIQueryMatchResultValue0() (SecurityTimelineAPIQueryMatchResultValue0, error) { + var body SecurityTimelineAPIQueryMatchResultValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIQueryMatchResultValue0 overwrites any union data inside the SecurityTimelineAPIQueryMatchResult_Value as the provided SecurityTimelineAPIQueryMatchResultValue0 +func (t *SecurityTimelineAPIQueryMatchResult_Value) FromSecurityTimelineAPIQueryMatchResultValue0(v SecurityTimelineAPIQueryMatchResultValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIQueryMatchResultValue0 performs a merge with any union data inside the SecurityTimelineAPIQueryMatchResult_Value, using the provided SecurityTimelineAPIQueryMatchResultValue0 +func (t *SecurityTimelineAPIQueryMatchResult_Value) MergeSecurityTimelineAPIQueryMatchResultValue0(v SecurityTimelineAPIQueryMatchResultValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPIQueryMatchResultValue1 returns the union data inside the SecurityTimelineAPIQueryMatchResult_Value as a SecurityTimelineAPIQueryMatchResultValue1 +func (t SecurityTimelineAPIQueryMatchResult_Value) AsSecurityTimelineAPIQueryMatchResultValue1() (SecurityTimelineAPIQueryMatchResultValue1, error) { + var body SecurityTimelineAPIQueryMatchResultValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPIQueryMatchResultValue1 overwrites any union data inside the SecurityTimelineAPIQueryMatchResult_Value as the provided SecurityTimelineAPIQueryMatchResultValue1 +func (t *SecurityTimelineAPIQueryMatchResult_Value) FromSecurityTimelineAPIQueryMatchResultValue1(v SecurityTimelineAPIQueryMatchResultValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPIQueryMatchResultValue1 performs a merge with any union data inside the 
SecurityTimelineAPIQueryMatchResult_Value, using the provided SecurityTimelineAPIQueryMatchResultValue1 +func (t *SecurityTimelineAPIQueryMatchResult_Value) MergeSecurityTimelineAPIQueryMatchResultValue1(v SecurityTimelineAPIQueryMatchResultValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPIQueryMatchResult_Value) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPIQueryMatchResult_Value) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPISavedObjectIds0 returns the union data inside the SecurityTimelineAPISavedObjectIds as a SecurityTimelineAPISavedObjectIds0 +func (t SecurityTimelineAPISavedObjectIds) AsSecurityTimelineAPISavedObjectIds0() (SecurityTimelineAPISavedObjectIds0, error) { + var body SecurityTimelineAPISavedObjectIds0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedObjectIds0 overwrites any union data inside the SecurityTimelineAPISavedObjectIds as the provided SecurityTimelineAPISavedObjectIds0 +func (t *SecurityTimelineAPISavedObjectIds) FromSecurityTimelineAPISavedObjectIds0(v SecurityTimelineAPISavedObjectIds0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedObjectIds0 performs a merge with any union data inside the SecurityTimelineAPISavedObjectIds, using the provided SecurityTimelineAPISavedObjectIds0 +func (t *SecurityTimelineAPISavedObjectIds) MergeSecurityTimelineAPISavedObjectIds0(v SecurityTimelineAPISavedObjectIds0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPISavedObjectIds1 returns the union data inside the SecurityTimelineAPISavedObjectIds as a SecurityTimelineAPISavedObjectIds1 +func (t SecurityTimelineAPISavedObjectIds) AsSecurityTimelineAPISavedObjectIds1() (SecurityTimelineAPISavedObjectIds1, error) { + var body SecurityTimelineAPISavedObjectIds1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedObjectIds1 overwrites any union data inside the SecurityTimelineAPISavedObjectIds as the provided SecurityTimelineAPISavedObjectIds1 +func (t *SecurityTimelineAPISavedObjectIds) FromSecurityTimelineAPISavedObjectIds1(v SecurityTimelineAPISavedObjectIds1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedObjectIds1 performs a merge with any union data inside the SecurityTimelineAPISavedObjectIds, using the provided SecurityTimelineAPISavedObjectIds1 +func (t *SecurityTimelineAPISavedObjectIds) MergeSecurityTimelineAPISavedObjectIds1(v SecurityTimelineAPISavedObjectIds1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPISavedObjectIds) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPISavedObjectIds) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPISavedTimelineDateRangeEnd0 returns the union data inside the SecurityTimelineAPISavedTimeline_DateRange_End as a SecurityTimelineAPISavedTimelineDateRangeEnd0 +func (t 
SecurityTimelineAPISavedTimeline_DateRange_End) AsSecurityTimelineAPISavedTimelineDateRangeEnd0() (SecurityTimelineAPISavedTimelineDateRangeEnd0, error) { + var body SecurityTimelineAPISavedTimelineDateRangeEnd0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineDateRangeEnd0 overwrites any union data inside the SecurityTimelineAPISavedTimeline_DateRange_End as the provided SecurityTimelineAPISavedTimelineDateRangeEnd0 +func (t *SecurityTimelineAPISavedTimeline_DateRange_End) FromSecurityTimelineAPISavedTimelineDateRangeEnd0(v SecurityTimelineAPISavedTimelineDateRangeEnd0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineDateRangeEnd0 performs a merge with any union data inside the SecurityTimelineAPISavedTimeline_DateRange_End, using the provided SecurityTimelineAPISavedTimelineDateRangeEnd0 +func (t *SecurityTimelineAPISavedTimeline_DateRange_End) MergeSecurityTimelineAPISavedTimelineDateRangeEnd0(v SecurityTimelineAPISavedTimelineDateRangeEnd0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPISavedTimelineDateRangeEnd1 returns the union data inside the SecurityTimelineAPISavedTimeline_DateRange_End as a SecurityTimelineAPISavedTimelineDateRangeEnd1 +func (t SecurityTimelineAPISavedTimeline_DateRange_End) AsSecurityTimelineAPISavedTimelineDateRangeEnd1() (SecurityTimelineAPISavedTimelineDateRangeEnd1, error) { + var body SecurityTimelineAPISavedTimelineDateRangeEnd1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineDateRangeEnd1 overwrites any union data inside the SecurityTimelineAPISavedTimeline_DateRange_End as the provided SecurityTimelineAPISavedTimelineDateRangeEnd1 +func (t *SecurityTimelineAPISavedTimeline_DateRange_End) FromSecurityTimelineAPISavedTimelineDateRangeEnd1(v SecurityTimelineAPISavedTimelineDateRangeEnd1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineDateRangeEnd1 performs a merge with any union data inside the SecurityTimelineAPISavedTimeline_DateRange_End, using the provided SecurityTimelineAPISavedTimelineDateRangeEnd1 +func (t *SecurityTimelineAPISavedTimeline_DateRange_End) MergeSecurityTimelineAPISavedTimelineDateRangeEnd1(v SecurityTimelineAPISavedTimelineDateRangeEnd1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPISavedTimeline_DateRange_End) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPISavedTimeline_DateRange_End) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPISavedTimelineDateRangeStart0 returns the union data inside the SecurityTimelineAPISavedTimeline_DateRange_Start as a SecurityTimelineAPISavedTimelineDateRangeStart0 +func (t SecurityTimelineAPISavedTimeline_DateRange_Start) AsSecurityTimelineAPISavedTimelineDateRangeStart0() (SecurityTimelineAPISavedTimelineDateRangeStart0, error) { + var body SecurityTimelineAPISavedTimelineDateRangeStart0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineDateRangeStart0 overwrites any union data inside the 
SecurityTimelineAPISavedTimeline_DateRange_Start as the provided SecurityTimelineAPISavedTimelineDateRangeStart0 +func (t *SecurityTimelineAPISavedTimeline_DateRange_Start) FromSecurityTimelineAPISavedTimelineDateRangeStart0(v SecurityTimelineAPISavedTimelineDateRangeStart0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineDateRangeStart0 performs a merge with any union data inside the SecurityTimelineAPISavedTimeline_DateRange_Start, using the provided SecurityTimelineAPISavedTimelineDateRangeStart0 +func (t *SecurityTimelineAPISavedTimeline_DateRange_Start) MergeSecurityTimelineAPISavedTimelineDateRangeStart0(v SecurityTimelineAPISavedTimelineDateRangeStart0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPISavedTimelineDateRangeStart1 returns the union data inside the SecurityTimelineAPISavedTimeline_DateRange_Start as a SecurityTimelineAPISavedTimelineDateRangeStart1 +func (t SecurityTimelineAPISavedTimeline_DateRange_Start) AsSecurityTimelineAPISavedTimelineDateRangeStart1() (SecurityTimelineAPISavedTimelineDateRangeStart1, error) { + var body SecurityTimelineAPISavedTimelineDateRangeStart1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineDateRangeStart1 overwrites any union data inside the SecurityTimelineAPISavedTimeline_DateRange_Start as the provided SecurityTimelineAPISavedTimelineDateRangeStart1 +func (t *SecurityTimelineAPISavedTimeline_DateRange_Start) FromSecurityTimelineAPISavedTimelineDateRangeStart1(v SecurityTimelineAPISavedTimelineDateRangeStart1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineDateRangeStart1 performs a merge with any union data inside the SecurityTimelineAPISavedTimeline_DateRange_Start, using the provided SecurityTimelineAPISavedTimelineDateRangeStart1 +func (t *SecurityTimelineAPISavedTimeline_DateRange_Start) MergeSecurityTimelineAPISavedTimelineDateRangeStart1(v SecurityTimelineAPISavedTimelineDateRangeStart1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPISavedTimeline_DateRange_Start) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPISavedTimeline_DateRange_Start) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPISavedTimelineEqlOptionsSize0 returns the union data inside the SecurityTimelineAPISavedTimeline_EqlOptions_Size as a SecurityTimelineAPISavedTimelineEqlOptionsSize0 +func (t SecurityTimelineAPISavedTimeline_EqlOptions_Size) AsSecurityTimelineAPISavedTimelineEqlOptionsSize0() (SecurityTimelineAPISavedTimelineEqlOptionsSize0, error) { + var body SecurityTimelineAPISavedTimelineEqlOptionsSize0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineEqlOptionsSize0 overwrites any union data inside the SecurityTimelineAPISavedTimeline_EqlOptions_Size as the provided SecurityTimelineAPISavedTimelineEqlOptionsSize0 +func (t *SecurityTimelineAPISavedTimeline_EqlOptions_Size) FromSecurityTimelineAPISavedTimelineEqlOptionsSize0(v SecurityTimelineAPISavedTimelineEqlOptionsSize0) error { + b, err := json.Marshal(v) + t.union = b + return err 
+} + +// MergeSecurityTimelineAPISavedTimelineEqlOptionsSize0 performs a merge with any union data inside the SecurityTimelineAPISavedTimeline_EqlOptions_Size, using the provided SecurityTimelineAPISavedTimelineEqlOptionsSize0 +func (t *SecurityTimelineAPISavedTimeline_EqlOptions_Size) MergeSecurityTimelineAPISavedTimelineEqlOptionsSize0(v SecurityTimelineAPISavedTimelineEqlOptionsSize0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPISavedTimelineEqlOptionsSize1 returns the union data inside the SecurityTimelineAPISavedTimeline_EqlOptions_Size as a SecurityTimelineAPISavedTimelineEqlOptionsSize1 +func (t SecurityTimelineAPISavedTimeline_EqlOptions_Size) AsSecurityTimelineAPISavedTimelineEqlOptionsSize1() (SecurityTimelineAPISavedTimelineEqlOptionsSize1, error) { + var body SecurityTimelineAPISavedTimelineEqlOptionsSize1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineEqlOptionsSize1 overwrites any union data inside the SecurityTimelineAPISavedTimeline_EqlOptions_Size as the provided SecurityTimelineAPISavedTimelineEqlOptionsSize1 +func (t *SecurityTimelineAPISavedTimeline_EqlOptions_Size) FromSecurityTimelineAPISavedTimelineEqlOptionsSize1(v SecurityTimelineAPISavedTimelineEqlOptionsSize1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineEqlOptionsSize1 performs a merge with any union data inside the SecurityTimelineAPISavedTimeline_EqlOptions_Size, using the provided SecurityTimelineAPISavedTimelineEqlOptionsSize1 +func (t *SecurityTimelineAPISavedTimeline_EqlOptions_Size) MergeSecurityTimelineAPISavedTimelineEqlOptionsSize1(v SecurityTimelineAPISavedTimelineEqlOptionsSize1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPISavedTimeline_EqlOptions_Size) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPISavedTimeline_EqlOptions_Size) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0 returns the union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End as a SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0 +func (t SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End) AsSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0() (SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0, error) { + var body SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0 overwrites any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End as the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End) FromSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0(v SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0 performs a merge with any union data inside the 
SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End, using the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End) MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0(v SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1 returns the union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End as a SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1 +func (t SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End) AsSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1() (SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1, error) { + var body SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1 overwrites any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End as the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End) FromSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1(v SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1 performs a merge with any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End, using the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End) MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1(v SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeEnd1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_End) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0 returns the union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start as a SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0 +func (t SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start) AsSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0() (SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0, error) { + var body SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0 overwrites any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start as the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start) 
FromSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0(v SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0 performs a merge with any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start, using the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start) MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0(v SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1 returns the union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start as a SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1 +func (t SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start) AsSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1() (SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1, error) { + var body SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1 overwrites any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start as the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start) FromSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1(v SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1 performs a merge with any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start, using the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start) MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1(v SecurityTimelineAPISavedTimelineWithSavedObjectIdDateRangeStart1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_DateRange_Start) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0 returns the union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size as a SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0 +func (t SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size) AsSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0() (SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0, error) { + var body SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0 + err := json.Unmarshal(t.union, &body) + return body, 
err +} + +// FromSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0 overwrites any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size as the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size) FromSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0(v SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0 performs a merge with any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size, using the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size) MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0(v SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1 returns the union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size as a SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1 +func (t SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size) AsSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1() (SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1, error) { + var body SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1 overwrites any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size as the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size) FromSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1(v SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1 performs a merge with any union data inside the SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size, using the provided SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1 +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size) MergeSecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1(v SecurityTimelineAPISavedTimelineWithSavedObjectIdEqlOptionsSize1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPISavedTimelineWithSavedObjectId_EqlOptions_Size) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPISortObject returns the union data inside the SecurityTimelineAPISort as a SecurityTimelineAPISortObject +func (t SecurityTimelineAPISort) AsSecurityTimelineAPISortObject() (SecurityTimelineAPISortObject, error) { + var body 
SecurityTimelineAPISortObject + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISortObject overwrites any union data inside the SecurityTimelineAPISort as the provided SecurityTimelineAPISortObject +func (t *SecurityTimelineAPISort) FromSecurityTimelineAPISortObject(v SecurityTimelineAPISortObject) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISortObject performs a merge with any union data inside the SecurityTimelineAPISort, using the provided SecurityTimelineAPISortObject +func (t *SecurityTimelineAPISort) MergeSecurityTimelineAPISortObject(v SecurityTimelineAPISortObject) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPISort1 returns the union data inside the SecurityTimelineAPISort as a SecurityTimelineAPISort1 +func (t SecurityTimelineAPISort) AsSecurityTimelineAPISort1() (SecurityTimelineAPISort1, error) { + var body SecurityTimelineAPISort1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPISort1 overwrites any union data inside the SecurityTimelineAPISort as the provided SecurityTimelineAPISort1 +func (t *SecurityTimelineAPISort) FromSecurityTimelineAPISort1(v SecurityTimelineAPISort1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPISort1 performs a merge with any union data inside the SecurityTimelineAPISort, using the provided SecurityTimelineAPISort1 +func (t *SecurityTimelineAPISort) MergeSecurityTimelineAPISort1(v SecurityTimelineAPISort1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPISort) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPISort) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPITimelineResponseDateRangeEnd0 returns the union data inside the SecurityTimelineAPITimelineResponse_DateRange_End as a SecurityTimelineAPITimelineResponseDateRangeEnd0 +func (t SecurityTimelineAPITimelineResponse_DateRange_End) AsSecurityTimelineAPITimelineResponseDateRangeEnd0() (SecurityTimelineAPITimelineResponseDateRangeEnd0, error) { + var body SecurityTimelineAPITimelineResponseDateRangeEnd0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineResponseDateRangeEnd0 overwrites any union data inside the SecurityTimelineAPITimelineResponse_DateRange_End as the provided SecurityTimelineAPITimelineResponseDateRangeEnd0 +func (t *SecurityTimelineAPITimelineResponse_DateRange_End) FromSecurityTimelineAPITimelineResponseDateRangeEnd0(v SecurityTimelineAPITimelineResponseDateRangeEnd0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineResponseDateRangeEnd0 performs a merge with any union data inside the SecurityTimelineAPITimelineResponse_DateRange_End, using the provided SecurityTimelineAPITimelineResponseDateRangeEnd0 +func (t *SecurityTimelineAPITimelineResponse_DateRange_End) MergeSecurityTimelineAPITimelineResponseDateRangeEnd0(v SecurityTimelineAPITimelineResponseDateRangeEnd0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + 
return err +} + +// AsSecurityTimelineAPITimelineResponseDateRangeEnd1 returns the union data inside the SecurityTimelineAPITimelineResponse_DateRange_End as a SecurityTimelineAPITimelineResponseDateRangeEnd1 +func (t SecurityTimelineAPITimelineResponse_DateRange_End) AsSecurityTimelineAPITimelineResponseDateRangeEnd1() (SecurityTimelineAPITimelineResponseDateRangeEnd1, error) { + var body SecurityTimelineAPITimelineResponseDateRangeEnd1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineResponseDateRangeEnd1 overwrites any union data inside the SecurityTimelineAPITimelineResponse_DateRange_End as the provided SecurityTimelineAPITimelineResponseDateRangeEnd1 +func (t *SecurityTimelineAPITimelineResponse_DateRange_End) FromSecurityTimelineAPITimelineResponseDateRangeEnd1(v SecurityTimelineAPITimelineResponseDateRangeEnd1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineResponseDateRangeEnd1 performs a merge with any union data inside the SecurityTimelineAPITimelineResponse_DateRange_End, using the provided SecurityTimelineAPITimelineResponseDateRangeEnd1 +func (t *SecurityTimelineAPITimelineResponse_DateRange_End) MergeSecurityTimelineAPITimelineResponseDateRangeEnd1(v SecurityTimelineAPITimelineResponseDateRangeEnd1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPITimelineResponse_DateRange_End) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPITimelineResponse_DateRange_End) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPITimelineResponseDateRangeStart0 returns the union data inside the SecurityTimelineAPITimelineResponse_DateRange_Start as a SecurityTimelineAPITimelineResponseDateRangeStart0 +func (t SecurityTimelineAPITimelineResponse_DateRange_Start) AsSecurityTimelineAPITimelineResponseDateRangeStart0() (SecurityTimelineAPITimelineResponseDateRangeStart0, error) { + var body SecurityTimelineAPITimelineResponseDateRangeStart0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineResponseDateRangeStart0 overwrites any union data inside the SecurityTimelineAPITimelineResponse_DateRange_Start as the provided SecurityTimelineAPITimelineResponseDateRangeStart0 +func (t *SecurityTimelineAPITimelineResponse_DateRange_Start) FromSecurityTimelineAPITimelineResponseDateRangeStart0(v SecurityTimelineAPITimelineResponseDateRangeStart0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineResponseDateRangeStart0 performs a merge with any union data inside the SecurityTimelineAPITimelineResponse_DateRange_Start, using the provided SecurityTimelineAPITimelineResponseDateRangeStart0 +func (t *SecurityTimelineAPITimelineResponse_DateRange_Start) MergeSecurityTimelineAPITimelineResponseDateRangeStart0(v SecurityTimelineAPITimelineResponseDateRangeStart0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPITimelineResponseDateRangeStart1 returns the union data inside the SecurityTimelineAPITimelineResponse_DateRange_Start as a SecurityTimelineAPITimelineResponseDateRangeStart1 +func (t 
SecurityTimelineAPITimelineResponse_DateRange_Start) AsSecurityTimelineAPITimelineResponseDateRangeStart1() (SecurityTimelineAPITimelineResponseDateRangeStart1, error) { + var body SecurityTimelineAPITimelineResponseDateRangeStart1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineResponseDateRangeStart1 overwrites any union data inside the SecurityTimelineAPITimelineResponse_DateRange_Start as the provided SecurityTimelineAPITimelineResponseDateRangeStart1 +func (t *SecurityTimelineAPITimelineResponse_DateRange_Start) FromSecurityTimelineAPITimelineResponseDateRangeStart1(v SecurityTimelineAPITimelineResponseDateRangeStart1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineResponseDateRangeStart1 performs a merge with any union data inside the SecurityTimelineAPITimelineResponse_DateRange_Start, using the provided SecurityTimelineAPITimelineResponseDateRangeStart1 +func (t *SecurityTimelineAPITimelineResponse_DateRange_Start) MergeSecurityTimelineAPITimelineResponseDateRangeStart1(v SecurityTimelineAPITimelineResponseDateRangeStart1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPITimelineResponse_DateRange_Start) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPITimelineResponse_DateRange_Start) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPITimelineResponseEqlOptionsSize0 returns the union data inside the SecurityTimelineAPITimelineResponse_EqlOptions_Size as a SecurityTimelineAPITimelineResponseEqlOptionsSize0 +func (t SecurityTimelineAPITimelineResponse_EqlOptions_Size) AsSecurityTimelineAPITimelineResponseEqlOptionsSize0() (SecurityTimelineAPITimelineResponseEqlOptionsSize0, error) { + var body SecurityTimelineAPITimelineResponseEqlOptionsSize0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineResponseEqlOptionsSize0 overwrites any union data inside the SecurityTimelineAPITimelineResponse_EqlOptions_Size as the provided SecurityTimelineAPITimelineResponseEqlOptionsSize0 +func (t *SecurityTimelineAPITimelineResponse_EqlOptions_Size) FromSecurityTimelineAPITimelineResponseEqlOptionsSize0(v SecurityTimelineAPITimelineResponseEqlOptionsSize0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineResponseEqlOptionsSize0 performs a merge with any union data inside the SecurityTimelineAPITimelineResponse_EqlOptions_Size, using the provided SecurityTimelineAPITimelineResponseEqlOptionsSize0 +func (t *SecurityTimelineAPITimelineResponse_EqlOptions_Size) MergeSecurityTimelineAPITimelineResponseEqlOptionsSize0(v SecurityTimelineAPITimelineResponseEqlOptionsSize0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPITimelineResponseEqlOptionsSize1 returns the union data inside the SecurityTimelineAPITimelineResponse_EqlOptions_Size as a SecurityTimelineAPITimelineResponseEqlOptionsSize1 +func (t SecurityTimelineAPITimelineResponse_EqlOptions_Size) AsSecurityTimelineAPITimelineResponseEqlOptionsSize1() (SecurityTimelineAPITimelineResponseEqlOptionsSize1, error) { + var body 
SecurityTimelineAPITimelineResponseEqlOptionsSize1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineResponseEqlOptionsSize1 overwrites any union data inside the SecurityTimelineAPITimelineResponse_EqlOptions_Size as the provided SecurityTimelineAPITimelineResponseEqlOptionsSize1 +func (t *SecurityTimelineAPITimelineResponse_EqlOptions_Size) FromSecurityTimelineAPITimelineResponseEqlOptionsSize1(v SecurityTimelineAPITimelineResponseEqlOptionsSize1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineResponseEqlOptionsSize1 performs a merge with any union data inside the SecurityTimelineAPITimelineResponse_EqlOptions_Size, using the provided SecurityTimelineAPITimelineResponseEqlOptionsSize1 +func (t *SecurityTimelineAPITimelineResponse_EqlOptions_Size) MergeSecurityTimelineAPITimelineResponseEqlOptionsSize1(v SecurityTimelineAPITimelineResponseEqlOptionsSize1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPITimelineResponse_EqlOptions_Size) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPITimelineResponse_EqlOptions_Size) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0 returns the union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End as a SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0 +func (t SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End) AsSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0() (SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0, error) { + var body SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0 overwrites any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End as the provided SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End) FromSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0(v SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0 performs a merge with any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End, using the provided SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End) MergeSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0(v SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1 returns the union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End as a SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1 +func (t SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End) AsSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1() (SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1, error) { + 
var body SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1 overwrites any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End as the provided SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End) FromSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1(v SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1 performs a merge with any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End, using the provided SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End) MergeSecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1(v SecurityTimelineAPITimelineSavedToReturnObjectDateRangeEnd1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_End) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0 returns the union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start as a SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0 +func (t SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start) AsSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0() (SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0, error) { + var body SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0 overwrites any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start as the provided SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start) FromSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0(v SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0 performs a merge with any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start, using the provided SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start) MergeSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0(v SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1 returns the union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start as a SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1 +func (t 
SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start) AsSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1() (SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1, error) { + var body SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1 overwrites any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start as the provided SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start) FromSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1(v SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1 performs a merge with any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start, using the provided SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start) MergeSecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1(v SecurityTimelineAPITimelineSavedToReturnObjectDateRangeStart1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPITimelineSavedToReturnObject_DateRange_Start) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0 returns the union data inside the SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size as a SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0 +func (t SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size) AsSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0() (SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0, error) { + var body SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0 overwrites any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size as the provided SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size) FromSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0(v SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0 performs a merge with any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size, using the provided SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size) MergeSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0(v SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// 
AsSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1 returns the union data inside the SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size as a SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1 +func (t SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size) AsSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1() (SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1, error) { + var body SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1 overwrites any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size as the provided SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size) FromSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1(v SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1 performs a merge with any union data inside the SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size, using the provided SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1 +func (t *SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size) MergeSecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1(v SecurityTimelineAPITimelineSavedToReturnObjectEqlOptionsSize1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *SecurityTimelineAPITimelineSavedToReturnObject_EqlOptions_Size) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1StreamsVars0 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars0 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars0() (AgentPolicyPackagePolicies1Inputs1StreamsVars0, error) { + var body AgentPolicyPackagePolicies1Inputs1StreamsVars0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1StreamsVars0 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars0 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars0(v AgentPolicyPackagePolicies1Inputs1StreamsVars0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars0 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars0 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars0(v AgentPolicyPackagePolicies1Inputs1StreamsVars0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + 
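+ // Unlike the From* helper above, which overwrites t.union wholesale, Merge* folds the newly
+ // marshalled variant into whatever JSON the union already holds: runtime.JSONMerge (a helper
+ // from the code generator's runtime package) combines the two documents, with fields from the
+ // new value expected to take precedence over fields that were already present.
+ // Rough usage sketch (names taken from the generated accessors in this file):
+ //
+ //	var in AgentPolicyPackagePolicies1Inputs1StreamsVars0
+ //	var u AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties
+ //	_ = u.FromAgentPolicyPackagePolicies1Inputs1StreamsVars0(in)  // seed the union
+ //	_ = u.MergeAgentPolicyPackagePolicies1Inputs1StreamsVars0(in) // layer more data on top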
merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1StreamsVars1 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars1 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars1() (AgentPolicyPackagePolicies1Inputs1StreamsVars1, error) { + var body AgentPolicyPackagePolicies1Inputs1StreamsVars1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1StreamsVars1 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars1 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars1(v AgentPolicyPackagePolicies1Inputs1StreamsVars1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars1 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars1 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars1(v AgentPolicyPackagePolicies1Inputs1StreamsVars1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1StreamsVars2 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars2 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars2() (AgentPolicyPackagePolicies1Inputs1StreamsVars2, error) { + var body AgentPolicyPackagePolicies1Inputs1StreamsVars2 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1StreamsVars2 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars2 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars2(v AgentPolicyPackagePolicies1Inputs1StreamsVars2) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars2 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars2 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars2(v AgentPolicyPackagePolicies1Inputs1StreamsVars2) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1StreamsVars3 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars3 +func (t 
AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars3() (AgentPolicyPackagePolicies1Inputs1StreamsVars3, error) { + var body AgentPolicyPackagePolicies1Inputs1StreamsVars3 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1StreamsVars3 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars3 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars3(v AgentPolicyPackagePolicies1Inputs1StreamsVars3) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars3 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars3 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars3(v AgentPolicyPackagePolicies1Inputs1StreamsVars3) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1StreamsVars4 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars4 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars4() (AgentPolicyPackagePolicies1Inputs1StreamsVars4, error) { + var body AgentPolicyPackagePolicies1Inputs1StreamsVars4 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1StreamsVars4 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars4 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars4(v AgentPolicyPackagePolicies1Inputs1StreamsVars4) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars4 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars4 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars4(v AgentPolicyPackagePolicies1Inputs1StreamsVars4) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1StreamsVars5 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars5 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars5() (AgentPolicyPackagePolicies1Inputs1StreamsVars5, error) { + var body AgentPolicyPackagePolicies1Inputs1StreamsVars5 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1StreamsVars5 
overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars5 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars5(v AgentPolicyPackagePolicies1Inputs1StreamsVars5) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars5 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars5 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars5(v AgentPolicyPackagePolicies1Inputs1StreamsVars5) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1Vars0 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars0 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars0() (AgentPolicyPackagePolicies1Inputs1Vars0, error) { + var body AgentPolicyPackagePolicies1Inputs1Vars0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1Vars0 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars0 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars0(v AgentPolicyPackagePolicies1Inputs1Vars0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1Vars0 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars0 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars0(v AgentPolicyPackagePolicies1Inputs1Vars0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1Vars1 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars1 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars1() (AgentPolicyPackagePolicies1Inputs1Vars1, error) { + var body AgentPolicyPackagePolicies1Inputs1Vars1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1Vars1 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars1 +func (t 
*AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars1(v AgentPolicyPackagePolicies1Inputs1Vars1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1Vars1 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars1 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars1(v AgentPolicyPackagePolicies1Inputs1Vars1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1Vars2 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars2 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars2() (AgentPolicyPackagePolicies1Inputs1Vars2, error) { + var body AgentPolicyPackagePolicies1Inputs1Vars2 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1Vars2 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars2 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars2(v AgentPolicyPackagePolicies1Inputs1Vars2) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1Vars2 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars2 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars2(v AgentPolicyPackagePolicies1Inputs1Vars2) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1Vars3 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars3 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars3() (AgentPolicyPackagePolicies1Inputs1Vars3, error) { + var body AgentPolicyPackagePolicies1Inputs1Vars3 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1Vars3 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars3 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars3(v AgentPolicyPackagePolicies1Inputs1Vars3) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1Vars3 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars3 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars3(v AgentPolicyPackagePolicies1Inputs1Vars3) error { + b, err := json.Marshal(v) 
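+ // If the variant cannot be marshalled, the helper returns the error before touching t.union,
+ // so a failed merge leaves the JSON already stored in the union intact.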
+ if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1Vars4 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars4 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars4() (AgentPolicyPackagePolicies1Inputs1Vars4, error) { + var body AgentPolicyPackagePolicies1Inputs1Vars4 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1Vars4 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars4 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars4(v AgentPolicyPackagePolicies1Inputs1Vars4) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1Vars4 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars4 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars4(v AgentPolicyPackagePolicies1Inputs1Vars4) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1Vars5 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars5 +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars5() (AgentPolicyPackagePolicies1Inputs1Vars5, error) { + var body AgentPolicyPackagePolicies1Inputs1Vars5 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1Vars5 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars5 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars5(v AgentPolicyPackagePolicies1Inputs1Vars5) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1Vars5 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars5 +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars5(v AgentPolicyPackagePolicies1Inputs1Vars5) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsAgentPolicyPackagePolicies1Inputs0 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs as a AgentPolicyPackagePolicies1Inputs0 +func (t AgentPolicy_PackagePolicies_1_Inputs) 
AsAgentPolicyPackagePolicies1Inputs0() (AgentPolicyPackagePolicies1Inputs0, error) { + var body AgentPolicyPackagePolicies1Inputs0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs0 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs as the provided AgentPolicyPackagePolicies1Inputs0 +func (t *AgentPolicy_PackagePolicies_1_Inputs) FromAgentPolicyPackagePolicies1Inputs0(v AgentPolicyPackagePolicies1Inputs0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs0 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs, using the provided AgentPolicyPackagePolicies1Inputs0 +func (t *AgentPolicy_PackagePolicies_1_Inputs) MergeAgentPolicyPackagePolicies1Inputs0(v AgentPolicyPackagePolicies1Inputs0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Inputs1 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs as a AgentPolicyPackagePolicies1Inputs1 +func (t AgentPolicy_PackagePolicies_1_Inputs) AsAgentPolicyPackagePolicies1Inputs1() (AgentPolicyPackagePolicies1Inputs1, error) { + var body AgentPolicyPackagePolicies1Inputs1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Inputs1 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs as the provided AgentPolicyPackagePolicies1Inputs1 +func (t *AgentPolicy_PackagePolicies_1_Inputs) FromAgentPolicyPackagePolicies1Inputs1(v AgentPolicyPackagePolicies1Inputs1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Inputs1 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs, using the provided AgentPolicyPackagePolicies1Inputs1 +func (t *AgentPolicy_PackagePolicies_1_Inputs) MergeAgentPolicyPackagePolicies1Inputs1(v AgentPolicyPackagePolicies1Inputs1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t AgentPolicy_PackagePolicies_1_Inputs) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *AgentPolicy_PackagePolicies_1_Inputs) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsAgentPolicyPackagePolicies1Vars10 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars10 +func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars10() (AgentPolicyPackagePolicies1Vars10, error) { + var body AgentPolicyPackagePolicies1Vars10 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Vars10 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars10 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars10(v AgentPolicyPackagePolicies1Vars10) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Vars10 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars10 +func (t 
*AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars10(v AgentPolicyPackagePolicies1Vars10) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Vars11 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars11 +func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars11() (AgentPolicyPackagePolicies1Vars11, error) { + var body AgentPolicyPackagePolicies1Vars11 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Vars11 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars11 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars11(v AgentPolicyPackagePolicies1Vars11) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Vars11 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars11 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars11(v AgentPolicyPackagePolicies1Vars11) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Vars12 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars12 +func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars12() (AgentPolicyPackagePolicies1Vars12, error) { + var body AgentPolicyPackagePolicies1Vars12 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Vars12 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars12 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars12(v AgentPolicyPackagePolicies1Vars12) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Vars12 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars12 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars12(v AgentPolicyPackagePolicies1Vars12) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Vars13 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars13 +func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars13() (AgentPolicyPackagePolicies1Vars13, error) { + var body AgentPolicyPackagePolicies1Vars13 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Vars13 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided 
AgentPolicyPackagePolicies1Vars13 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars13(v AgentPolicyPackagePolicies1Vars13) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Vars13 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars13 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars13(v AgentPolicyPackagePolicies1Vars13) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Vars14 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars14 +func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars14() (AgentPolicyPackagePolicies1Vars14, error) { + var body AgentPolicyPackagePolicies1Vars14 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Vars14 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars14 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars14(v AgentPolicyPackagePolicies1Vars14) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Vars14 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars14 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars14(v AgentPolicyPackagePolicies1Vars14) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Vars15 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars15 +func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars15() (AgentPolicyPackagePolicies1Vars15, error) { + var body AgentPolicyPackagePolicies1Vars15 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Vars15 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars15 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars15(v AgentPolicyPackagePolicies1Vars15) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Vars15 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars15 +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars15(v AgentPolicyPackagePolicies1Vars15) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() 
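+ // The union is held as raw JSON, so MarshalJSON just passes back the bytes captured by the
+ // most recent From*/Merge*/UnmarshalJSON call instead of re-encoding a typed value.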
+ return b, err +} + +func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsAgentPolicyPackagePolicies1Vars0 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars as a AgentPolicyPackagePolicies1Vars0 +func (t AgentPolicy_PackagePolicies_1_Vars) AsAgentPolicyPackagePolicies1Vars0() (AgentPolicyPackagePolicies1Vars0, error) { + var body AgentPolicyPackagePolicies1Vars0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Vars0 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars as the provided AgentPolicyPackagePolicies1Vars0 +func (t *AgentPolicy_PackagePolicies_1_Vars) FromAgentPolicyPackagePolicies1Vars0(v AgentPolicyPackagePolicies1Vars0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Vars0 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars, using the provided AgentPolicyPackagePolicies1Vars0 +func (t *AgentPolicy_PackagePolicies_1_Vars) MergeAgentPolicyPackagePolicies1Vars0(v AgentPolicyPackagePolicies1Vars0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1Vars1 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars as a AgentPolicyPackagePolicies1Vars1 +func (t AgentPolicy_PackagePolicies_1_Vars) AsAgentPolicyPackagePolicies1Vars1() (AgentPolicyPackagePolicies1Vars1, error) { + var body AgentPolicyPackagePolicies1Vars1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1Vars1 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars as the provided AgentPolicyPackagePolicies1Vars1 +func (t *AgentPolicy_PackagePolicies_1_Vars) FromAgentPolicyPackagePolicies1Vars1(v AgentPolicyPackagePolicies1Vars1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1Vars1 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars, using the provided AgentPolicyPackagePolicies1Vars1 +func (t *AgentPolicy_PackagePolicies_1_Vars) MergeAgentPolicyPackagePolicies1Vars1(v AgentPolicyPackagePolicies1Vars1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t AgentPolicy_PackagePolicies_1_Vars) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *AgentPolicy_PackagePolicies_1_Vars) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsAgentPolicyPackagePolicies0 returns the union data inside the AgentPolicy_PackagePolicies as a AgentPolicyPackagePolicies0 +func (t AgentPolicy_PackagePolicies) AsAgentPolicyPackagePolicies0() (AgentPolicyPackagePolicies0, error) { + var body AgentPolicyPackagePolicies0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies0 overwrites any union data inside the AgentPolicy_PackagePolicies as the provided AgentPolicyPackagePolicies0 +func (t *AgentPolicy_PackagePolicies) FromAgentPolicyPackagePolicies0(v AgentPolicyPackagePolicies0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies0 performs a merge with any union data inside the 
AgentPolicy_PackagePolicies, using the provided AgentPolicyPackagePolicies0 +func (t *AgentPolicy_PackagePolicies) MergeAgentPolicyPackagePolicies0(v AgentPolicyPackagePolicies0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyPackagePolicies1 returns the union data inside the AgentPolicy_PackagePolicies as a AgentPolicyPackagePolicies1 +func (t AgentPolicy_PackagePolicies) AsAgentPolicyPackagePolicies1() (AgentPolicyPackagePolicies1, error) { + var body AgentPolicyPackagePolicies1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyPackagePolicies1 overwrites any union data inside the AgentPolicy_PackagePolicies as the provided AgentPolicyPackagePolicies1 +func (t *AgentPolicy_PackagePolicies) FromAgentPolicyPackagePolicies1(v AgentPolicyPackagePolicies1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyPackagePolicies1 performs a merge with any union data inside the AgentPolicy_PackagePolicies, using the provided AgentPolicyPackagePolicies1 +func (t *AgentPolicy_PackagePolicies) MergeAgentPolicyPackagePolicies1(v AgentPolicyPackagePolicies1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t AgentPolicy_PackagePolicies) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *AgentPolicy_PackagePolicies) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsAgentPolicyGlobalDataTagsItemValue0 returns the union data inside the AgentPolicyGlobalDataTagsItem_Value as a AgentPolicyGlobalDataTagsItemValue0 +func (t AgentPolicyGlobalDataTagsItem_Value) AsAgentPolicyGlobalDataTagsItemValue0() (AgentPolicyGlobalDataTagsItemValue0, error) { + var body AgentPolicyGlobalDataTagsItemValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyGlobalDataTagsItemValue0 overwrites any union data inside the AgentPolicyGlobalDataTagsItem_Value as the provided AgentPolicyGlobalDataTagsItemValue0 +func (t *AgentPolicyGlobalDataTagsItem_Value) FromAgentPolicyGlobalDataTagsItemValue0(v AgentPolicyGlobalDataTagsItemValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyGlobalDataTagsItemValue0 performs a merge with any union data inside the AgentPolicyGlobalDataTagsItem_Value, using the provided AgentPolicyGlobalDataTagsItemValue0 +func (t *AgentPolicyGlobalDataTagsItem_Value) MergeAgentPolicyGlobalDataTagsItemValue0(v AgentPolicyGlobalDataTagsItemValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsAgentPolicyGlobalDataTagsItemValue1 returns the union data inside the AgentPolicyGlobalDataTagsItem_Value as a AgentPolicyGlobalDataTagsItemValue1 +func (t AgentPolicyGlobalDataTagsItem_Value) AsAgentPolicyGlobalDataTagsItemValue1() (AgentPolicyGlobalDataTagsItemValue1, error) { + var body AgentPolicyGlobalDataTagsItemValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromAgentPolicyGlobalDataTagsItemValue1 overwrites any union data inside the AgentPolicyGlobalDataTagsItem_Value as the provided AgentPolicyGlobalDataTagsItemValue1 +func (t *AgentPolicyGlobalDataTagsItem_Value) FromAgentPolicyGlobalDataTagsItemValue1(v 
AgentPolicyGlobalDataTagsItemValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeAgentPolicyGlobalDataTagsItemValue1 performs a merge with any union data inside the AgentPolicyGlobalDataTagsItem_Value, using the provided AgentPolicyGlobalDataTagsItemValue1 +func (t *AgentPolicyGlobalDataTagsItem_Value) MergeAgentPolicyGlobalDataTagsItemValue1(v AgentPolicyGlobalDataTagsItemValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t AgentPolicyGlobalDataTagsItem_Value) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *AgentPolicyGlobalDataTagsItem_Value) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsBedrockConfig returns the union data inside the CreateConnectorConfig as a BedrockConfig +func (t CreateConnectorConfig) AsBedrockConfig() (BedrockConfig, error) { + var body BedrockConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromBedrockConfig overwrites any union data inside the CreateConnectorConfig as the provided BedrockConfig +func (t *CreateConnectorConfig) FromBedrockConfig(v BedrockConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeBedrockConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided BedrockConfig +func (t *CreateConnectorConfig) MergeBedrockConfig(v BedrockConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCrowdstrikeConfig returns the union data inside the CreateConnectorConfig as a CrowdstrikeConfig +func (t CreateConnectorConfig) AsCrowdstrikeConfig() (CrowdstrikeConfig, error) { + var body CrowdstrikeConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCrowdstrikeConfig overwrites any union data inside the CreateConnectorConfig as the provided CrowdstrikeConfig +func (t *CreateConnectorConfig) FromCrowdstrikeConfig(v CrowdstrikeConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCrowdstrikeConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided CrowdstrikeConfig +func (t *CreateConnectorConfig) MergeCrowdstrikeConfig(v CrowdstrikeConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsD3securityConfig returns the union data inside the CreateConnectorConfig as a D3securityConfig +func (t CreateConnectorConfig) AsD3securityConfig() (D3securityConfig, error) { + var body D3securityConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromD3securityConfig overwrites any union data inside the CreateConnectorConfig as the provided D3securityConfig +func (t *CreateConnectorConfig) FromD3securityConfig(v D3securityConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeD3securityConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided D3securityConfig +func (t *CreateConnectorConfig) MergeD3securityConfig(v D3securityConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsEmailConfig returns the union data inside 
the CreateConnectorConfig as a EmailConfig +func (t CreateConnectorConfig) AsEmailConfig() (EmailConfig, error) { + var body EmailConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromEmailConfig overwrites any union data inside the CreateConnectorConfig as the provided EmailConfig +func (t *CreateConnectorConfig) FromEmailConfig(v EmailConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeEmailConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided EmailConfig +func (t *CreateConnectorConfig) MergeEmailConfig(v EmailConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGeminiConfig returns the union data inside the CreateConnectorConfig as a GeminiConfig +func (t CreateConnectorConfig) AsGeminiConfig() (GeminiConfig, error) { + var body GeminiConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGeminiConfig overwrites any union data inside the CreateConnectorConfig as the provided GeminiConfig +func (t *CreateConnectorConfig) FromGeminiConfig(v GeminiConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGeminiConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided GeminiConfig +func (t *CreateConnectorConfig) MergeGeminiConfig(v GeminiConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsResilientConfig returns the union data inside the CreateConnectorConfig as a ResilientConfig +func (t CreateConnectorConfig) AsResilientConfig() (ResilientConfig, error) { + var body ResilientConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromResilientConfig overwrites any union data inside the CreateConnectorConfig as the provided ResilientConfig +func (t *CreateConnectorConfig) FromResilientConfig(v ResilientConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeResilientConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ResilientConfig +func (t *CreateConnectorConfig) MergeResilientConfig(v ResilientConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsIndexConfig returns the union data inside the CreateConnectorConfig as a IndexConfig +func (t CreateConnectorConfig) AsIndexConfig() (IndexConfig, error) { + var body IndexConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromIndexConfig overwrites any union data inside the CreateConnectorConfig as the provided IndexConfig +func (t *CreateConnectorConfig) FromIndexConfig(v IndexConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeIndexConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided IndexConfig +func (t *CreateConnectorConfig) MergeIndexConfig(v IndexConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsJiraConfig returns the union data inside the CreateConnectorConfig as a JiraConfig +func (t CreateConnectorConfig) AsJiraConfig() (JiraConfig, error) { + var body JiraConfig + err := 
json.Unmarshal(t.union, &body) + return body, err +} + +// FromJiraConfig overwrites any union data inside the CreateConnectorConfig as the provided JiraConfig +func (t *CreateConnectorConfig) FromJiraConfig(v JiraConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeJiraConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided JiraConfig +func (t *CreateConnectorConfig) MergeJiraConfig(v JiraConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGenaiAzureConfig returns the union data inside the CreateConnectorConfig as a GenaiAzureConfig +func (t CreateConnectorConfig) AsGenaiAzureConfig() (GenaiAzureConfig, error) { + var body GenaiAzureConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGenaiAzureConfig overwrites any union data inside the CreateConnectorConfig as the provided GenaiAzureConfig +func (t *CreateConnectorConfig) FromGenaiAzureConfig(v GenaiAzureConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGenaiAzureConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided GenaiAzureConfig +func (t *CreateConnectorConfig) MergeGenaiAzureConfig(v GenaiAzureConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGenaiOpenaiConfig returns the union data inside the CreateConnectorConfig as a GenaiOpenaiConfig +func (t CreateConnectorConfig) AsGenaiOpenaiConfig() (GenaiOpenaiConfig, error) { + var body GenaiOpenaiConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGenaiOpenaiConfig overwrites any union data inside the CreateConnectorConfig as the provided GenaiOpenaiConfig +func (t *CreateConnectorConfig) FromGenaiOpenaiConfig(v GenaiOpenaiConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGenaiOpenaiConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided GenaiOpenaiConfig +func (t *CreateConnectorConfig) MergeGenaiOpenaiConfig(v GenaiOpenaiConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGenaiOpenaiOtherConfig returns the union data inside the CreateConnectorConfig as a GenaiOpenaiOtherConfig +func (t CreateConnectorConfig) AsGenaiOpenaiOtherConfig() (GenaiOpenaiOtherConfig, error) { + var body GenaiOpenaiOtherConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGenaiOpenaiOtherConfig overwrites any union data inside the CreateConnectorConfig as the provided GenaiOpenaiOtherConfig +func (t *CreateConnectorConfig) FromGenaiOpenaiOtherConfig(v GenaiOpenaiOtherConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGenaiOpenaiOtherConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided GenaiOpenaiOtherConfig +func (t *CreateConnectorConfig) MergeGenaiOpenaiOtherConfig(v GenaiOpenaiOtherConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOpsgenieConfig returns the union data inside the CreateConnectorConfig as a OpsgenieConfig +func (t CreateConnectorConfig) 
AsOpsgenieConfig() (OpsgenieConfig, error) { + var body OpsgenieConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOpsgenieConfig overwrites any union data inside the CreateConnectorConfig as the provided OpsgenieConfig +func (t *CreateConnectorConfig) FromOpsgenieConfig(v OpsgenieConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOpsgenieConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided OpsgenieConfig +func (t *CreateConnectorConfig) MergeOpsgenieConfig(v OpsgenieConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPagerdutyConfig returns the union data inside the CreateConnectorConfig as a PagerdutyConfig +func (t CreateConnectorConfig) AsPagerdutyConfig() (PagerdutyConfig, error) { + var body PagerdutyConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPagerdutyConfig overwrites any union data inside the CreateConnectorConfig as the provided PagerdutyConfig +func (t *CreateConnectorConfig) FromPagerdutyConfig(v PagerdutyConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePagerdutyConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided PagerdutyConfig +func (t *CreateConnectorConfig) MergePagerdutyConfig(v PagerdutyConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSentineloneConfig returns the union data inside the CreateConnectorConfig as a SentineloneConfig +func (t CreateConnectorConfig) AsSentineloneConfig() (SentineloneConfig, error) { + var body SentineloneConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSentineloneConfig overwrites any union data inside the CreateConnectorConfig as the provided SentineloneConfig +func (t *CreateConnectorConfig) FromSentineloneConfig(v SentineloneConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSentineloneConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided SentineloneConfig +func (t *CreateConnectorConfig) MergeSentineloneConfig(v SentineloneConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServicenowConfig returns the union data inside the CreateConnectorConfig as a ServicenowConfig +func (t CreateConnectorConfig) AsServicenowConfig() (ServicenowConfig, error) { + var body ServicenowConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServicenowConfig overwrites any union data inside the CreateConnectorConfig as the provided ServicenowConfig +func (t *CreateConnectorConfig) FromServicenowConfig(v ServicenowConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServicenowConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ServicenowConfig +func (t *CreateConnectorConfig) MergeServicenowConfig(v ServicenowConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServicenowItomConfig returns the union data inside the CreateConnectorConfig as a ServicenowItomConfig +func (t 
CreateConnectorConfig) AsServicenowItomConfig() (ServicenowItomConfig, error) { + var body ServicenowItomConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServicenowItomConfig overwrites any union data inside the CreateConnectorConfig as the provided ServicenowItomConfig +func (t *CreateConnectorConfig) FromServicenowItomConfig(v ServicenowItomConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServicenowItomConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ServicenowItomConfig +func (t *CreateConnectorConfig) MergeServicenowItomConfig(v ServicenowItomConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSlackApiConfig returns the union data inside the CreateConnectorConfig as a SlackApiConfig +func (t CreateConnectorConfig) AsSlackApiConfig() (SlackApiConfig, error) { + var body SlackApiConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSlackApiConfig overwrites any union data inside the CreateConnectorConfig as the provided SlackApiConfig +func (t *CreateConnectorConfig) FromSlackApiConfig(v SlackApiConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSlackApiConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided SlackApiConfig +func (t *CreateConnectorConfig) MergeSlackApiConfig(v SlackApiConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSwimlaneConfig returns the union data inside the CreateConnectorConfig as a SwimlaneConfig +func (t CreateConnectorConfig) AsSwimlaneConfig() (SwimlaneConfig, error) { + var body SwimlaneConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSwimlaneConfig overwrites any union data inside the CreateConnectorConfig as the provided SwimlaneConfig +func (t *CreateConnectorConfig) FromSwimlaneConfig(v SwimlaneConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSwimlaneConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided SwimlaneConfig +func (t *CreateConnectorConfig) MergeSwimlaneConfig(v SwimlaneConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsThehiveConfig returns the union data inside the CreateConnectorConfig as a ThehiveConfig +func (t CreateConnectorConfig) AsThehiveConfig() (ThehiveConfig, error) { + var body ThehiveConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromThehiveConfig overwrites any union data inside the CreateConnectorConfig as the provided ThehiveConfig +func (t *CreateConnectorConfig) FromThehiveConfig(v ThehiveConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeThehiveConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ThehiveConfig +func (t *CreateConnectorConfig) MergeThehiveConfig(v ThehiveConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTinesConfig returns the union data inside the CreateConnectorConfig as a TinesConfig +func (t 
CreateConnectorConfig) AsTinesConfig() (TinesConfig, error) { + var body TinesConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTinesConfig overwrites any union data inside the CreateConnectorConfig as the provided TinesConfig +func (t *CreateConnectorConfig) FromTinesConfig(v TinesConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTinesConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided TinesConfig +func (t *CreateConnectorConfig) MergeTinesConfig(v TinesConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTorqConfig returns the union data inside the CreateConnectorConfig as a TorqConfig +func (t CreateConnectorConfig) AsTorqConfig() (TorqConfig, error) { + var body TorqConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTorqConfig overwrites any union data inside the CreateConnectorConfig as the provided TorqConfig +func (t *CreateConnectorConfig) FromTorqConfig(v TorqConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTorqConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided TorqConfig +func (t *CreateConnectorConfig) MergeTorqConfig(v TorqConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsWebhookConfig returns the union data inside the CreateConnectorConfig as a WebhookConfig +func (t CreateConnectorConfig) AsWebhookConfig() (WebhookConfig, error) { + var body WebhookConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromWebhookConfig overwrites any union data inside the CreateConnectorConfig as the provided WebhookConfig +func (t *CreateConnectorConfig) FromWebhookConfig(v WebhookConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeWebhookConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided WebhookConfig +func (t *CreateConnectorConfig) MergeWebhookConfig(v WebhookConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesWebhookConfig returns the union data inside the CreateConnectorConfig as a CasesWebhookConfig +func (t CreateConnectorConfig) AsCasesWebhookConfig() (CasesWebhookConfig, error) { + var body CasesWebhookConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesWebhookConfig overwrites any union data inside the CreateConnectorConfig as the provided CasesWebhookConfig +func (t *CreateConnectorConfig) FromCasesWebhookConfig(v CasesWebhookConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesWebhookConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided CasesWebhookConfig +func (t *CreateConnectorConfig) MergeCasesWebhookConfig(v CasesWebhookConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsXmattersConfig returns the union data inside the CreateConnectorConfig as a XmattersConfig +func (t CreateConnectorConfig) AsXmattersConfig() (XmattersConfig, error) { + var body XmattersConfig + err := 
json.Unmarshal(t.union, &body) + return body, err +} + +// FromXmattersConfig overwrites any union data inside the CreateConnectorConfig as the provided XmattersConfig +func (t *CreateConnectorConfig) FromXmattersConfig(v XmattersConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeXmattersConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided XmattersConfig +func (t *CreateConnectorConfig) MergeXmattersConfig(v XmattersConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsBedrockSecrets returns the union data inside the CreateConnectorSecrets as a BedrockSecrets +func (t CreateConnectorSecrets) AsBedrockSecrets() (BedrockSecrets, error) { + var body BedrockSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromBedrockSecrets overwrites any union data inside the CreateConnectorSecrets as the provided BedrockSecrets +func (t *CreateConnectorSecrets) FromBedrockSecrets(v BedrockSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeBedrockSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided BedrockSecrets +func (t *CreateConnectorSecrets) MergeBedrockSecrets(v BedrockSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCrowdstrikeSecrets returns the union data inside the CreateConnectorSecrets as a CrowdstrikeSecrets +func (t CreateConnectorSecrets) AsCrowdstrikeSecrets() (CrowdstrikeSecrets, error) { + var body CrowdstrikeSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCrowdstrikeSecrets overwrites any union data inside the CreateConnectorSecrets as the provided CrowdstrikeSecrets +func (t *CreateConnectorSecrets) FromCrowdstrikeSecrets(v CrowdstrikeSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCrowdstrikeSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided CrowdstrikeSecrets +func (t *CreateConnectorSecrets) MergeCrowdstrikeSecrets(v CrowdstrikeSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsD3securitySecrets returns the union data inside the CreateConnectorSecrets as a D3securitySecrets +func (t CreateConnectorSecrets) AsD3securitySecrets() (D3securitySecrets, error) { + var body D3securitySecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromD3securitySecrets overwrites any union data inside the CreateConnectorSecrets as the provided D3securitySecrets +func (t *CreateConnectorSecrets) FromD3securitySecrets(v D3securitySecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeD3securitySecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided D3securitySecrets +func (t *CreateConnectorSecrets) MergeD3securitySecrets(v D3securitySecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsEmailSecrets returns the union data inside the CreateConnectorSecrets as a EmailSecrets +func (t CreateConnectorSecrets) AsEmailSecrets() (EmailSecrets, error) 
{ + var body EmailSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromEmailSecrets overwrites any union data inside the CreateConnectorSecrets as the provided EmailSecrets +func (t *CreateConnectorSecrets) FromEmailSecrets(v EmailSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeEmailSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided EmailSecrets +func (t *CreateConnectorSecrets) MergeEmailSecrets(v EmailSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGeminiSecrets returns the union data inside the CreateConnectorSecrets as a GeminiSecrets +func (t CreateConnectorSecrets) AsGeminiSecrets() (GeminiSecrets, error) { + var body GeminiSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGeminiSecrets overwrites any union data inside the CreateConnectorSecrets as the provided GeminiSecrets +func (t *CreateConnectorSecrets) FromGeminiSecrets(v GeminiSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGeminiSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided GeminiSecrets +func (t *CreateConnectorSecrets) MergeGeminiSecrets(v GeminiSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsResilientSecrets returns the union data inside the CreateConnectorSecrets as a ResilientSecrets +func (t CreateConnectorSecrets) AsResilientSecrets() (ResilientSecrets, error) { + var body ResilientSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromResilientSecrets overwrites any union data inside the CreateConnectorSecrets as the provided ResilientSecrets +func (t *CreateConnectorSecrets) FromResilientSecrets(v ResilientSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeResilientSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided ResilientSecrets +func (t *CreateConnectorSecrets) MergeResilientSecrets(v ResilientSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsJiraSecrets returns the union data inside the CreateConnectorSecrets as a JiraSecrets +func (t CreateConnectorSecrets) AsJiraSecrets() (JiraSecrets, error) { + var body JiraSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromJiraSecrets overwrites any union data inside the CreateConnectorSecrets as the provided JiraSecrets +func (t *CreateConnectorSecrets) FromJiraSecrets(v JiraSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeJiraSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided JiraSecrets +func (t *CreateConnectorSecrets) MergeJiraSecrets(v JiraSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsDefenderSecrets returns the union data inside the CreateConnectorSecrets as a DefenderSecrets +func (t CreateConnectorSecrets) AsDefenderSecrets() (DefenderSecrets, error) { + var body DefenderSecrets + err := json.Unmarshal(t.union, &body) + return 
body, err +} + +// FromDefenderSecrets overwrites any union data inside the CreateConnectorSecrets as the provided DefenderSecrets +func (t *CreateConnectorSecrets) FromDefenderSecrets(v DefenderSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeDefenderSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided DefenderSecrets +func (t *CreateConnectorSecrets) MergeDefenderSecrets(v DefenderSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTeamsSecrets returns the union data inside the CreateConnectorSecrets as a TeamsSecrets +func (t CreateConnectorSecrets) AsTeamsSecrets() (TeamsSecrets, error) { + var body TeamsSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTeamsSecrets overwrites any union data inside the CreateConnectorSecrets as the provided TeamsSecrets +func (t *CreateConnectorSecrets) FromTeamsSecrets(v TeamsSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTeamsSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided TeamsSecrets +func (t *CreateConnectorSecrets) MergeTeamsSecrets(v TeamsSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGenaiSecrets returns the union data inside the CreateConnectorSecrets as a GenaiSecrets +func (t CreateConnectorSecrets) AsGenaiSecrets() (GenaiSecrets, error) { + var body GenaiSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGenaiSecrets overwrites any union data inside the CreateConnectorSecrets as the provided GenaiSecrets +func (t *CreateConnectorSecrets) FromGenaiSecrets(v GenaiSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGenaiSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided GenaiSecrets +func (t *CreateConnectorSecrets) MergeGenaiSecrets(v GenaiSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOpsgenieSecrets returns the union data inside the CreateConnectorSecrets as a OpsgenieSecrets +func (t CreateConnectorSecrets) AsOpsgenieSecrets() (OpsgenieSecrets, error) { + var body OpsgenieSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOpsgenieSecrets overwrites any union data inside the CreateConnectorSecrets as the provided OpsgenieSecrets +func (t *CreateConnectorSecrets) FromOpsgenieSecrets(v OpsgenieSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOpsgenieSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided OpsgenieSecrets +func (t *CreateConnectorSecrets) MergeOpsgenieSecrets(v OpsgenieSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPagerdutySecrets returns the union data inside the CreateConnectorSecrets as a PagerdutySecrets +func (t CreateConnectorSecrets) AsPagerdutySecrets() (PagerdutySecrets, error) { + var body PagerdutySecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPagerdutySecrets overwrites any union 
data inside the CreateConnectorSecrets as the provided PagerdutySecrets +func (t *CreateConnectorSecrets) FromPagerdutySecrets(v PagerdutySecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePagerdutySecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided PagerdutySecrets +func (t *CreateConnectorSecrets) MergePagerdutySecrets(v PagerdutySecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSentineloneSecrets returns the union data inside the CreateConnectorSecrets as a SentineloneSecrets +func (t CreateConnectorSecrets) AsSentineloneSecrets() (SentineloneSecrets, error) { + var body SentineloneSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSentineloneSecrets overwrites any union data inside the CreateConnectorSecrets as the provided SentineloneSecrets +func (t *CreateConnectorSecrets) FromSentineloneSecrets(v SentineloneSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSentineloneSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided SentineloneSecrets +func (t *CreateConnectorSecrets) MergeSentineloneSecrets(v SentineloneSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServicenowSecrets returns the union data inside the CreateConnectorSecrets as a ServicenowSecrets +func (t CreateConnectorSecrets) AsServicenowSecrets() (ServicenowSecrets, error) { + var body ServicenowSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServicenowSecrets overwrites any union data inside the CreateConnectorSecrets as the provided ServicenowSecrets +func (t *CreateConnectorSecrets) FromServicenowSecrets(v ServicenowSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServicenowSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided ServicenowSecrets +func (t *CreateConnectorSecrets) MergeServicenowSecrets(v ServicenowSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSlackApiSecrets returns the union data inside the CreateConnectorSecrets as a SlackApiSecrets +func (t CreateConnectorSecrets) AsSlackApiSecrets() (SlackApiSecrets, error) { + var body SlackApiSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSlackApiSecrets overwrites any union data inside the CreateConnectorSecrets as the provided SlackApiSecrets +func (t *CreateConnectorSecrets) FromSlackApiSecrets(v SlackApiSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSlackApiSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided SlackApiSecrets +func (t *CreateConnectorSecrets) MergeSlackApiSecrets(v SlackApiSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSwimlaneSecrets returns the union data inside the CreateConnectorSecrets as a SwimlaneSecrets +func (t CreateConnectorSecrets) AsSwimlaneSecrets() (SwimlaneSecrets, error) { + var body SwimlaneSecrets + err := 
json.Unmarshal(t.union, &body) + return body, err +} + +// FromSwimlaneSecrets overwrites any union data inside the CreateConnectorSecrets as the provided SwimlaneSecrets +func (t *CreateConnectorSecrets) FromSwimlaneSecrets(v SwimlaneSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSwimlaneSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided SwimlaneSecrets +func (t *CreateConnectorSecrets) MergeSwimlaneSecrets(v SwimlaneSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsThehiveSecrets returns the union data inside the CreateConnectorSecrets as a ThehiveSecrets +func (t CreateConnectorSecrets) AsThehiveSecrets() (ThehiveSecrets, error) { + var body ThehiveSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromThehiveSecrets overwrites any union data inside the CreateConnectorSecrets as the provided ThehiveSecrets +func (t *CreateConnectorSecrets) FromThehiveSecrets(v ThehiveSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeThehiveSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided ThehiveSecrets +func (t *CreateConnectorSecrets) MergeThehiveSecrets(v ThehiveSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTinesSecrets returns the union data inside the CreateConnectorSecrets as a TinesSecrets +func (t CreateConnectorSecrets) AsTinesSecrets() (TinesSecrets, error) { + var body TinesSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTinesSecrets overwrites any union data inside the CreateConnectorSecrets as the provided TinesSecrets +func (t *CreateConnectorSecrets) FromTinesSecrets(v TinesSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTinesSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided TinesSecrets +func (t *CreateConnectorSecrets) MergeTinesSecrets(v TinesSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTorqSecrets returns the union data inside the CreateConnectorSecrets as a TorqSecrets +func (t CreateConnectorSecrets) AsTorqSecrets() (TorqSecrets, error) { + var body TorqSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTorqSecrets overwrites any union data inside the CreateConnectorSecrets as the provided TorqSecrets +func (t *CreateConnectorSecrets) FromTorqSecrets(v TorqSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTorqSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided TorqSecrets +func (t *CreateConnectorSecrets) MergeTorqSecrets(v TorqSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsWebhookSecrets returns the union data inside the CreateConnectorSecrets as a WebhookSecrets +func (t CreateConnectorSecrets) AsWebhookSecrets() (WebhookSecrets, error) { + var body WebhookSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromWebhookSecrets overwrites any union 
data inside the CreateConnectorSecrets as the provided WebhookSecrets +func (t *CreateConnectorSecrets) FromWebhookSecrets(v WebhookSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeWebhookSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided WebhookSecrets +func (t *CreateConnectorSecrets) MergeWebhookSecrets(v WebhookSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesWebhookSecrets returns the union data inside the CreateConnectorSecrets as a CasesWebhookSecrets +func (t CreateConnectorSecrets) AsCasesWebhookSecrets() (CasesWebhookSecrets, error) { + var body CasesWebhookSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesWebhookSecrets overwrites any union data inside the CreateConnectorSecrets as the provided CasesWebhookSecrets +func (t *CreateConnectorSecrets) FromCasesWebhookSecrets(v CasesWebhookSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesWebhookSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided CasesWebhookSecrets +func (t *CreateConnectorSecrets) MergeCasesWebhookSecrets(v CasesWebhookSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsXmattersSecrets returns the union data inside the CreateConnectorSecrets as a XmattersSecrets +func (t CreateConnectorSecrets) AsXmattersSecrets() (XmattersSecrets, error) { + var body XmattersSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromXmattersSecrets overwrites any union data inside the CreateConnectorSecrets as the provided XmattersSecrets +func (t *CreateConnectorSecrets) FromXmattersSecrets(v XmattersSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeXmattersSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided XmattersSecrets +func (t *CreateConnectorSecrets) MergeXmattersSecrets(v XmattersSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCreateParamResponse0 returns the union data inside the CreateParamResponse as a CreateParamResponse0 +func (t CreateParamResponse) AsCreateParamResponse0() (CreateParamResponse0, error) { + var body CreateParamResponse0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCreateParamResponse0 overwrites any union data inside the CreateParamResponse as the provided CreateParamResponse0 +func (t *CreateParamResponse) FromCreateParamResponse0(v CreateParamResponse0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCreateParamResponse0 performs a merge with any union data inside the CreateParamResponse, using the provided CreateParamResponse0 +func (t *CreateParamResponse) MergeCreateParamResponse0(v CreateParamResponse0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSyntheticsPostParameterResponse returns the union data inside the CreateParamResponse as a SyntheticsPostParameterResponse +func (t CreateParamResponse) AsSyntheticsPostParameterResponse() 
(SyntheticsPostParameterResponse, error) { + var body SyntheticsPostParameterResponse + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSyntheticsPostParameterResponse overwrites any union data inside the CreateParamResponse as the provided SyntheticsPostParameterResponse +func (t *CreateParamResponse) FromSyntheticsPostParameterResponse(v SyntheticsPostParameterResponse) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSyntheticsPostParameterResponse performs a merge with any union data inside the CreateParamResponse, using the provided SyntheticsPostParameterResponse +func (t *CreateParamResponse) MergeSyntheticsPostParameterResponse(v SyntheticsPostParameterResponse) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CreateParamResponse) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CreateParamResponse) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputElasticsearchSecretsSslKey0 returns the union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as a NewOutputElasticsearchSecretsSslKey0 +func (t NewOutputElasticsearch_Secrets_Ssl_Key) AsNewOutputElasticsearchSecretsSslKey0() (NewOutputElasticsearchSecretsSslKey0, error) { + var body NewOutputElasticsearchSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputElasticsearchSecretsSslKey0 overwrites any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as the provided NewOutputElasticsearchSecretsSslKey0 +func (t *NewOutputElasticsearch_Secrets_Ssl_Key) FromNewOutputElasticsearchSecretsSslKey0(v NewOutputElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputElasticsearchSecretsSslKey0 performs a merge with any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key, using the provided NewOutputElasticsearchSecretsSslKey0 +func (t *NewOutputElasticsearch_Secrets_Ssl_Key) MergeNewOutputElasticsearchSecretsSslKey0(v NewOutputElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputElasticsearchSecretsSslKey1 returns the union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as a NewOutputElasticsearchSecretsSslKey1 +func (t NewOutputElasticsearch_Secrets_Ssl_Key) AsNewOutputElasticsearchSecretsSslKey1() (NewOutputElasticsearchSecretsSslKey1, error) { + var body NewOutputElasticsearchSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputElasticsearchSecretsSslKey1 overwrites any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as the provided NewOutputElasticsearchSecretsSslKey1 +func (t *NewOutputElasticsearch_Secrets_Ssl_Key) FromNewOutputElasticsearchSecretsSslKey1(v NewOutputElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputElasticsearchSecretsSslKey1 performs a merge with any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key, using the provided NewOutputElasticsearchSecretsSslKey1 +func (t *NewOutputElasticsearch_Secrets_Ssl_Key) MergeNewOutputElasticsearchSecretsSslKey1(v NewOutputElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + 
+ merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputKafkaSecretsPassword0 returns the union data inside the NewOutputKafka_Secrets_Password as a NewOutputKafkaSecretsPassword0 +func (t NewOutputKafka_Secrets_Password) AsNewOutputKafkaSecretsPassword0() (NewOutputKafkaSecretsPassword0, error) { + var body NewOutputKafkaSecretsPassword0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsPassword0 overwrites any union data inside the NewOutputKafka_Secrets_Password as the provided NewOutputKafkaSecretsPassword0 +func (t *NewOutputKafka_Secrets_Password) FromNewOutputKafkaSecretsPassword0(v NewOutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsPassword0 performs a merge with any union data inside the NewOutputKafka_Secrets_Password, using the provided NewOutputKafkaSecretsPassword0 +func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword0(v NewOutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputKafkaSecretsPassword1 returns the union data inside the NewOutputKafka_Secrets_Password as a NewOutputKafkaSecretsPassword1 +func (t NewOutputKafka_Secrets_Password) AsNewOutputKafkaSecretsPassword1() (NewOutputKafkaSecretsPassword1, error) { + var body NewOutputKafkaSecretsPassword1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsPassword1 overwrites any union data inside the NewOutputKafka_Secrets_Password as the provided NewOutputKafkaSecretsPassword1 +func (t *NewOutputKafka_Secrets_Password) FromNewOutputKafkaSecretsPassword1(v NewOutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsPassword1 performs a merge with any union data inside the NewOutputKafka_Secrets_Password, using the provided NewOutputKafkaSecretsPassword1 +func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword1(v NewOutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputKafkaSecretsSslKey0 returns the union data inside the NewOutputKafka_Secrets_Ssl_Key as a NewOutputKafkaSecretsSslKey0 +func (t NewOutputKafka_Secrets_Ssl_Key) AsNewOutputKafkaSecretsSslKey0() (NewOutputKafkaSecretsSslKey0, error) { + var body NewOutputKafkaSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsSslKey0 overwrites any union data inside the NewOutputKafka_Secrets_Ssl_Key as the provided NewOutputKafkaSecretsSslKey0 +func (t *NewOutputKafka_Secrets_Ssl_Key) FromNewOutputKafkaSecretsSslKey0(v NewOutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + 
return err +} + +// MergeNewOutputKafkaSecretsSslKey0 performs a merge with any union data inside the NewOutputKafka_Secrets_Ssl_Key, using the provided NewOutputKafkaSecretsSslKey0 +func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey0(v NewOutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputKafkaSecretsSslKey1 returns the union data inside the NewOutputKafka_Secrets_Ssl_Key as a NewOutputKafkaSecretsSslKey1 +func (t NewOutputKafka_Secrets_Ssl_Key) AsNewOutputKafkaSecretsSslKey1() (NewOutputKafkaSecretsSslKey1, error) { + var body NewOutputKafkaSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafkaSecretsSslKey1 overwrites any union data inside the NewOutputKafka_Secrets_Ssl_Key as the provided NewOutputKafkaSecretsSslKey1 +func (t *NewOutputKafka_Secrets_Ssl_Key) FromNewOutputKafkaSecretsSslKey1(v NewOutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafkaSecretsSslKey1 performs a merge with any union data inside the NewOutputKafka_Secrets_Ssl_Key, using the provided NewOutputKafkaSecretsSslKey1 +func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey1(v NewOutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputLogstashSecretsSslKey0 returns the union data inside the NewOutputLogstash_Secrets_Ssl_Key as a NewOutputLogstashSecretsSslKey0 +func (t NewOutputLogstash_Secrets_Ssl_Key) AsNewOutputLogstashSecretsSslKey0() (NewOutputLogstashSecretsSslKey0, error) { + var body NewOutputLogstashSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputLogstashSecretsSslKey0 overwrites any union data inside the NewOutputLogstash_Secrets_Ssl_Key as the provided NewOutputLogstashSecretsSslKey0 +func (t *NewOutputLogstash_Secrets_Ssl_Key) FromNewOutputLogstashSecretsSslKey0(v NewOutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputLogstashSecretsSslKey0 performs a merge with any union data inside the NewOutputLogstash_Secrets_Ssl_Key, using the provided NewOutputLogstashSecretsSslKey0 +func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey0(v NewOutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputLogstashSecretsSslKey1 returns the union data inside the NewOutputLogstash_Secrets_Ssl_Key as a NewOutputLogstashSecretsSslKey1 +func (t NewOutputLogstash_Secrets_Ssl_Key) AsNewOutputLogstashSecretsSslKey1() (NewOutputLogstashSecretsSslKey1, error) { + var body NewOutputLogstashSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputLogstashSecretsSslKey1 overwrites any union data inside the NewOutputLogstash_Secrets_Ssl_Key as the provided NewOutputLogstashSecretsSslKey1 +func (t *NewOutputLogstash_Secrets_Ssl_Key) 
FromNewOutputLogstashSecretsSslKey1(v NewOutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputLogstashSecretsSslKey1 performs a merge with any union data inside the NewOutputLogstash_Secrets_Ssl_Key, using the provided NewOutputLogstashSecretsSslKey1 +func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey1(v NewOutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as a NewOutputRemoteElasticsearchSecretsServiceToken0 +func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) AsNewOutputRemoteElasticsearchSecretsServiceToken0() (NewOutputRemoteElasticsearchSecretsServiceToken0, error) { + var body NewOutputRemoteElasticsearchSecretsServiceToken0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as the provided NewOutputRemoteElasticsearchSecretsServiceToken0 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) FromNewOutputRemoteElasticsearchSecretsServiceToken0(v NewOutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided NewOutputRemoteElasticsearchSecretsServiceToken0 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemoteElasticsearchSecretsServiceToken0(v NewOutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as a NewOutputRemoteElasticsearchSecretsServiceToken1 +func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) AsNewOutputRemoteElasticsearchSecretsServiceToken1() (NewOutputRemoteElasticsearchSecretsServiceToken1, error) { + var body NewOutputRemoteElasticsearchSecretsServiceToken1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as the provided NewOutputRemoteElasticsearchSecretsServiceToken1 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) FromNewOutputRemoteElasticsearchSecretsServiceToken1(v NewOutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided NewOutputRemoteElasticsearchSecretsServiceToken1 +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) 
MergeNewOutputRemoteElasticsearchSecretsServiceToken1(v NewOutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputRemoteElasticsearchSecretsSslKey0 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as a NewOutputRemoteElasticsearchSecretsSslKey0 +func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) AsNewOutputRemoteElasticsearchSecretsSslKey0() (NewOutputRemoteElasticsearchSecretsSslKey0, error) { + var body NewOutputRemoteElasticsearchSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearchSecretsSslKey0 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided NewOutputRemoteElasticsearchSecretsSslKey0 +func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) FromNewOutputRemoteElasticsearchSecretsSslKey0(v NewOutputRemoteElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearchSecretsSslKey0 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided NewOutputRemoteElasticsearchSecretsSslKey0 +func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeNewOutputRemoteElasticsearchSecretsSslKey0(v NewOutputRemoteElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputRemoteElasticsearchSecretsSslKey1 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as a NewOutputRemoteElasticsearchSecretsSslKey1 +func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) AsNewOutputRemoteElasticsearchSecretsSslKey1() (NewOutputRemoteElasticsearchSecretsSslKey1, error) { + var body NewOutputRemoteElasticsearchSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearchSecretsSslKey1 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided NewOutputRemoteElasticsearchSecretsSslKey1 +func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) FromNewOutputRemoteElasticsearchSecretsSslKey1(v NewOutputRemoteElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearchSecretsSslKey1 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided NewOutputRemoteElasticsearchSecretsSslKey1 +func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeNewOutputRemoteElasticsearchSecretsSslKey1(v NewOutputRemoteElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := 
t.union.UnmarshalJSON(b) + return err +} + +// AsNewOutputElasticsearch returns the union data inside the NewOutputUnion as a NewOutputElasticsearch +func (t NewOutputUnion) AsNewOutputElasticsearch() (NewOutputElasticsearch, error) { + var body NewOutputElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputElasticsearch +func (t *NewOutputUnion) FromNewOutputElasticsearch(v NewOutputElasticsearch) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputElasticsearch +func (t *NewOutputUnion) MergeNewOutputElasticsearch(v NewOutputElasticsearch) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputRemoteElasticsearch returns the union data inside the NewOutputUnion as a NewOutputRemoteElasticsearch +func (t NewOutputUnion) AsNewOutputRemoteElasticsearch() (NewOutputRemoteElasticsearch, error) { + var body NewOutputRemoteElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputRemoteElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputRemoteElasticsearch +func (t *NewOutputUnion) FromNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputRemoteElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputRemoteElasticsearch +func (t *NewOutputUnion) MergeNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputLogstash returns the union data inside the NewOutputUnion as a NewOutputLogstash +func (t NewOutputUnion) AsNewOutputLogstash() (NewOutputLogstash, error) { + var body NewOutputLogstash + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputLogstash overwrites any union data inside the NewOutputUnion as the provided NewOutputLogstash +func (t *NewOutputUnion) FromNewOutputLogstash(v NewOutputLogstash) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputLogstash performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputLogstash +func (t *NewOutputUnion) MergeNewOutputLogstash(v NewOutputLogstash) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsNewOutputKafka returns the union data inside the NewOutputUnion as a NewOutputKafka +func (t NewOutputUnion) AsNewOutputKafka() (NewOutputKafka, error) { + var body NewOutputKafka + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromNewOutputKafka overwrites any union data inside the NewOutputUnion as the provided NewOutputKafka +func (t *NewOutputUnion) FromNewOutputKafka(v NewOutputKafka) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeNewOutputKafka performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputKafka +func (t *NewOutputUnion) MergeNewOutputKafka(v NewOutputKafka) error { + b, err := 
json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t NewOutputUnion) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *NewOutputUnion) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputElasticsearchSecretsSslKey0 returns the union data inside the OutputElasticsearch_Secrets_Ssl_Key as a OutputElasticsearchSecretsSslKey0 +func (t OutputElasticsearch_Secrets_Ssl_Key) AsOutputElasticsearchSecretsSslKey0() (OutputElasticsearchSecretsSslKey0, error) { + var body OutputElasticsearchSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputElasticsearchSecretsSslKey0 overwrites any union data inside the OutputElasticsearch_Secrets_Ssl_Key as the provided OutputElasticsearchSecretsSslKey0 +func (t *OutputElasticsearch_Secrets_Ssl_Key) FromOutputElasticsearchSecretsSslKey0(v OutputElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputElasticsearchSecretsSslKey0 performs a merge with any union data inside the OutputElasticsearch_Secrets_Ssl_Key, using the provided OutputElasticsearchSecretsSslKey0 +func (t *OutputElasticsearch_Secrets_Ssl_Key) MergeOutputElasticsearchSecretsSslKey0(v OutputElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputElasticsearchSecretsSslKey1 returns the union data inside the OutputElasticsearch_Secrets_Ssl_Key as a OutputElasticsearchSecretsSslKey1 +func (t OutputElasticsearch_Secrets_Ssl_Key) AsOutputElasticsearchSecretsSslKey1() (OutputElasticsearchSecretsSslKey1, error) { + var body OutputElasticsearchSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputElasticsearchSecretsSslKey1 overwrites any union data inside the OutputElasticsearch_Secrets_Ssl_Key as the provided OutputElasticsearchSecretsSslKey1 +func (t *OutputElasticsearch_Secrets_Ssl_Key) FromOutputElasticsearchSecretsSslKey1(v OutputElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputElasticsearchSecretsSslKey1 performs a merge with any union data inside the OutputElasticsearch_Secrets_Ssl_Key, using the provided OutputElasticsearchSecretsSslKey1 +func (t *OutputElasticsearch_Secrets_Ssl_Key) MergeOutputElasticsearchSecretsSslKey1(v OutputElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputKafkaSecretsPassword0 returns the union data inside the OutputKafka_Secrets_Password as a OutputKafkaSecretsPassword0 +func (t OutputKafka_Secrets_Password) AsOutputKafkaSecretsPassword0() (OutputKafkaSecretsPassword0, error) { + var body OutputKafkaSecretsPassword0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsPassword0 overwrites any union data inside the OutputKafka_Secrets_Password as the provided OutputKafkaSecretsPassword0 +func (t 
*OutputKafka_Secrets_Password) FromOutputKafkaSecretsPassword0(v OutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsPassword0 performs a merge with any union data inside the OutputKafka_Secrets_Password, using the provided OutputKafkaSecretsPassword0 +func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword0(v OutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputKafkaSecretsPassword1 returns the union data inside the OutputKafka_Secrets_Password as a OutputKafkaSecretsPassword1 +func (t OutputKafka_Secrets_Password) AsOutputKafkaSecretsPassword1() (OutputKafkaSecretsPassword1, error) { + var body OutputKafkaSecretsPassword1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsPassword1 overwrites any union data inside the OutputKafka_Secrets_Password as the provided OutputKafkaSecretsPassword1 +func (t *OutputKafka_Secrets_Password) FromOutputKafkaSecretsPassword1(v OutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsPassword1 performs a merge with any union data inside the OutputKafka_Secrets_Password, using the provided OutputKafkaSecretsPassword1 +func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword1(v OutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputKafkaSecretsSslKey0 returns the union data inside the OutputKafka_Secrets_Ssl_Key as a OutputKafkaSecretsSslKey0 +func (t OutputKafka_Secrets_Ssl_Key) AsOutputKafkaSecretsSslKey0() (OutputKafkaSecretsSslKey0, error) { + var body OutputKafkaSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsSslKey0 overwrites any union data inside the OutputKafka_Secrets_Ssl_Key as the provided OutputKafkaSecretsSslKey0 +func (t *OutputKafka_Secrets_Ssl_Key) FromOutputKafkaSecretsSslKey0(v OutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsSslKey0 performs a merge with any union data inside the OutputKafka_Secrets_Ssl_Key, using the provided OutputKafkaSecretsSslKey0 +func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey0(v OutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputKafkaSecretsSslKey1 returns the union data inside the OutputKafka_Secrets_Ssl_Key as a OutputKafkaSecretsSslKey1 +func (t OutputKafka_Secrets_Ssl_Key) AsOutputKafkaSecretsSslKey1() (OutputKafkaSecretsSslKey1, error) { + var body OutputKafkaSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafkaSecretsSslKey1 overwrites any union data inside the OutputKafka_Secrets_Ssl_Key as the provided OutputKafkaSecretsSslKey1 +func (t *OutputKafka_Secrets_Ssl_Key) FromOutputKafkaSecretsSslKey1(v OutputKafkaSecretsSslKey1) error { + b, 
err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafkaSecretsSslKey1 performs a merge with any union data inside the OutputKafka_Secrets_Ssl_Key, using the provided OutputKafkaSecretsSslKey1 +func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey1(v OutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputLogstashSecretsSslKey0 returns the union data inside the OutputLogstash_Secrets_Ssl_Key as a OutputLogstashSecretsSslKey0 +func (t OutputLogstash_Secrets_Ssl_Key) AsOutputLogstashSecretsSslKey0() (OutputLogstashSecretsSslKey0, error) { + var body OutputLogstashSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputLogstashSecretsSslKey0 overwrites any union data inside the OutputLogstash_Secrets_Ssl_Key as the provided OutputLogstashSecretsSslKey0 +func (t *OutputLogstash_Secrets_Ssl_Key) FromOutputLogstashSecretsSslKey0(v OutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputLogstashSecretsSslKey0 performs a merge with any union data inside the OutputLogstash_Secrets_Ssl_Key, using the provided OutputLogstashSecretsSslKey0 +func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey0(v OutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputLogstashSecretsSslKey1 returns the union data inside the OutputLogstash_Secrets_Ssl_Key as a OutputLogstashSecretsSslKey1 +func (t OutputLogstash_Secrets_Ssl_Key) AsOutputLogstashSecretsSslKey1() (OutputLogstashSecretsSslKey1, error) { + var body OutputLogstashSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputLogstashSecretsSslKey1 overwrites any union data inside the OutputLogstash_Secrets_Ssl_Key as the provided OutputLogstashSecretsSslKey1 +func (t *OutputLogstash_Secrets_Ssl_Key) FromOutputLogstashSecretsSslKey1(v OutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputLogstashSecretsSslKey1 performs a merge with any union data inside the OutputLogstash_Secrets_Ssl_Key, using the provided OutputLogstashSecretsSslKey1 +func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey1(v OutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as a OutputRemoteElasticsearchSecretsServiceToken0 +func (t OutputRemoteElasticsearch_Secrets_ServiceToken) AsOutputRemoteElasticsearchSecretsServiceToken0() (OutputRemoteElasticsearchSecretsServiceToken0, error) { + var body 
OutputRemoteElasticsearchSecretsServiceToken0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as the provided OutputRemoteElasticsearchSecretsServiceToken0 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) FromOutputRemoteElasticsearchSecretsServiceToken0(v OutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken, using the provided OutputRemoteElasticsearchSecretsServiceToken0 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasticsearchSecretsServiceToken0(v OutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as a OutputRemoteElasticsearchSecretsServiceToken1 +func (t OutputRemoteElasticsearch_Secrets_ServiceToken) AsOutputRemoteElasticsearchSecretsServiceToken1() (OutputRemoteElasticsearchSecretsServiceToken1, error) { + var body OutputRemoteElasticsearchSecretsServiceToken1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as the provided OutputRemoteElasticsearchSecretsServiceToken1 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) FromOutputRemoteElasticsearchSecretsServiceToken1(v OutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken, using the provided OutputRemoteElasticsearchSecretsServiceToken1 +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasticsearchSecretsServiceToken1(v OutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputRemoteElasticsearchSecretsSslKey0 returns the union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as a OutputRemoteElasticsearchSecretsSslKey0 +func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) AsOutputRemoteElasticsearchSecretsSslKey0() (OutputRemoteElasticsearchSecretsSslKey0, error) { + var body OutputRemoteElasticsearchSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearchSecretsSslKey0 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as the provided OutputRemoteElasticsearchSecretsSslKey0 +func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) FromOutputRemoteElasticsearchSecretsSslKey0(v OutputRemoteElasticsearchSecretsSslKey0) error { + b, err := 
json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearchSecretsSslKey0 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided OutputRemoteElasticsearchSecretsSslKey0 +func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) MergeOutputRemoteElasticsearchSecretsSslKey0(v OutputRemoteElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputRemoteElasticsearchSecretsSslKey1 returns the union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as a OutputRemoteElasticsearchSecretsSslKey1 +func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) AsOutputRemoteElasticsearchSecretsSslKey1() (OutputRemoteElasticsearchSecretsSslKey1, error) { + var body OutputRemoteElasticsearchSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearchSecretsSslKey1 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as the provided OutputRemoteElasticsearchSecretsSslKey1 +func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) FromOutputRemoteElasticsearchSecretsSslKey1(v OutputRemoteElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearchSecretsSslKey1 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided OutputRemoteElasticsearchSecretsSslKey1 +func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) MergeOutputRemoteElasticsearchSecretsSslKey1(v OutputRemoteElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsOutputElasticsearch returns the union data inside the OutputUnion as a OutputElasticsearch +func (t OutputUnion) AsOutputElasticsearch() (OutputElasticsearch, error) { + var body OutputElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputElasticsearch overwrites any union data inside the OutputUnion as the provided OutputElasticsearch +func (t *OutputUnion) FromOutputElasticsearch(v OutputElasticsearch) error { + v.Type = "elasticsearch" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputElasticsearch performs a merge with any union data inside the OutputUnion, using the provided OutputElasticsearch +func (t *OutputUnion) MergeOutputElasticsearch(v OutputElasticsearch) error { + v.Type = "elasticsearch" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputRemoteElasticsearch returns the union data inside the OutputUnion as a OutputRemoteElasticsearch +func (t OutputUnion) AsOutputRemoteElasticsearch() (OutputRemoteElasticsearch, error) { + var body OutputRemoteElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputRemoteElasticsearch overwrites any union data inside the OutputUnion as the provided OutputRemoteElasticsearch +func (t *OutputUnion) FromOutputRemoteElasticsearch(v 
OutputRemoteElasticsearch) error { + v.Type = "remote_elasticsearch" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputRemoteElasticsearch performs a merge with any union data inside the OutputUnion, using the provided OutputRemoteElasticsearch +func (t *OutputUnion) MergeOutputRemoteElasticsearch(v OutputRemoteElasticsearch) error { + v.Type = "remote_elasticsearch" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputLogstash returns the union data inside the OutputUnion as a OutputLogstash +func (t OutputUnion) AsOutputLogstash() (OutputLogstash, error) { + var body OutputLogstash + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputLogstash overwrites any union data inside the OutputUnion as the provided OutputLogstash +func (t *OutputUnion) FromOutputLogstash(v OutputLogstash) error { + v.Type = "logstash" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputLogstash performs a merge with any union data inside the OutputUnion, using the provided OutputLogstash +func (t *OutputUnion) MergeOutputLogstash(v OutputLogstash) error { + v.Type = "logstash" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOutputKafka returns the union data inside the OutputUnion as a OutputKafka +func (t OutputUnion) AsOutputKafka() (OutputKafka, error) { + var body OutputKafka + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOutputKafka overwrites any union data inside the OutputUnion as the provided OutputKafka +func (t *OutputUnion) FromOutputKafka(v OutputKafka) error { + v.Type = "kafka" + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOutputKafka performs a merge with any union data inside the OutputUnion, using the provided OutputKafka +func (t *OutputUnion) MergeOutputKafka(v OutputKafka) error { + v.Type = "kafka" + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t OutputUnion) Discriminator() (string, error) { + var discriminator struct { + Discriminator string `json:"type"` + } + err := json.Unmarshal(t.union, &discriminator) + return discriminator.Discriminator, err +} + +func (t OutputUnion) ValueByDiscriminator() (interface{}, error) { + discriminator, err := t.Discriminator() + if err != nil { + return nil, err + } + switch discriminator { + case "elasticsearch": + return t.AsOutputElasticsearch() + case "kafka": + return t.AsOutputKafka() + case "logstash": + return t.AsOutputLogstash() + case "remote_elasticsearch": + return t.AsOutputRemoteElasticsearch() + default: + return nil, errors.New("unknown discriminator value: " + discriminator) + } +} + +func (t OutputUnion) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *OutputUnion) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 returns the union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0() 
(PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0, error) { + var body PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 overwrites any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 performs a merge with any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 returns the union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1() (PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1, error) { + var body PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 overwrites any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 performs a merge with any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageInfoInstallationInfoInstalledKibanaType0 returns the 
union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as a PackageInfoInstallationInfoInstalledKibanaType0 +func (t PackageInfo_InstallationInfo_InstalledKibana_Type) AsPackageInfoInstallationInfoInstalledKibanaType0() (PackageInfoInstallationInfoInstalledKibanaType0, error) { + var body PackageInfoInstallationInfoInstalledKibanaType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoInstallationInfoInstalledKibanaType0 overwrites any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as the provided PackageInfoInstallationInfoInstalledKibanaType0 +func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) FromPackageInfoInstallationInfoInstalledKibanaType0(v PackageInfoInstallationInfoInstalledKibanaType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoInstallationInfoInstalledKibanaType0 performs a merge with any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type, using the provided PackageInfoInstallationInfoInstalledKibanaType0 +func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) MergePackageInfoInstallationInfoInstalledKibanaType0(v PackageInfoInstallationInfoInstalledKibanaType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageInfoInstallationInfoInstalledKibanaType1 returns the union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as a PackageInfoInstallationInfoInstalledKibanaType1 +func (t PackageInfo_InstallationInfo_InstalledKibana_Type) AsPackageInfoInstallationInfoInstalledKibanaType1() (PackageInfoInstallationInfoInstalledKibanaType1, error) { + var body PackageInfoInstallationInfoInstalledKibanaType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoInstallationInfoInstalledKibanaType1 overwrites any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as the provided PackageInfoInstallationInfoInstalledKibanaType1 +func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) FromPackageInfoInstallationInfoInstalledKibanaType1(v PackageInfoInstallationInfoInstalledKibanaType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoInstallationInfoInstalledKibanaType1 performs a merge with any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type, using the provided PackageInfoInstallationInfoInstalledKibanaType1 +func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) MergePackageInfoInstallationInfoInstalledKibanaType1(v PackageInfoInstallationInfoInstalledKibanaType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageInfo_InstallationInfo_InstalledKibana_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageInfoType0 returns the union data inside the PackageInfo_Type as a PackageInfoType0 +func (t PackageInfo_Type) AsPackageInfoType0() (PackageInfoType0, error) { + var body PackageInfoType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoType0 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType0 +func (t 
*PackageInfo_Type) FromPackageInfoType0(v PackageInfoType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoType0 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType0 +func (t *PackageInfo_Type) MergePackageInfoType0(v PackageInfoType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageInfoType1 returns the union data inside the PackageInfo_Type as a PackageInfoType1 +func (t PackageInfo_Type) AsPackageInfoType1() (PackageInfoType1, error) { + var body PackageInfoType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoType1 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType1 +func (t *PackageInfo_Type) FromPackageInfoType1(v PackageInfoType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoType1 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType1 +func (t *PackageInfo_Type) MergePackageInfoType1(v PackageInfoType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageInfoType2 returns the union data inside the PackageInfo_Type as a PackageInfoType2 +func (t PackageInfo_Type) AsPackageInfoType2() (PackageInfoType2, error) { + var body PackageInfoType2 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoType2 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType2 +func (t *PackageInfo_Type) FromPackageInfoType2(v PackageInfoType2) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoType2 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType2 +func (t *PackageInfo_Type) MergePackageInfoType2(v PackageInfoType2) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageInfoType3 returns the union data inside the PackageInfo_Type as a PackageInfoType3 +func (t PackageInfo_Type) AsPackageInfoType3() (PackageInfoType3, error) { + var body PackageInfoType3 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageInfoType3 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType3 +func (t *PackageInfo_Type) FromPackageInfoType3(v PackageInfoType3) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageInfoType3 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType3 +func (t *PackageInfo_Type) MergePackageInfoType3(v PackageInfoType3) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageInfo_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageInfo_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 returns the union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a 
PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0() (PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0, error) { + var body PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 overwrites any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 performs a merge with any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 +func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 returns the union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1() (PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1, error) { + var body PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 overwrites any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 performs a merge with any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 +func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t 
PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageListItemInstallationInfoInstalledKibanaType0 returns the union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as a PackageListItemInstallationInfoInstalledKibanaType0 +func (t PackageListItem_InstallationInfo_InstalledKibana_Type) AsPackageListItemInstallationInfoInstalledKibanaType0() (PackageListItemInstallationInfoInstalledKibanaType0, error) { + var body PackageListItemInstallationInfoInstalledKibanaType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemInstallationInfoInstalledKibanaType0 overwrites any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as the provided PackageListItemInstallationInfoInstalledKibanaType0 +func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) FromPackageListItemInstallationInfoInstalledKibanaType0(v PackageListItemInstallationInfoInstalledKibanaType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemInstallationInfoInstalledKibanaType0 performs a merge with any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type, using the provided PackageListItemInstallationInfoInstalledKibanaType0 +func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) MergePackageListItemInstallationInfoInstalledKibanaType0(v PackageListItemInstallationInfoInstalledKibanaType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageListItemInstallationInfoInstalledKibanaType1 returns the union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as a PackageListItemInstallationInfoInstalledKibanaType1 +func (t PackageListItem_InstallationInfo_InstalledKibana_Type) AsPackageListItemInstallationInfoInstalledKibanaType1() (PackageListItemInstallationInfoInstalledKibanaType1, error) { + var body PackageListItemInstallationInfoInstalledKibanaType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemInstallationInfoInstalledKibanaType1 overwrites any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as the provided PackageListItemInstallationInfoInstalledKibanaType1 +func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) FromPackageListItemInstallationInfoInstalledKibanaType1(v PackageListItemInstallationInfoInstalledKibanaType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemInstallationInfoInstalledKibanaType1 performs a merge with any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type, using the provided PackageListItemInstallationInfoInstalledKibanaType1 +func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) MergePackageListItemInstallationInfoInstalledKibanaType1(v PackageListItemInstallationInfoInstalledKibanaType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageListItem_InstallationInfo_InstalledKibana_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, 
err +} + +func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsPackageListItemType0 returns the union data inside the PackageListItem_Type as a PackageListItemType0 +func (t PackageListItem_Type) AsPackageListItemType0() (PackageListItemType0, error) { + var body PackageListItemType0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemType0 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType0 +func (t *PackageListItem_Type) FromPackageListItemType0(v PackageListItemType0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemType0 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType0 +func (t *PackageListItem_Type) MergePackageListItemType0(v PackageListItemType0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageListItemType1 returns the union data inside the PackageListItem_Type as a PackageListItemType1 +func (t PackageListItem_Type) AsPackageListItemType1() (PackageListItemType1, error) { + var body PackageListItemType1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemType1 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType1 +func (t *PackageListItem_Type) FromPackageListItemType1(v PackageListItemType1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemType1 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType1 +func (t *PackageListItem_Type) MergePackageListItemType1(v PackageListItemType1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageListItemType2 returns the union data inside the PackageListItem_Type as a PackageListItemType2 +func (t PackageListItem_Type) AsPackageListItemType2() (PackageListItemType2, error) { + var body PackageListItemType2 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemType2 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType2 +func (t *PackageListItem_Type) FromPackageListItemType2(v PackageListItemType2) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePackageListItemType2 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType2 +func (t *PackageListItem_Type) MergePackageListItemType2(v PackageListItemType2) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPackageListItemType3 returns the union data inside the PackageListItem_Type as a PackageListItemType3 +func (t PackageListItem_Type) AsPackageListItemType3() (PackageListItemType3, error) { + var body PackageListItemType3 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPackageListItemType3 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType3 +func (t *PackageListItem_Type) FromPackageListItemType3(v PackageListItemType3) error { + b, err := json.Marshal(v) + t.union = b + return err +} + 
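// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the generated client or this patch): every
// As*/From* accessor above follows the same shape — the union struct stores
// only the raw JSON payload, and each variant is marshalled into or
// unmarshalled out of that buffer on demand. The self-contained miniature
// below shows that pattern with hypothetical VariantA/MiniUnion names; the
// types and fields are assumptions used purely for illustration.
// ---------------------------------------------------------------------------
package main

import (
	"encoding/json"
	"fmt"
)

// VariantA stands in for one generated variant type of a union.
type VariantA struct {
	Name string `json:"name"`
}

// MiniUnion mirrors the generated union wrappers: it holds nothing but the
// raw JSON of whichever variant was last written.
type MiniUnion struct {
	union json.RawMessage
}

// FromVariantA overwrites any union data with the provided VariantA.
func (t *MiniUnion) FromVariantA(v VariantA) error {
	b, err := json.Marshal(v)
	t.union = b
	return err
}

// AsVariantA returns the union data interpreted as a VariantA.
func (t MiniUnion) AsVariantA() (VariantA, error) {
	var body VariantA
	err := json.Unmarshal(t.union, &body)
	return body, err
}

// MarshalJSON and UnmarshalJSON simply pass the raw payload through, exactly
// as the generated wrappers do.
func (t MiniUnion) MarshalJSON() ([]byte, error) {
	return t.union.MarshalJSON()
}

func (t *MiniUnion) UnmarshalJSON(b []byte) error {
	return t.union.UnmarshalJSON(b)
}

func main() {
	var u MiniUnion
	if err := u.FromVariantA(VariantA{Name: "example"}); err != nil {
		panic(err)
	}
	a, _ := u.AsVariantA()
	fmt.Println(a.Name) // example
}
// --------------------------- end of sketch --------------------------------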
+// MergePackageListItemType3 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType3 +func (t *PackageListItem_Type) MergePackageListItemType3(v PackageListItemType3) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t PackageListItem_Type) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *PackageListItem_Type) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsParamsEsQueryDslRuleIndex0 returns the union data inside the ParamsEsQueryDslRule_Index as a ParamsEsQueryDslRuleIndex0 +func (t ParamsEsQueryDslRule_Index) AsParamsEsQueryDslRuleIndex0() (ParamsEsQueryDslRuleIndex0, error) { + var body ParamsEsQueryDslRuleIndex0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsEsQueryDslRuleIndex0 overwrites any union data inside the ParamsEsQueryDslRule_Index as the provided ParamsEsQueryDslRuleIndex0 +func (t *ParamsEsQueryDslRule_Index) FromParamsEsQueryDslRuleIndex0(v ParamsEsQueryDslRuleIndex0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsEsQueryDslRuleIndex0 performs a merge with any union data inside the ParamsEsQueryDslRule_Index, using the provided ParamsEsQueryDslRuleIndex0 +func (t *ParamsEsQueryDslRule_Index) MergeParamsEsQueryDslRuleIndex0(v ParamsEsQueryDslRuleIndex0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsParamsEsQueryDslRuleIndex1 returns the union data inside the ParamsEsQueryDslRule_Index as a ParamsEsQueryDslRuleIndex1 +func (t ParamsEsQueryDslRule_Index) AsParamsEsQueryDslRuleIndex1() (ParamsEsQueryDslRuleIndex1, error) { + var body ParamsEsQueryDslRuleIndex1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsEsQueryDslRuleIndex1 overwrites any union data inside the ParamsEsQueryDslRule_Index as the provided ParamsEsQueryDslRuleIndex1 +func (t *ParamsEsQueryDslRule_Index) FromParamsEsQueryDslRuleIndex1(v ParamsEsQueryDslRuleIndex1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsEsQueryDslRuleIndex1 performs a merge with any union data inside the ParamsEsQueryDslRule_Index, using the provided ParamsEsQueryDslRuleIndex1 +func (t *ParamsEsQueryDslRule_Index) MergeParamsEsQueryDslRuleIndex1(v ParamsEsQueryDslRuleIndex1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ParamsEsQueryDslRule_Index) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ParamsEsQueryDslRule_Index) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsParamsEsQueryKqlRuleSearchConfigurationIndex0 returns the union data inside the ParamsEsQueryKqlRule_SearchConfiguration_Index as a ParamsEsQueryKqlRuleSearchConfigurationIndex0 +func (t ParamsEsQueryKqlRule_SearchConfiguration_Index) AsParamsEsQueryKqlRuleSearchConfigurationIndex0() (ParamsEsQueryKqlRuleSearchConfigurationIndex0, error) { + var body ParamsEsQueryKqlRuleSearchConfigurationIndex0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsEsQueryKqlRuleSearchConfigurationIndex0 overwrites any union data inside the 
ParamsEsQueryKqlRule_SearchConfiguration_Index as the provided ParamsEsQueryKqlRuleSearchConfigurationIndex0 +func (t *ParamsEsQueryKqlRule_SearchConfiguration_Index) FromParamsEsQueryKqlRuleSearchConfigurationIndex0(v ParamsEsQueryKqlRuleSearchConfigurationIndex0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsEsQueryKqlRuleSearchConfigurationIndex0 performs a merge with any union data inside the ParamsEsQueryKqlRule_SearchConfiguration_Index, using the provided ParamsEsQueryKqlRuleSearchConfigurationIndex0 +func (t *ParamsEsQueryKqlRule_SearchConfiguration_Index) MergeParamsEsQueryKqlRuleSearchConfigurationIndex0(v ParamsEsQueryKqlRuleSearchConfigurationIndex0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsParamsEsQueryKqlRuleSearchConfigurationIndex1 returns the union data inside the ParamsEsQueryKqlRule_SearchConfiguration_Index as a ParamsEsQueryKqlRuleSearchConfigurationIndex1 +func (t ParamsEsQueryKqlRule_SearchConfiguration_Index) AsParamsEsQueryKqlRuleSearchConfigurationIndex1() (ParamsEsQueryKqlRuleSearchConfigurationIndex1, error) { + var body ParamsEsQueryKqlRuleSearchConfigurationIndex1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsEsQueryKqlRuleSearchConfigurationIndex1 overwrites any union data inside the ParamsEsQueryKqlRule_SearchConfiguration_Index as the provided ParamsEsQueryKqlRuleSearchConfigurationIndex1 +func (t *ParamsEsQueryKqlRule_SearchConfiguration_Index) FromParamsEsQueryKqlRuleSearchConfigurationIndex1(v ParamsEsQueryKqlRuleSearchConfigurationIndex1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsEsQueryKqlRuleSearchConfigurationIndex1 performs a merge with any union data inside the ParamsEsQueryKqlRule_SearchConfiguration_Index, using the provided ParamsEsQueryKqlRuleSearchConfigurationIndex1 +func (t *ParamsEsQueryKqlRule_SearchConfiguration_Index) MergeParamsEsQueryKqlRuleSearchConfigurationIndex1(v ParamsEsQueryKqlRuleSearchConfigurationIndex1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ParamsEsQueryKqlRule_SearchConfiguration_Index) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ParamsEsQueryKqlRule_SearchConfiguration_Index) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsParamsPropertyInfraMetricThresholdCriteria2CustomMetric0 returns the union data inside the ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item as a ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0 +func (t ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item) AsParamsPropertyInfraMetricThresholdCriteria2CustomMetric0() (ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0, error) { + var body ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyInfraMetricThresholdCriteria2CustomMetric0 overwrites any union data inside the ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item as the provided ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0 +func (t *ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item) FromParamsPropertyInfraMetricThresholdCriteria2CustomMetric0(v 
ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyInfraMetricThresholdCriteria2CustomMetric0 performs a merge with any union data inside the ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item, using the provided ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0 +func (t *ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item) MergeParamsPropertyInfraMetricThresholdCriteria2CustomMetric0(v ParamsPropertyInfraMetricThresholdCriteria2CustomMetric0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsParamsPropertyInfraMetricThresholdCriteria2CustomMetric1 returns the union data inside the ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item as a ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1 +func (t ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item) AsParamsPropertyInfraMetricThresholdCriteria2CustomMetric1() (ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1, error) { + var body ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyInfraMetricThresholdCriteria2CustomMetric1 overwrites any union data inside the ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item as the provided ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1 +func (t *ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item) FromParamsPropertyInfraMetricThresholdCriteria2CustomMetric1(v ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyInfraMetricThresholdCriteria2CustomMetric1 performs a merge with any union data inside the ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item, using the provided ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1 +func (t *ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item) MergeParamsPropertyInfraMetricThresholdCriteria2CustomMetric1(v ParamsPropertyInfraMetricThresholdCriteria2CustomMetric1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ParamsPropertyInfraMetricThreshold_Criteria_2_CustomMetric_Item) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsParamsPropertyInfraMetricThresholdCriteria0 returns the union data inside the ParamsPropertyInfraMetricThreshold_Criteria_Item as a ParamsPropertyInfraMetricThresholdCriteria0 +func (t ParamsPropertyInfraMetricThreshold_Criteria_Item) AsParamsPropertyInfraMetricThresholdCriteria0() (ParamsPropertyInfraMetricThresholdCriteria0, error) { + var body ParamsPropertyInfraMetricThresholdCriteria0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyInfraMetricThresholdCriteria0 overwrites any union data inside the ParamsPropertyInfraMetricThreshold_Criteria_Item as the provided ParamsPropertyInfraMetricThresholdCriteria0 +func (t *ParamsPropertyInfraMetricThreshold_Criteria_Item) FromParamsPropertyInfraMetricThresholdCriteria0(v ParamsPropertyInfraMetricThresholdCriteria0) error { + b, 
err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyInfraMetricThresholdCriteria0 performs a merge with any union data inside the ParamsPropertyInfraMetricThreshold_Criteria_Item, using the provided ParamsPropertyInfraMetricThresholdCriteria0 +func (t *ParamsPropertyInfraMetricThreshold_Criteria_Item) MergeParamsPropertyInfraMetricThresholdCriteria0(v ParamsPropertyInfraMetricThresholdCriteria0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsParamsPropertyInfraMetricThresholdCriteria1 returns the union data inside the ParamsPropertyInfraMetricThreshold_Criteria_Item as a ParamsPropertyInfraMetricThresholdCriteria1 +func (t ParamsPropertyInfraMetricThreshold_Criteria_Item) AsParamsPropertyInfraMetricThresholdCriteria1() (ParamsPropertyInfraMetricThresholdCriteria1, error) { + var body ParamsPropertyInfraMetricThresholdCriteria1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyInfraMetricThresholdCriteria1 overwrites any union data inside the ParamsPropertyInfraMetricThreshold_Criteria_Item as the provided ParamsPropertyInfraMetricThresholdCriteria1 +func (t *ParamsPropertyInfraMetricThreshold_Criteria_Item) FromParamsPropertyInfraMetricThresholdCriteria1(v ParamsPropertyInfraMetricThresholdCriteria1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyInfraMetricThresholdCriteria1 performs a merge with any union data inside the ParamsPropertyInfraMetricThreshold_Criteria_Item, using the provided ParamsPropertyInfraMetricThresholdCriteria1 +func (t *ParamsPropertyInfraMetricThreshold_Criteria_Item) MergeParamsPropertyInfraMetricThresholdCriteria1(v ParamsPropertyInfraMetricThresholdCriteria1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsParamsPropertyInfraMetricThresholdCriteria2 returns the union data inside the ParamsPropertyInfraMetricThreshold_Criteria_Item as a ParamsPropertyInfraMetricThresholdCriteria2 +func (t ParamsPropertyInfraMetricThreshold_Criteria_Item) AsParamsPropertyInfraMetricThresholdCriteria2() (ParamsPropertyInfraMetricThresholdCriteria2, error) { + var body ParamsPropertyInfraMetricThresholdCriteria2 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyInfraMetricThresholdCriteria2 overwrites any union data inside the ParamsPropertyInfraMetricThreshold_Criteria_Item as the provided ParamsPropertyInfraMetricThresholdCriteria2 +func (t *ParamsPropertyInfraMetricThreshold_Criteria_Item) FromParamsPropertyInfraMetricThresholdCriteria2(v ParamsPropertyInfraMetricThresholdCriteria2) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyInfraMetricThresholdCriteria2 performs a merge with any union data inside the ParamsPropertyInfraMetricThreshold_Criteria_Item, using the provided ParamsPropertyInfraMetricThresholdCriteria2 +func (t *ParamsPropertyInfraMetricThreshold_Criteria_Item) MergeParamsPropertyInfraMetricThresholdCriteria2(v ParamsPropertyInfraMetricThresholdCriteria2) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ParamsPropertyInfraMetricThreshold_Criteria_Item) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + 
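// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the generated client or this patch): the
// Merge* helpers above marshal the provided variant and combine it with the
// JSON already held in the union via runtime.JSONMerge from the oapi-codegen
// runtime. The standalone mergeJSON below approximates that behaviour with a
// top-level object merge so the intent is visible without the runtime
// dependency; mergeJSON is a hypothetical name and the real helper's exact
// merge semantics may differ.
// ---------------------------------------------------------------------------
package main

import (
	"encoding/json"
	"fmt"
)

// mergeJSON overlays the top-level fields of next onto base and returns the
// combined document. An empty base is treated as "no existing union data".
func mergeJSON(base, next json.RawMessage) (json.RawMessage, error) {
	merged := map[string]json.RawMessage{}
	if len(base) > 0 {
		if err := json.Unmarshal(base, &merged); err != nil {
			return nil, err
		}
	}
	overlay := map[string]json.RawMessage{}
	if err := json.Unmarshal(next, &overlay); err != nil {
		return nil, err
	}
	for k, v := range overlay {
		merged[k] = v
	}
	return json.Marshal(merged)
}

func main() {
	base := json.RawMessage(`{"comparator":">","threshold":[1]}`)
	next := json.RawMessage(`{"threshold":[5],"timeUnit":"m"}`)
	out, err := mergeJSON(base, next)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // {"comparator":">","threshold":[5],"timeUnit":"m"}
}
// --------------------------- end of sketch --------------------------------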
+func (t *ParamsPropertyInfraMetricThreshold_Criteria_Item) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsParamsPropertyInfraMetricThresholdGroupBy0 returns the union data inside the ParamsPropertyInfraMetricThreshold_GroupBy as a ParamsPropertyInfraMetricThresholdGroupBy0 +func (t ParamsPropertyInfraMetricThreshold_GroupBy) AsParamsPropertyInfraMetricThresholdGroupBy0() (ParamsPropertyInfraMetricThresholdGroupBy0, error) { + var body ParamsPropertyInfraMetricThresholdGroupBy0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyInfraMetricThresholdGroupBy0 overwrites any union data inside the ParamsPropertyInfraMetricThreshold_GroupBy as the provided ParamsPropertyInfraMetricThresholdGroupBy0 +func (t *ParamsPropertyInfraMetricThreshold_GroupBy) FromParamsPropertyInfraMetricThresholdGroupBy0(v ParamsPropertyInfraMetricThresholdGroupBy0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyInfraMetricThresholdGroupBy0 performs a merge with any union data inside the ParamsPropertyInfraMetricThreshold_GroupBy, using the provided ParamsPropertyInfraMetricThresholdGroupBy0 +func (t *ParamsPropertyInfraMetricThreshold_GroupBy) MergeParamsPropertyInfraMetricThresholdGroupBy0(v ParamsPropertyInfraMetricThresholdGroupBy0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsParamsPropertyInfraMetricThresholdGroupBy1 returns the union data inside the ParamsPropertyInfraMetricThreshold_GroupBy as a ParamsPropertyInfraMetricThresholdGroupBy1 +func (t ParamsPropertyInfraMetricThreshold_GroupBy) AsParamsPropertyInfraMetricThresholdGroupBy1() (ParamsPropertyInfraMetricThresholdGroupBy1, error) { + var body ParamsPropertyInfraMetricThresholdGroupBy1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyInfraMetricThresholdGroupBy1 overwrites any union data inside the ParamsPropertyInfraMetricThreshold_GroupBy as the provided ParamsPropertyInfraMetricThresholdGroupBy1 +func (t *ParamsPropertyInfraMetricThreshold_GroupBy) FromParamsPropertyInfraMetricThresholdGroupBy1(v ParamsPropertyInfraMetricThresholdGroupBy1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyInfraMetricThresholdGroupBy1 performs a merge with any union data inside the ParamsPropertyInfraMetricThreshold_GroupBy, using the provided ParamsPropertyInfraMetricThresholdGroupBy1 +func (t *ParamsPropertyInfraMetricThreshold_GroupBy) MergeParamsPropertyInfraMetricThresholdGroupBy1(v ParamsPropertyInfraMetricThresholdGroupBy1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ParamsPropertyInfraMetricThreshold_GroupBy) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ParamsPropertyInfraMetricThreshold_GroupBy) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsParamsPropertyLogThreshold0 returns the union data inside the ParamsPropertyLogThreshold as a ParamsPropertyLogThreshold0 +func (t ParamsPropertyLogThreshold) AsParamsPropertyLogThreshold0() (ParamsPropertyLogThreshold0, error) { + var body ParamsPropertyLogThreshold0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyLogThreshold0 overwrites any union data inside 
the ParamsPropertyLogThreshold as the provided ParamsPropertyLogThreshold0 +func (t *ParamsPropertyLogThreshold) FromParamsPropertyLogThreshold0(v ParamsPropertyLogThreshold0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyLogThreshold0 performs a merge with any union data inside the ParamsPropertyLogThreshold, using the provided ParamsPropertyLogThreshold0 +func (t *ParamsPropertyLogThreshold) MergeParamsPropertyLogThreshold0(v ParamsPropertyLogThreshold0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsParamsPropertyLogThreshold1 returns the union data inside the ParamsPropertyLogThreshold as a ParamsPropertyLogThreshold1 +func (t ParamsPropertyLogThreshold) AsParamsPropertyLogThreshold1() (ParamsPropertyLogThreshold1, error) { + var body ParamsPropertyLogThreshold1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyLogThreshold1 overwrites any union data inside the ParamsPropertyLogThreshold as the provided ParamsPropertyLogThreshold1 +func (t *ParamsPropertyLogThreshold) FromParamsPropertyLogThreshold1(v ParamsPropertyLogThreshold1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyLogThreshold1 performs a merge with any union data inside the ParamsPropertyLogThreshold, using the provided ParamsPropertyLogThreshold1 +func (t *ParamsPropertyLogThreshold) MergeParamsPropertyLogThreshold1(v ParamsPropertyLogThreshold1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ParamsPropertyLogThreshold) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ParamsPropertyLogThreshold) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsParamsPropertyLogThreshold0CriteriaValue0 returns the union data inside the ParamsPropertyLogThreshold_0_Criteria_Value as a ParamsPropertyLogThreshold0CriteriaValue0 +func (t ParamsPropertyLogThreshold_0_Criteria_Value) AsParamsPropertyLogThreshold0CriteriaValue0() (ParamsPropertyLogThreshold0CriteriaValue0, error) { + var body ParamsPropertyLogThreshold0CriteriaValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyLogThreshold0CriteriaValue0 overwrites any union data inside the ParamsPropertyLogThreshold_0_Criteria_Value as the provided ParamsPropertyLogThreshold0CriteriaValue0 +func (t *ParamsPropertyLogThreshold_0_Criteria_Value) FromParamsPropertyLogThreshold0CriteriaValue0(v ParamsPropertyLogThreshold0CriteriaValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyLogThreshold0CriteriaValue0 performs a merge with any union data inside the ParamsPropertyLogThreshold_0_Criteria_Value, using the provided ParamsPropertyLogThreshold0CriteriaValue0 +func (t *ParamsPropertyLogThreshold_0_Criteria_Value) MergeParamsPropertyLogThreshold0CriteriaValue0(v ParamsPropertyLogThreshold0CriteriaValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsParamsPropertyLogThreshold0CriteriaValue1 returns the union data inside the ParamsPropertyLogThreshold_0_Criteria_Value as a ParamsPropertyLogThreshold0CriteriaValue1 +func (t 
ParamsPropertyLogThreshold_0_Criteria_Value) AsParamsPropertyLogThreshold0CriteriaValue1() (ParamsPropertyLogThreshold0CriteriaValue1, error) { + var body ParamsPropertyLogThreshold0CriteriaValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyLogThreshold0CriteriaValue1 overwrites any union data inside the ParamsPropertyLogThreshold_0_Criteria_Value as the provided ParamsPropertyLogThreshold0CriteriaValue1 +func (t *ParamsPropertyLogThreshold_0_Criteria_Value) FromParamsPropertyLogThreshold0CriteriaValue1(v ParamsPropertyLogThreshold0CriteriaValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyLogThreshold0CriteriaValue1 performs a merge with any union data inside the ParamsPropertyLogThreshold_0_Criteria_Value, using the provided ParamsPropertyLogThreshold0CriteriaValue1 +func (t *ParamsPropertyLogThreshold_0_Criteria_Value) MergeParamsPropertyLogThreshold0CriteriaValue1(v ParamsPropertyLogThreshold0CriteriaValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ParamsPropertyLogThreshold_0_Criteria_Value) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ParamsPropertyLogThreshold_0_Criteria_Value) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsParamsPropertyLogThreshold1CriteriaValue0 returns the union data inside the ParamsPropertyLogThreshold_1_Criteria_Value as a ParamsPropertyLogThreshold1CriteriaValue0 +func (t ParamsPropertyLogThreshold_1_Criteria_Value) AsParamsPropertyLogThreshold1CriteriaValue0() (ParamsPropertyLogThreshold1CriteriaValue0, error) { + var body ParamsPropertyLogThreshold1CriteriaValue0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyLogThreshold1CriteriaValue0 overwrites any union data inside the ParamsPropertyLogThreshold_1_Criteria_Value as the provided ParamsPropertyLogThreshold1CriteriaValue0 +func (t *ParamsPropertyLogThreshold_1_Criteria_Value) FromParamsPropertyLogThreshold1CriteriaValue0(v ParamsPropertyLogThreshold1CriteriaValue0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyLogThreshold1CriteriaValue0 performs a merge with any union data inside the ParamsPropertyLogThreshold_1_Criteria_Value, using the provided ParamsPropertyLogThreshold1CriteriaValue0 +func (t *ParamsPropertyLogThreshold_1_Criteria_Value) MergeParamsPropertyLogThreshold1CriteriaValue0(v ParamsPropertyLogThreshold1CriteriaValue0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsParamsPropertyLogThreshold1CriteriaValue1 returns the union data inside the ParamsPropertyLogThreshold_1_Criteria_Value as a ParamsPropertyLogThreshold1CriteriaValue1 +func (t ParamsPropertyLogThreshold_1_Criteria_Value) AsParamsPropertyLogThreshold1CriteriaValue1() (ParamsPropertyLogThreshold1CriteriaValue1, error) { + var body ParamsPropertyLogThreshold1CriteriaValue1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertyLogThreshold1CriteriaValue1 overwrites any union data inside the ParamsPropertyLogThreshold_1_Criteria_Value as the provided ParamsPropertyLogThreshold1CriteriaValue1 +func (t *ParamsPropertyLogThreshold_1_Criteria_Value) FromParamsPropertyLogThreshold1CriteriaValue1(v 
ParamsPropertyLogThreshold1CriteriaValue1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertyLogThreshold1CriteriaValue1 performs a merge with any union data inside the ParamsPropertyLogThreshold_1_Criteria_Value, using the provided ParamsPropertyLogThreshold1CriteriaValue1 +func (t *ParamsPropertyLogThreshold_1_Criteria_Value) MergeParamsPropertyLogThreshold1CriteriaValue1(v ParamsPropertyLogThreshold1CriteriaValue1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ParamsPropertyLogThreshold_1_Criteria_Value) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ParamsPropertyLogThreshold_1_Criteria_Value) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsParamsPropertySyntheticsMonitorStatusFilters0 returns the union data inside the ParamsPropertySyntheticsMonitorStatus_Filters as a ParamsPropertySyntheticsMonitorStatusFilters0 +func (t ParamsPropertySyntheticsMonitorStatus_Filters) AsParamsPropertySyntheticsMonitorStatusFilters0() (ParamsPropertySyntheticsMonitorStatusFilters0, error) { + var body ParamsPropertySyntheticsMonitorStatusFilters0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertySyntheticsMonitorStatusFilters0 overwrites any union data inside the ParamsPropertySyntheticsMonitorStatus_Filters as the provided ParamsPropertySyntheticsMonitorStatusFilters0 +func (t *ParamsPropertySyntheticsMonitorStatus_Filters) FromParamsPropertySyntheticsMonitorStatusFilters0(v ParamsPropertySyntheticsMonitorStatusFilters0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertySyntheticsMonitorStatusFilters0 performs a merge with any union data inside the ParamsPropertySyntheticsMonitorStatus_Filters, using the provided ParamsPropertySyntheticsMonitorStatusFilters0 +func (t *ParamsPropertySyntheticsMonitorStatus_Filters) MergeParamsPropertySyntheticsMonitorStatusFilters0(v ParamsPropertySyntheticsMonitorStatusFilters0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsParamsPropertySyntheticsMonitorStatusFilters1 returns the union data inside the ParamsPropertySyntheticsMonitorStatus_Filters as a ParamsPropertySyntheticsMonitorStatusFilters1 +func (t ParamsPropertySyntheticsMonitorStatus_Filters) AsParamsPropertySyntheticsMonitorStatusFilters1() (ParamsPropertySyntheticsMonitorStatusFilters1, error) { + var body ParamsPropertySyntheticsMonitorStatusFilters1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromParamsPropertySyntheticsMonitorStatusFilters1 overwrites any union data inside the ParamsPropertySyntheticsMonitorStatus_Filters as the provided ParamsPropertySyntheticsMonitorStatusFilters1 +func (t *ParamsPropertySyntheticsMonitorStatus_Filters) FromParamsPropertySyntheticsMonitorStatusFilters1(v ParamsPropertySyntheticsMonitorStatusFilters1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeParamsPropertySyntheticsMonitorStatusFilters1 performs a merge with any union data inside the ParamsPropertySyntheticsMonitorStatus_Filters, using the provided ParamsPropertySyntheticsMonitorStatusFilters1 +func (t *ParamsPropertySyntheticsMonitorStatus_Filters) MergeParamsPropertySyntheticsMonitorStatusFilters1(v 
ParamsPropertySyntheticsMonitorStatusFilters1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ParamsPropertySyntheticsMonitorStatus_Filters) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ParamsPropertySyntheticsMonitorStatus_Filters) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsRunCloseincidentSubActionParamsIncident0 returns the union data inside the RunCloseincident_SubActionParams_Incident as a RunCloseincidentSubActionParamsIncident0 +func (t RunCloseincident_SubActionParams_Incident) AsRunCloseincidentSubActionParamsIncident0() (RunCloseincidentSubActionParamsIncident0, error) { + var body RunCloseincidentSubActionParamsIncident0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromRunCloseincidentSubActionParamsIncident0 overwrites any union data inside the RunCloseincident_SubActionParams_Incident as the provided RunCloseincidentSubActionParamsIncident0 +func (t *RunCloseincident_SubActionParams_Incident) FromRunCloseincidentSubActionParamsIncident0(v RunCloseincidentSubActionParamsIncident0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeRunCloseincidentSubActionParamsIncident0 performs a merge with any union data inside the RunCloseincident_SubActionParams_Incident, using the provided RunCloseincidentSubActionParamsIncident0 +func (t *RunCloseincident_SubActionParams_Incident) MergeRunCloseincidentSubActionParamsIncident0(v RunCloseincidentSubActionParamsIncident0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsRunCloseincidentSubActionParamsIncident1 returns the union data inside the RunCloseincident_SubActionParams_Incident as a RunCloseincidentSubActionParamsIncident1 +func (t RunCloseincident_SubActionParams_Incident) AsRunCloseincidentSubActionParamsIncident1() (RunCloseincidentSubActionParamsIncident1, error) { + var body RunCloseincidentSubActionParamsIncident1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromRunCloseincidentSubActionParamsIncident1 overwrites any union data inside the RunCloseincident_SubActionParams_Incident as the provided RunCloseincidentSubActionParamsIncident1 +func (t *RunCloseincident_SubActionParams_Incident) FromRunCloseincidentSubActionParamsIncident1(v RunCloseincidentSubActionParamsIncident1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeRunCloseincidentSubActionParamsIncident1 performs a merge with any union data inside the RunCloseincident_SubActionParams_Incident, using the provided RunCloseincidentSubActionParamsIncident1 +func (t *RunCloseincident_SubActionParams_Incident) MergeRunCloseincidentSubActionParamsIncident1(v RunCloseincidentSubActionParamsIncident1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t RunCloseincident_SubActionParams_Incident) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + if err != nil { + return nil, err + } + object := make(map[string]json.RawMessage) + if t.union != nil { + err = json.Unmarshal(b, &object) + if err != nil { + return nil, err + } + } + + if t.CorrelationId != nil { + object["correlation_id"], err = json.Marshal(t.CorrelationId) + if 
err != nil { + return nil, fmt.Errorf("error marshaling 'correlation_id': %w", err) + } + } + + if t.ExternalId != nil { + object["externalId"], err = json.Marshal(t.ExternalId) + if err != nil { + return nil, fmt.Errorf("error marshaling 'externalId': %w", err) + } + } + b, err = json.Marshal(object) + return b, err +} + +func (t *RunCloseincident_SubActionParams_Incident) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + if err != nil { + return err + } + object := make(map[string]json.RawMessage) + err = json.Unmarshal(b, &object) + if err != nil { + return err + } + + if raw, found := object["correlation_id"]; found { + err = json.Unmarshal(raw, &t.CorrelationId) + if err != nil { + return fmt.Errorf("error reading 'correlation_id': %w", err) + } + } + + if raw, found := object["externalId"]; found { + err = json.Unmarshal(raw, &t.ExternalId) + if err != nil { + return fmt.Errorf("error reading 'externalId': %w", err) + } + } + + return err +} + +// AsRunPushtoserviceSubActionParamsIncidentDestIp0 returns the union data inside the RunPushtoservice_SubActionParams_Incident_DestIp as a RunPushtoserviceSubActionParamsIncidentDestIp0 +func (t RunPushtoservice_SubActionParams_Incident_DestIp) AsRunPushtoserviceSubActionParamsIncidentDestIp0() (RunPushtoserviceSubActionParamsIncidentDestIp0, error) { + var body RunPushtoserviceSubActionParamsIncidentDestIp0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromRunPushtoserviceSubActionParamsIncidentDestIp0 overwrites any union data inside the RunPushtoservice_SubActionParams_Incident_DestIp as the provided RunPushtoserviceSubActionParamsIncidentDestIp0 +func (t *RunPushtoservice_SubActionParams_Incident_DestIp) FromRunPushtoserviceSubActionParamsIncidentDestIp0(v RunPushtoserviceSubActionParamsIncidentDestIp0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeRunPushtoserviceSubActionParamsIncidentDestIp0 performs a merge with any union data inside the RunPushtoservice_SubActionParams_Incident_DestIp, using the provided RunPushtoserviceSubActionParamsIncidentDestIp0 +func (t *RunPushtoservice_SubActionParams_Incident_DestIp) MergeRunPushtoserviceSubActionParamsIncidentDestIp0(v RunPushtoserviceSubActionParamsIncidentDestIp0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsRunPushtoserviceSubActionParamsIncidentDestIp1 returns the union data inside the RunPushtoservice_SubActionParams_Incident_DestIp as a RunPushtoserviceSubActionParamsIncidentDestIp1 +func (t RunPushtoservice_SubActionParams_Incident_DestIp) AsRunPushtoserviceSubActionParamsIncidentDestIp1() (RunPushtoserviceSubActionParamsIncidentDestIp1, error) { + var body RunPushtoserviceSubActionParamsIncidentDestIp1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromRunPushtoserviceSubActionParamsIncidentDestIp1 overwrites any union data inside the RunPushtoservice_SubActionParams_Incident_DestIp as the provided RunPushtoserviceSubActionParamsIncidentDestIp1 +func (t *RunPushtoservice_SubActionParams_Incident_DestIp) FromRunPushtoserviceSubActionParamsIncidentDestIp1(v RunPushtoserviceSubActionParamsIncidentDestIp1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeRunPushtoserviceSubActionParamsIncidentDestIp1 performs a merge with any union data inside the RunPushtoservice_SubActionParams_Incident_DestIp, using the provided 
RunPushtoserviceSubActionParamsIncidentDestIp1 +func (t *RunPushtoservice_SubActionParams_Incident_DestIp) MergeRunPushtoserviceSubActionParamsIncidentDestIp1(v RunPushtoserviceSubActionParamsIncidentDestIp1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t RunPushtoservice_SubActionParams_Incident_DestIp) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *RunPushtoservice_SubActionParams_Incident_DestIp) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsRunPushtoserviceSubActionParamsIncidentMalwareHash0 returns the union data inside the RunPushtoservice_SubActionParams_Incident_MalwareHash as a RunPushtoserviceSubActionParamsIncidentMalwareHash0 +func (t RunPushtoservice_SubActionParams_Incident_MalwareHash) AsRunPushtoserviceSubActionParamsIncidentMalwareHash0() (RunPushtoserviceSubActionParamsIncidentMalwareHash0, error) { + var body RunPushtoserviceSubActionParamsIncidentMalwareHash0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromRunPushtoserviceSubActionParamsIncidentMalwareHash0 overwrites any union data inside the RunPushtoservice_SubActionParams_Incident_MalwareHash as the provided RunPushtoserviceSubActionParamsIncidentMalwareHash0 +func (t *RunPushtoservice_SubActionParams_Incident_MalwareHash) FromRunPushtoserviceSubActionParamsIncidentMalwareHash0(v RunPushtoserviceSubActionParamsIncidentMalwareHash0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeRunPushtoserviceSubActionParamsIncidentMalwareHash0 performs a merge with any union data inside the RunPushtoservice_SubActionParams_Incident_MalwareHash, using the provided RunPushtoserviceSubActionParamsIncidentMalwareHash0 +func (t *RunPushtoservice_SubActionParams_Incident_MalwareHash) MergeRunPushtoserviceSubActionParamsIncidentMalwareHash0(v RunPushtoserviceSubActionParamsIncidentMalwareHash0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsRunPushtoserviceSubActionParamsIncidentMalwareHash1 returns the union data inside the RunPushtoservice_SubActionParams_Incident_MalwareHash as a RunPushtoserviceSubActionParamsIncidentMalwareHash1 +func (t RunPushtoservice_SubActionParams_Incident_MalwareHash) AsRunPushtoserviceSubActionParamsIncidentMalwareHash1() (RunPushtoserviceSubActionParamsIncidentMalwareHash1, error) { + var body RunPushtoserviceSubActionParamsIncidentMalwareHash1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromRunPushtoserviceSubActionParamsIncidentMalwareHash1 overwrites any union data inside the RunPushtoservice_SubActionParams_Incident_MalwareHash as the provided RunPushtoserviceSubActionParamsIncidentMalwareHash1 +func (t *RunPushtoservice_SubActionParams_Incident_MalwareHash) FromRunPushtoserviceSubActionParamsIncidentMalwareHash1(v RunPushtoserviceSubActionParamsIncidentMalwareHash1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeRunPushtoserviceSubActionParamsIncidentMalwareHash1 performs a merge with any union data inside the RunPushtoservice_SubActionParams_Incident_MalwareHash, using the provided RunPushtoserviceSubActionParamsIncidentMalwareHash1 +func (t *RunPushtoservice_SubActionParams_Incident_MalwareHash) MergeRunPushtoserviceSubActionParamsIncidentMalwareHash1(v 
RunPushtoserviceSubActionParamsIncidentMalwareHash1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t RunPushtoservice_SubActionParams_Incident_MalwareHash) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *RunPushtoservice_SubActionParams_Incident_MalwareHash) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsRunPushtoserviceSubActionParamsIncidentSourceIp0 returns the union data inside the RunPushtoservice_SubActionParams_Incident_SourceIp as a RunPushtoserviceSubActionParamsIncidentSourceIp0 +func (t RunPushtoservice_SubActionParams_Incident_SourceIp) AsRunPushtoserviceSubActionParamsIncidentSourceIp0() (RunPushtoserviceSubActionParamsIncidentSourceIp0, error) { + var body RunPushtoserviceSubActionParamsIncidentSourceIp0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromRunPushtoserviceSubActionParamsIncidentSourceIp0 overwrites any union data inside the RunPushtoservice_SubActionParams_Incident_SourceIp as the provided RunPushtoserviceSubActionParamsIncidentSourceIp0 +func (t *RunPushtoservice_SubActionParams_Incident_SourceIp) FromRunPushtoserviceSubActionParamsIncidentSourceIp0(v RunPushtoserviceSubActionParamsIncidentSourceIp0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeRunPushtoserviceSubActionParamsIncidentSourceIp0 performs a merge with any union data inside the RunPushtoservice_SubActionParams_Incident_SourceIp, using the provided RunPushtoserviceSubActionParamsIncidentSourceIp0 +func (t *RunPushtoservice_SubActionParams_Incident_SourceIp) MergeRunPushtoserviceSubActionParamsIncidentSourceIp0(v RunPushtoserviceSubActionParamsIncidentSourceIp0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsRunPushtoserviceSubActionParamsIncidentSourceIp1 returns the union data inside the RunPushtoservice_SubActionParams_Incident_SourceIp as a RunPushtoserviceSubActionParamsIncidentSourceIp1 +func (t RunPushtoservice_SubActionParams_Incident_SourceIp) AsRunPushtoserviceSubActionParamsIncidentSourceIp1() (RunPushtoserviceSubActionParamsIncidentSourceIp1, error) { + var body RunPushtoserviceSubActionParamsIncidentSourceIp1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromRunPushtoserviceSubActionParamsIncidentSourceIp1 overwrites any union data inside the RunPushtoservice_SubActionParams_Incident_SourceIp as the provided RunPushtoserviceSubActionParamsIncidentSourceIp1 +func (t *RunPushtoservice_SubActionParams_Incident_SourceIp) FromRunPushtoserviceSubActionParamsIncidentSourceIp1(v RunPushtoserviceSubActionParamsIncidentSourceIp1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeRunPushtoserviceSubActionParamsIncidentSourceIp1 performs a merge with any union data inside the RunPushtoservice_SubActionParams_Incident_SourceIp, using the provided RunPushtoserviceSubActionParamsIncidentSourceIp1 +func (t *RunPushtoservice_SubActionParams_Incident_SourceIp) MergeRunPushtoserviceSubActionParamsIncidentSourceIp1(v RunPushtoserviceSubActionParamsIncidentSourceIp1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t RunPushtoservice_SubActionParams_Incident_SourceIp) 
MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *RunPushtoservice_SubActionParams_Incident_SourceIp) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsServerHostSecretsSslEsKey0 returns the union data inside the ServerHost_Secrets_Ssl_EsKey as a ServerHostSecretsSslEsKey0 +func (t ServerHost_Secrets_Ssl_EsKey) AsServerHostSecretsSslEsKey0() (ServerHostSecretsSslEsKey0, error) { + var body ServerHostSecretsSslEsKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServerHostSecretsSslEsKey0 overwrites any union data inside the ServerHost_Secrets_Ssl_EsKey as the provided ServerHostSecretsSslEsKey0 +func (t *ServerHost_Secrets_Ssl_EsKey) FromServerHostSecretsSslEsKey0(v ServerHostSecretsSslEsKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServerHostSecretsSslEsKey0 performs a merge with any union data inside the ServerHost_Secrets_Ssl_EsKey, using the provided ServerHostSecretsSslEsKey0 +func (t *ServerHost_Secrets_Ssl_EsKey) MergeServerHostSecretsSslEsKey0(v ServerHostSecretsSslEsKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServerHostSecretsSslEsKey1 returns the union data inside the ServerHost_Secrets_Ssl_EsKey as a ServerHostSecretsSslEsKey1 +func (t ServerHost_Secrets_Ssl_EsKey) AsServerHostSecretsSslEsKey1() (ServerHostSecretsSslEsKey1, error) { + var body ServerHostSecretsSslEsKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServerHostSecretsSslEsKey1 overwrites any union data inside the ServerHost_Secrets_Ssl_EsKey as the provided ServerHostSecretsSslEsKey1 +func (t *ServerHost_Secrets_Ssl_EsKey) FromServerHostSecretsSslEsKey1(v ServerHostSecretsSslEsKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServerHostSecretsSslEsKey1 performs a merge with any union data inside the ServerHost_Secrets_Ssl_EsKey, using the provided ServerHostSecretsSslEsKey1 +func (t *ServerHost_Secrets_Ssl_EsKey) MergeServerHostSecretsSslEsKey1(v ServerHostSecretsSslEsKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ServerHost_Secrets_Ssl_EsKey) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ServerHost_Secrets_Ssl_EsKey) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsServerHostSecretsSslKey0 returns the union data inside the ServerHost_Secrets_Ssl_Key as a ServerHostSecretsSslKey0 +func (t ServerHost_Secrets_Ssl_Key) AsServerHostSecretsSslKey0() (ServerHostSecretsSslKey0, error) { + var body ServerHostSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServerHostSecretsSslKey0 overwrites any union data inside the ServerHost_Secrets_Ssl_Key as the provided ServerHostSecretsSslKey0 +func (t *ServerHost_Secrets_Ssl_Key) FromServerHostSecretsSslKey0(v ServerHostSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServerHostSecretsSslKey0 performs a merge with any union data inside the ServerHost_Secrets_Ssl_Key, using the provided ServerHostSecretsSslKey0 +func (t *ServerHost_Secrets_Ssl_Key) MergeServerHostSecretsSslKey0(v ServerHostSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + 
return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServerHostSecretsSslKey1 returns the union data inside the ServerHost_Secrets_Ssl_Key as a ServerHostSecretsSslKey1 +func (t ServerHost_Secrets_Ssl_Key) AsServerHostSecretsSslKey1() (ServerHostSecretsSslKey1, error) { + var body ServerHostSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServerHostSecretsSslKey1 overwrites any union data inside the ServerHost_Secrets_Ssl_Key as the provided ServerHostSecretsSslKey1 +func (t *ServerHost_Secrets_Ssl_Key) FromServerHostSecretsSslKey1(v ServerHostSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServerHostSecretsSslKey1 performs a merge with any union data inside the ServerHost_Secrets_Ssl_Key, using the provided ServerHostSecretsSslKey1 +func (t *ServerHost_Secrets_Ssl_Key) MergeServerHostSecretsSslKey1(v ServerHostSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t ServerHost_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *ServerHost_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsTermfield0 returns the union data inside the Termfield as a Termfield0 +func (t Termfield) AsTermfield0() (Termfield0, error) { + var body Termfield0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTermfield0 overwrites any union data inside the Termfield as the provided Termfield0 +func (t *Termfield) FromTermfield0(v Termfield0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTermfield0 performs a merge with any union data inside the Termfield, using the provided Termfield0 +func (t *Termfield) MergeTermfield0(v Termfield0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTermfield1 returns the union data inside the Termfield as a Termfield1 +func (t Termfield) AsTermfield1() (Termfield1, error) { + var body Termfield1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTermfield1 overwrites any union data inside the Termfield as the provided Termfield1 +func (t *Termfield) FromTermfield1(v Termfield1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTermfield1 performs a merge with any union data inside the Termfield, using the provided Termfield1 +func (t *Termfield) MergeTermfield1(v Termfield1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t Termfield) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *Termfield) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsBedrockConfig returns the union data inside the UpdateConnectorConfig as a BedrockConfig +func (t UpdateConnectorConfig) AsBedrockConfig() (BedrockConfig, error) { + var body BedrockConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromBedrockConfig overwrites any union data inside the UpdateConnectorConfig as the provided BedrockConfig +func (t *UpdateConnectorConfig) FromBedrockConfig(v BedrockConfig) error { + b, err := 
json.Marshal(v) + t.union = b + return err +} + +// MergeBedrockConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided BedrockConfig +func (t *UpdateConnectorConfig) MergeBedrockConfig(v BedrockConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCrowdstrikeConfig returns the union data inside the UpdateConnectorConfig as a CrowdstrikeConfig +func (t UpdateConnectorConfig) AsCrowdstrikeConfig() (CrowdstrikeConfig, error) { + var body CrowdstrikeConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCrowdstrikeConfig overwrites any union data inside the UpdateConnectorConfig as the provided CrowdstrikeConfig +func (t *UpdateConnectorConfig) FromCrowdstrikeConfig(v CrowdstrikeConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCrowdstrikeConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided CrowdstrikeConfig +func (t *UpdateConnectorConfig) MergeCrowdstrikeConfig(v CrowdstrikeConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsD3securityConfig returns the union data inside the UpdateConnectorConfig as a D3securityConfig +func (t UpdateConnectorConfig) AsD3securityConfig() (D3securityConfig, error) { + var body D3securityConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromD3securityConfig overwrites any union data inside the UpdateConnectorConfig as the provided D3securityConfig +func (t *UpdateConnectorConfig) FromD3securityConfig(v D3securityConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeD3securityConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided D3securityConfig +func (t *UpdateConnectorConfig) MergeD3securityConfig(v D3securityConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsEmailConfig returns the union data inside the UpdateConnectorConfig as a EmailConfig +func (t UpdateConnectorConfig) AsEmailConfig() (EmailConfig, error) { + var body EmailConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromEmailConfig overwrites any union data inside the UpdateConnectorConfig as the provided EmailConfig +func (t *UpdateConnectorConfig) FromEmailConfig(v EmailConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeEmailConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided EmailConfig +func (t *UpdateConnectorConfig) MergeEmailConfig(v EmailConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGeminiConfig returns the union data inside the UpdateConnectorConfig as a GeminiConfig +func (t UpdateConnectorConfig) AsGeminiConfig() (GeminiConfig, error) { + var body GeminiConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGeminiConfig overwrites any union data inside the UpdateConnectorConfig as the provided GeminiConfig +func (t *UpdateConnectorConfig) FromGeminiConfig(v GeminiConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// 
MergeGeminiConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided GeminiConfig +func (t *UpdateConnectorConfig) MergeGeminiConfig(v GeminiConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsResilientConfig returns the union data inside the UpdateConnectorConfig as a ResilientConfig +func (t UpdateConnectorConfig) AsResilientConfig() (ResilientConfig, error) { + var body ResilientConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromResilientConfig overwrites any union data inside the UpdateConnectorConfig as the provided ResilientConfig +func (t *UpdateConnectorConfig) FromResilientConfig(v ResilientConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeResilientConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ResilientConfig +func (t *UpdateConnectorConfig) MergeResilientConfig(v ResilientConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsIndexConfig returns the union data inside the UpdateConnectorConfig as a IndexConfig +func (t UpdateConnectorConfig) AsIndexConfig() (IndexConfig, error) { + var body IndexConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromIndexConfig overwrites any union data inside the UpdateConnectorConfig as the provided IndexConfig +func (t *UpdateConnectorConfig) FromIndexConfig(v IndexConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeIndexConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided IndexConfig +func (t *UpdateConnectorConfig) MergeIndexConfig(v IndexConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsJiraConfig returns the union data inside the UpdateConnectorConfig as a JiraConfig +func (t UpdateConnectorConfig) AsJiraConfig() (JiraConfig, error) { + var body JiraConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromJiraConfig overwrites any union data inside the UpdateConnectorConfig as the provided JiraConfig +func (t *UpdateConnectorConfig) FromJiraConfig(v JiraConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeJiraConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided JiraConfig +func (t *UpdateConnectorConfig) MergeJiraConfig(v JiraConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsDefenderConfig returns the union data inside the UpdateConnectorConfig as a DefenderConfig +func (t UpdateConnectorConfig) AsDefenderConfig() (DefenderConfig, error) { + var body DefenderConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromDefenderConfig overwrites any union data inside the UpdateConnectorConfig as the provided DefenderConfig +func (t *UpdateConnectorConfig) FromDefenderConfig(v DefenderConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeDefenderConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided DefenderConfig +func (t 
*UpdateConnectorConfig) MergeDefenderConfig(v DefenderConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGenaiAzureConfig returns the union data inside the UpdateConnectorConfig as a GenaiAzureConfig +func (t UpdateConnectorConfig) AsGenaiAzureConfig() (GenaiAzureConfig, error) { + var body GenaiAzureConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGenaiAzureConfig overwrites any union data inside the UpdateConnectorConfig as the provided GenaiAzureConfig +func (t *UpdateConnectorConfig) FromGenaiAzureConfig(v GenaiAzureConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGenaiAzureConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided GenaiAzureConfig +func (t *UpdateConnectorConfig) MergeGenaiAzureConfig(v GenaiAzureConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGenaiOpenaiConfig returns the union data inside the UpdateConnectorConfig as a GenaiOpenaiConfig +func (t UpdateConnectorConfig) AsGenaiOpenaiConfig() (GenaiOpenaiConfig, error) { + var body GenaiOpenaiConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGenaiOpenaiConfig overwrites any union data inside the UpdateConnectorConfig as the provided GenaiOpenaiConfig +func (t *UpdateConnectorConfig) FromGenaiOpenaiConfig(v GenaiOpenaiConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGenaiOpenaiConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided GenaiOpenaiConfig +func (t *UpdateConnectorConfig) MergeGenaiOpenaiConfig(v GenaiOpenaiConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsOpsgenieConfig returns the union data inside the UpdateConnectorConfig as a OpsgenieConfig +func (t UpdateConnectorConfig) AsOpsgenieConfig() (OpsgenieConfig, error) { + var body OpsgenieConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOpsgenieConfig overwrites any union data inside the UpdateConnectorConfig as the provided OpsgenieConfig +func (t *UpdateConnectorConfig) FromOpsgenieConfig(v OpsgenieConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOpsgenieConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided OpsgenieConfig +func (t *UpdateConnectorConfig) MergeOpsgenieConfig(v OpsgenieConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPagerdutyConfig returns the union data inside the UpdateConnectorConfig as a PagerdutyConfig +func (t UpdateConnectorConfig) AsPagerdutyConfig() (PagerdutyConfig, error) { + var body PagerdutyConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPagerdutyConfig overwrites any union data inside the UpdateConnectorConfig as the provided PagerdutyConfig +func (t *UpdateConnectorConfig) FromPagerdutyConfig(v PagerdutyConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePagerdutyConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided PagerdutyConfig 
+func (t *UpdateConnectorConfig) MergePagerdutyConfig(v PagerdutyConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSentineloneConfig returns the union data inside the UpdateConnectorConfig as a SentineloneConfig +func (t UpdateConnectorConfig) AsSentineloneConfig() (SentineloneConfig, error) { + var body SentineloneConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSentineloneConfig overwrites any union data inside the UpdateConnectorConfig as the provided SentineloneConfig +func (t *UpdateConnectorConfig) FromSentineloneConfig(v SentineloneConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSentineloneConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided SentineloneConfig +func (t *UpdateConnectorConfig) MergeSentineloneConfig(v SentineloneConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServicenowConfig returns the union data inside the UpdateConnectorConfig as a ServicenowConfig +func (t UpdateConnectorConfig) AsServicenowConfig() (ServicenowConfig, error) { + var body ServicenowConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServicenowConfig overwrites any union data inside the UpdateConnectorConfig as the provided ServicenowConfig +func (t *UpdateConnectorConfig) FromServicenowConfig(v ServicenowConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServicenowConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ServicenowConfig +func (t *UpdateConnectorConfig) MergeServicenowConfig(v ServicenowConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServicenowItomConfig returns the union data inside the UpdateConnectorConfig as a ServicenowItomConfig +func (t UpdateConnectorConfig) AsServicenowItomConfig() (ServicenowItomConfig, error) { + var body ServicenowItomConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServicenowItomConfig overwrites any union data inside the UpdateConnectorConfig as the provided ServicenowItomConfig +func (t *UpdateConnectorConfig) FromServicenowItomConfig(v ServicenowItomConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServicenowItomConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ServicenowItomConfig +func (t *UpdateConnectorConfig) MergeServicenowItomConfig(v ServicenowItomConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSlackApiConfig returns the union data inside the UpdateConnectorConfig as a SlackApiConfig +func (t UpdateConnectorConfig) AsSlackApiConfig() (SlackApiConfig, error) { + var body SlackApiConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSlackApiConfig overwrites any union data inside the UpdateConnectorConfig as the provided SlackApiConfig +func (t *UpdateConnectorConfig) FromSlackApiConfig(v SlackApiConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSlackApiConfig performs a merge with any 
union data inside the UpdateConnectorConfig, using the provided SlackApiConfig +func (t *UpdateConnectorConfig) MergeSlackApiConfig(v SlackApiConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSwimlaneConfig returns the union data inside the UpdateConnectorConfig as a SwimlaneConfig +func (t UpdateConnectorConfig) AsSwimlaneConfig() (SwimlaneConfig, error) { + var body SwimlaneConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSwimlaneConfig overwrites any union data inside the UpdateConnectorConfig as the provided SwimlaneConfig +func (t *UpdateConnectorConfig) FromSwimlaneConfig(v SwimlaneConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSwimlaneConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided SwimlaneConfig +func (t *UpdateConnectorConfig) MergeSwimlaneConfig(v SwimlaneConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsThehiveConfig returns the union data inside the UpdateConnectorConfig as a ThehiveConfig +func (t UpdateConnectorConfig) AsThehiveConfig() (ThehiveConfig, error) { + var body ThehiveConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromThehiveConfig overwrites any union data inside the UpdateConnectorConfig as the provided ThehiveConfig +func (t *UpdateConnectorConfig) FromThehiveConfig(v ThehiveConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeThehiveConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ThehiveConfig +func (t *UpdateConnectorConfig) MergeThehiveConfig(v ThehiveConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTinesConfig returns the union data inside the UpdateConnectorConfig as a TinesConfig +func (t UpdateConnectorConfig) AsTinesConfig() (TinesConfig, error) { + var body TinesConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTinesConfig overwrites any union data inside the UpdateConnectorConfig as the provided TinesConfig +func (t *UpdateConnectorConfig) FromTinesConfig(v TinesConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTinesConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided TinesConfig +func (t *UpdateConnectorConfig) MergeTinesConfig(v TinesConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTorqConfig returns the union data inside the UpdateConnectorConfig as a TorqConfig +func (t UpdateConnectorConfig) AsTorqConfig() (TorqConfig, error) { + var body TorqConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTorqConfig overwrites any union data inside the UpdateConnectorConfig as the provided TorqConfig +func (t *UpdateConnectorConfig) FromTorqConfig(v TorqConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTorqConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided TorqConfig +func (t *UpdateConnectorConfig) MergeTorqConfig(v TorqConfig) error { + b, err := 
json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsWebhookConfig returns the union data inside the UpdateConnectorConfig as a WebhookConfig +func (t UpdateConnectorConfig) AsWebhookConfig() (WebhookConfig, error) { + var body WebhookConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromWebhookConfig overwrites any union data inside the UpdateConnectorConfig as the provided WebhookConfig +func (t *UpdateConnectorConfig) FromWebhookConfig(v WebhookConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeWebhookConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided WebhookConfig +func (t *UpdateConnectorConfig) MergeWebhookConfig(v WebhookConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesWebhookConfig returns the union data inside the UpdateConnectorConfig as a CasesWebhookConfig +func (t UpdateConnectorConfig) AsCasesWebhookConfig() (CasesWebhookConfig, error) { + var body CasesWebhookConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesWebhookConfig overwrites any union data inside the UpdateConnectorConfig as the provided CasesWebhookConfig +func (t *UpdateConnectorConfig) FromCasesWebhookConfig(v CasesWebhookConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesWebhookConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided CasesWebhookConfig +func (t *UpdateConnectorConfig) MergeCasesWebhookConfig(v CasesWebhookConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsXmattersConfig returns the union data inside the UpdateConnectorConfig as a XmattersConfig +func (t UpdateConnectorConfig) AsXmattersConfig() (XmattersConfig, error) { + var body XmattersConfig + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromXmattersConfig overwrites any union data inside the UpdateConnectorConfig as the provided XmattersConfig +func (t *UpdateConnectorConfig) FromXmattersConfig(v XmattersConfig) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeXmattersConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided XmattersConfig +func (t *UpdateConnectorConfig) MergeXmattersConfig(v XmattersConfig) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsBedrockSecrets returns the union data inside the UpdateConnectorSecrets as a BedrockSecrets +func (t UpdateConnectorSecrets) AsBedrockSecrets() (BedrockSecrets, error) { + var body BedrockSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromBedrockSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided BedrockSecrets +func (t *UpdateConnectorSecrets) FromBedrockSecrets(v BedrockSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeBedrockSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided BedrockSecrets +func (t *UpdateConnectorSecrets) MergeBedrockSecrets(v BedrockSecrets) error { + b, err := json.Marshal(v) + if 
err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCrowdstrikeSecrets returns the union data inside the UpdateConnectorSecrets as a CrowdstrikeSecrets +func (t UpdateConnectorSecrets) AsCrowdstrikeSecrets() (CrowdstrikeSecrets, error) { + var body CrowdstrikeSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCrowdstrikeSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided CrowdstrikeSecrets +func (t *UpdateConnectorSecrets) FromCrowdstrikeSecrets(v CrowdstrikeSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCrowdstrikeSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided CrowdstrikeSecrets +func (t *UpdateConnectorSecrets) MergeCrowdstrikeSecrets(v CrowdstrikeSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsD3securitySecrets returns the union data inside the UpdateConnectorSecrets as a D3securitySecrets +func (t UpdateConnectorSecrets) AsD3securitySecrets() (D3securitySecrets, error) { + var body D3securitySecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromD3securitySecrets overwrites any union data inside the UpdateConnectorSecrets as the provided D3securitySecrets +func (t *UpdateConnectorSecrets) FromD3securitySecrets(v D3securitySecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeD3securitySecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided D3securitySecrets +func (t *UpdateConnectorSecrets) MergeD3securitySecrets(v D3securitySecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsEmailSecrets returns the union data inside the UpdateConnectorSecrets as a EmailSecrets +func (t UpdateConnectorSecrets) AsEmailSecrets() (EmailSecrets, error) { + var body EmailSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromEmailSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided EmailSecrets +func (t *UpdateConnectorSecrets) FromEmailSecrets(v EmailSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeEmailSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided EmailSecrets +func (t *UpdateConnectorSecrets) MergeEmailSecrets(v EmailSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGeminiSecrets returns the union data inside the UpdateConnectorSecrets as a GeminiSecrets +func (t UpdateConnectorSecrets) AsGeminiSecrets() (GeminiSecrets, error) { + var body GeminiSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGeminiSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided GeminiSecrets +func (t *UpdateConnectorSecrets) FromGeminiSecrets(v GeminiSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGeminiSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided GeminiSecrets +func (t *UpdateConnectorSecrets) MergeGeminiSecrets(v GeminiSecrets) error { + b, err := 
json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsResilientSecrets returns the union data inside the UpdateConnectorSecrets as a ResilientSecrets +func (t UpdateConnectorSecrets) AsResilientSecrets() (ResilientSecrets, error) { + var body ResilientSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromResilientSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided ResilientSecrets +func (t *UpdateConnectorSecrets) FromResilientSecrets(v ResilientSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeResilientSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided ResilientSecrets +func (t *UpdateConnectorSecrets) MergeResilientSecrets(v ResilientSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsJiraSecrets returns the union data inside the UpdateConnectorSecrets as a JiraSecrets +func (t UpdateConnectorSecrets) AsJiraSecrets() (JiraSecrets, error) { + var body JiraSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromJiraSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided JiraSecrets +func (t *UpdateConnectorSecrets) FromJiraSecrets(v JiraSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeJiraSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided JiraSecrets +func (t *UpdateConnectorSecrets) MergeJiraSecrets(v JiraSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTeamsSecrets returns the union data inside the UpdateConnectorSecrets as a TeamsSecrets +func (t UpdateConnectorSecrets) AsTeamsSecrets() (TeamsSecrets, error) { + var body TeamsSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTeamsSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided TeamsSecrets +func (t *UpdateConnectorSecrets) FromTeamsSecrets(v TeamsSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTeamsSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided TeamsSecrets +func (t *UpdateConnectorSecrets) MergeTeamsSecrets(v TeamsSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsGenaiSecrets returns the union data inside the UpdateConnectorSecrets as a GenaiSecrets +func (t UpdateConnectorSecrets) AsGenaiSecrets() (GenaiSecrets, error) { + var body GenaiSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromGenaiSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided GenaiSecrets +func (t *UpdateConnectorSecrets) FromGenaiSecrets(v GenaiSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeGenaiSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided GenaiSecrets +func (t *UpdateConnectorSecrets) MergeGenaiSecrets(v GenaiSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + 
t.union = merged + return err +} + +// AsOpsgenieSecrets returns the union data inside the UpdateConnectorSecrets as a OpsgenieSecrets +func (t UpdateConnectorSecrets) AsOpsgenieSecrets() (OpsgenieSecrets, error) { + var body OpsgenieSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromOpsgenieSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided OpsgenieSecrets +func (t *UpdateConnectorSecrets) FromOpsgenieSecrets(v OpsgenieSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeOpsgenieSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided OpsgenieSecrets +func (t *UpdateConnectorSecrets) MergeOpsgenieSecrets(v OpsgenieSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsPagerdutySecrets returns the union data inside the UpdateConnectorSecrets as a PagerdutySecrets +func (t UpdateConnectorSecrets) AsPagerdutySecrets() (PagerdutySecrets, error) { + var body PagerdutySecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromPagerdutySecrets overwrites any union data inside the UpdateConnectorSecrets as the provided PagerdutySecrets +func (t *UpdateConnectorSecrets) FromPagerdutySecrets(v PagerdutySecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergePagerdutySecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided PagerdutySecrets +func (t *UpdateConnectorSecrets) MergePagerdutySecrets(v PagerdutySecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSentineloneSecrets returns the union data inside the UpdateConnectorSecrets as a SentineloneSecrets +func (t UpdateConnectorSecrets) AsSentineloneSecrets() (SentineloneSecrets, error) { + var body SentineloneSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSentineloneSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided SentineloneSecrets +func (t *UpdateConnectorSecrets) FromSentineloneSecrets(v SentineloneSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSentineloneSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided SentineloneSecrets +func (t *UpdateConnectorSecrets) MergeSentineloneSecrets(v SentineloneSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsServicenowSecrets returns the union data inside the UpdateConnectorSecrets as a ServicenowSecrets +func (t UpdateConnectorSecrets) AsServicenowSecrets() (ServicenowSecrets, error) { + var body ServicenowSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromServicenowSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided ServicenowSecrets +func (t *UpdateConnectorSecrets) FromServicenowSecrets(v ServicenowSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeServicenowSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided ServicenowSecrets +func (t *UpdateConnectorSecrets) MergeServicenowSecrets(v ServicenowSecrets) error { + b, err := 
json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSlackApiSecrets returns the union data inside the UpdateConnectorSecrets as a SlackApiSecrets +func (t UpdateConnectorSecrets) AsSlackApiSecrets() (SlackApiSecrets, error) { + var body SlackApiSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSlackApiSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided SlackApiSecrets +func (t *UpdateConnectorSecrets) FromSlackApiSecrets(v SlackApiSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSlackApiSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided SlackApiSecrets +func (t *UpdateConnectorSecrets) MergeSlackApiSecrets(v SlackApiSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsSwimlaneSecrets returns the union data inside the UpdateConnectorSecrets as a SwimlaneSecrets +func (t UpdateConnectorSecrets) AsSwimlaneSecrets() (SwimlaneSecrets, error) { + var body SwimlaneSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromSwimlaneSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided SwimlaneSecrets +func (t *UpdateConnectorSecrets) FromSwimlaneSecrets(v SwimlaneSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeSwimlaneSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided SwimlaneSecrets +func (t *UpdateConnectorSecrets) MergeSwimlaneSecrets(v SwimlaneSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsThehiveSecrets returns the union data inside the UpdateConnectorSecrets as a ThehiveSecrets +func (t UpdateConnectorSecrets) AsThehiveSecrets() (ThehiveSecrets, error) { + var body ThehiveSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromThehiveSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided ThehiveSecrets +func (t *UpdateConnectorSecrets) FromThehiveSecrets(v ThehiveSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeThehiveSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided ThehiveSecrets +func (t *UpdateConnectorSecrets) MergeThehiveSecrets(v ThehiveSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTinesSecrets returns the union data inside the UpdateConnectorSecrets as a TinesSecrets +func (t UpdateConnectorSecrets) AsTinesSecrets() (TinesSecrets, error) { + var body TinesSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTinesSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided TinesSecrets +func (t *UpdateConnectorSecrets) FromTinesSecrets(v TinesSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTinesSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided TinesSecrets +func (t *UpdateConnectorSecrets) MergeTinesSecrets(v TinesSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return 
err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsTorqSecrets returns the union data inside the UpdateConnectorSecrets as a TorqSecrets +func (t UpdateConnectorSecrets) AsTorqSecrets() (TorqSecrets, error) { + var body TorqSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromTorqSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided TorqSecrets +func (t *UpdateConnectorSecrets) FromTorqSecrets(v TorqSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeTorqSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided TorqSecrets +func (t *UpdateConnectorSecrets) MergeTorqSecrets(v TorqSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsWebhookSecrets returns the union data inside the UpdateConnectorSecrets as a WebhookSecrets +func (t UpdateConnectorSecrets) AsWebhookSecrets() (WebhookSecrets, error) { + var body WebhookSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromWebhookSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided WebhookSecrets +func (t *UpdateConnectorSecrets) FromWebhookSecrets(v WebhookSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeWebhookSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided WebhookSecrets +func (t *UpdateConnectorSecrets) MergeWebhookSecrets(v WebhookSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesWebhookSecrets returns the union data inside the UpdateConnectorSecrets as a CasesWebhookSecrets +func (t UpdateConnectorSecrets) AsCasesWebhookSecrets() (CasesWebhookSecrets, error) { + var body CasesWebhookSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesWebhookSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided CasesWebhookSecrets +func (t *UpdateConnectorSecrets) FromCasesWebhookSecrets(v CasesWebhookSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesWebhookSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided CasesWebhookSecrets +func (t *UpdateConnectorSecrets) MergeCasesWebhookSecrets(v CasesWebhookSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsXmattersSecrets returns the union data inside the UpdateConnectorSecrets as a XmattersSecrets +func (t UpdateConnectorSecrets) AsXmattersSecrets() (XmattersSecrets, error) { + var body XmattersSecrets + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromXmattersSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided XmattersSecrets +func (t *UpdateConnectorSecrets) FromXmattersSecrets(v XmattersSecrets) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeXmattersSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided XmattersSecrets +func (t *UpdateConnectorSecrets) MergeXmattersSecrets(v XmattersSecrets) error { + b, err := json.Marshal(v) + if err != nil { + return err 
+ } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsUpdateOutputElasticsearchSecretsSslKey0 returns the union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key as a UpdateOutputElasticsearchSecretsSslKey0 +func (t UpdateOutputElasticsearch_Secrets_Ssl_Key) AsUpdateOutputElasticsearchSecretsSslKey0() (UpdateOutputElasticsearchSecretsSslKey0, error) { + var body UpdateOutputElasticsearchSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputElasticsearchSecretsSslKey0 overwrites any union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key as the provided UpdateOutputElasticsearchSecretsSslKey0 +func (t *UpdateOutputElasticsearch_Secrets_Ssl_Key) FromUpdateOutputElasticsearchSecretsSslKey0(v UpdateOutputElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputElasticsearchSecretsSslKey0 performs a merge with any union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key, using the provided UpdateOutputElasticsearchSecretsSslKey0 +func (t *UpdateOutputElasticsearch_Secrets_Ssl_Key) MergeUpdateOutputElasticsearchSecretsSslKey0(v UpdateOutputElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsUpdateOutputElasticsearchSecretsSslKey1 returns the union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key as a UpdateOutputElasticsearchSecretsSslKey1 +func (t UpdateOutputElasticsearch_Secrets_Ssl_Key) AsUpdateOutputElasticsearchSecretsSslKey1() (UpdateOutputElasticsearchSecretsSslKey1, error) { + var body UpdateOutputElasticsearchSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputElasticsearchSecretsSslKey1 overwrites any union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key as the provided UpdateOutputElasticsearchSecretsSslKey1 +func (t *UpdateOutputElasticsearch_Secrets_Ssl_Key) FromUpdateOutputElasticsearchSecretsSslKey1(v UpdateOutputElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputElasticsearchSecretsSslKey1 performs a merge with any union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key, using the provided UpdateOutputElasticsearchSecretsSslKey1 +func (t *UpdateOutputElasticsearch_Secrets_Ssl_Key) MergeUpdateOutputElasticsearchSecretsSslKey1(v UpdateOutputElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t UpdateOutputElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *UpdateOutputElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsUpdateOutputKafkaSecretsPassword0 returns the union data inside the UpdateOutputKafka_Secrets_Password as a UpdateOutputKafkaSecretsPassword0 +func (t UpdateOutputKafka_Secrets_Password) AsUpdateOutputKafkaSecretsPassword0() (UpdateOutputKafkaSecretsPassword0, error) { + var body UpdateOutputKafkaSecretsPassword0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputKafkaSecretsPassword0 overwrites any union data inside the UpdateOutputKafka_Secrets_Password as the provided UpdateOutputKafkaSecretsPassword0 +func 
(t *UpdateOutputKafka_Secrets_Password) FromUpdateOutputKafkaSecretsPassword0(v UpdateOutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputKafkaSecretsPassword0 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Password, using the provided UpdateOutputKafkaSecretsPassword0 +func (t *UpdateOutputKafka_Secrets_Password) MergeUpdateOutputKafkaSecretsPassword0(v UpdateOutputKafkaSecretsPassword0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsUpdateOutputKafkaSecretsPassword1 returns the union data inside the UpdateOutputKafka_Secrets_Password as a UpdateOutputKafkaSecretsPassword1 +func (t UpdateOutputKafka_Secrets_Password) AsUpdateOutputKafkaSecretsPassword1() (UpdateOutputKafkaSecretsPassword1, error) { + var body UpdateOutputKafkaSecretsPassword1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputKafkaSecretsPassword1 overwrites any union data inside the UpdateOutputKafka_Secrets_Password as the provided UpdateOutputKafkaSecretsPassword1 +func (t *UpdateOutputKafka_Secrets_Password) FromUpdateOutputKafkaSecretsPassword1(v UpdateOutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputKafkaSecretsPassword1 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Password, using the provided UpdateOutputKafkaSecretsPassword1 +func (t *UpdateOutputKafka_Secrets_Password) MergeUpdateOutputKafkaSecretsPassword1(v UpdateOutputKafkaSecretsPassword1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t UpdateOutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *UpdateOutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsUpdateOutputKafkaSecretsSslKey0 returns the union data inside the UpdateOutputKafka_Secrets_Ssl_Key as a UpdateOutputKafkaSecretsSslKey0 +func (t UpdateOutputKafka_Secrets_Ssl_Key) AsUpdateOutputKafkaSecretsSslKey0() (UpdateOutputKafkaSecretsSslKey0, error) { + var body UpdateOutputKafkaSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputKafkaSecretsSslKey0 overwrites any union data inside the UpdateOutputKafka_Secrets_Ssl_Key as the provided UpdateOutputKafkaSecretsSslKey0 +func (t *UpdateOutputKafka_Secrets_Ssl_Key) FromUpdateOutputKafkaSecretsSslKey0(v UpdateOutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputKafkaSecretsSslKey0 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Ssl_Key, using the provided UpdateOutputKafkaSecretsSslKey0 +func (t *UpdateOutputKafka_Secrets_Ssl_Key) MergeUpdateOutputKafkaSecretsSslKey0(v UpdateOutputKafkaSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsUpdateOutputKafkaSecretsSslKey1 returns the union data inside the UpdateOutputKafka_Secrets_Ssl_Key as a UpdateOutputKafkaSecretsSslKey1 +func (t UpdateOutputKafka_Secrets_Ssl_Key) AsUpdateOutputKafkaSecretsSslKey1() (UpdateOutputKafkaSecretsSslKey1, error) { + var body 
UpdateOutputKafkaSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputKafkaSecretsSslKey1 overwrites any union data inside the UpdateOutputKafka_Secrets_Ssl_Key as the provided UpdateOutputKafkaSecretsSslKey1 +func (t *UpdateOutputKafka_Secrets_Ssl_Key) FromUpdateOutputKafkaSecretsSslKey1(v UpdateOutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputKafkaSecretsSslKey1 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Ssl_Key, using the provided UpdateOutputKafkaSecretsSslKey1 +func (t *UpdateOutputKafka_Secrets_Ssl_Key) MergeUpdateOutputKafkaSecretsSslKey1(v UpdateOutputKafkaSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t UpdateOutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *UpdateOutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsUpdateOutputLogstashSecretsSslKey0 returns the union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as a UpdateOutputLogstashSecretsSslKey0 +func (t UpdateOutputLogstash_Secrets_Ssl_Key) AsUpdateOutputLogstashSecretsSslKey0() (UpdateOutputLogstashSecretsSslKey0, error) { + var body UpdateOutputLogstashSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputLogstashSecretsSslKey0 overwrites any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as the provided UpdateOutputLogstashSecretsSslKey0 +func (t *UpdateOutputLogstash_Secrets_Ssl_Key) FromUpdateOutputLogstashSecretsSslKey0(v UpdateOutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputLogstashSecretsSslKey0 performs a merge with any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key, using the provided UpdateOutputLogstashSecretsSslKey0 +func (t *UpdateOutputLogstash_Secrets_Ssl_Key) MergeUpdateOutputLogstashSecretsSslKey0(v UpdateOutputLogstashSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsUpdateOutputLogstashSecretsSslKey1 returns the union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as a UpdateOutputLogstashSecretsSslKey1 +func (t UpdateOutputLogstash_Secrets_Ssl_Key) AsUpdateOutputLogstashSecretsSslKey1() (UpdateOutputLogstashSecretsSslKey1, error) { + var body UpdateOutputLogstashSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputLogstashSecretsSslKey1 overwrites any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as the provided UpdateOutputLogstashSecretsSslKey1 +func (t *UpdateOutputLogstash_Secrets_Ssl_Key) FromUpdateOutputLogstashSecretsSslKey1(v UpdateOutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputLogstashSecretsSslKey1 performs a merge with any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key, using the provided UpdateOutputLogstashSecretsSslKey1 +func (t *UpdateOutputLogstash_Secrets_Ssl_Key) MergeUpdateOutputLogstashSecretsSslKey1(v UpdateOutputLogstashSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) 
+ t.union = merged + return err +} + +func (t UpdateOutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *UpdateOutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsUpdateOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as a UpdateOutputRemoteElasticsearchSecretsServiceToken0 +func (t UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) AsUpdateOutputRemoteElasticsearchSecretsServiceToken0() (UpdateOutputRemoteElasticsearchSecretsServiceToken0, error) { + var body UpdateOutputRemoteElasticsearchSecretsServiceToken0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as the provided UpdateOutputRemoteElasticsearchSecretsServiceToken0 +func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) FromUpdateOutputRemoteElasticsearchSecretsServiceToken0(v UpdateOutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided UpdateOutputRemoteElasticsearchSecretsServiceToken0 +func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) MergeUpdateOutputRemoteElasticsearchSecretsServiceToken0(v UpdateOutputRemoteElasticsearchSecretsServiceToken0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsUpdateOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as a UpdateOutputRemoteElasticsearchSecretsServiceToken1 +func (t UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) AsUpdateOutputRemoteElasticsearchSecretsServiceToken1() (UpdateOutputRemoteElasticsearchSecretsServiceToken1, error) { + var body UpdateOutputRemoteElasticsearchSecretsServiceToken1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as the provided UpdateOutputRemoteElasticsearchSecretsServiceToken1 +func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) FromUpdateOutputRemoteElasticsearchSecretsServiceToken1(v UpdateOutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided UpdateOutputRemoteElasticsearchSecretsServiceToken1 +func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) MergeUpdateOutputRemoteElasticsearchSecretsServiceToken1(v UpdateOutputRemoteElasticsearchSecretsServiceToken1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t 
*UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsUpdateOutputRemoteElasticsearchSecretsSslKey0 returns the union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key as a UpdateOutputRemoteElasticsearchSecretsSslKey0 +func (t UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) AsUpdateOutputRemoteElasticsearchSecretsSslKey0() (UpdateOutputRemoteElasticsearchSecretsSslKey0, error) { + var body UpdateOutputRemoteElasticsearchSecretsSslKey0 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputRemoteElasticsearchSecretsSslKey0 overwrites any union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided UpdateOutputRemoteElasticsearchSecretsSslKey0 +func (t *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) FromUpdateOutputRemoteElasticsearchSecretsSslKey0(v UpdateOutputRemoteElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputRemoteElasticsearchSecretsSslKey0 performs a merge with any union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided UpdateOutputRemoteElasticsearchSecretsSslKey0 +func (t *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeUpdateOutputRemoteElasticsearchSecretsSslKey0(v UpdateOutputRemoteElasticsearchSecretsSslKey0) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsUpdateOutputRemoteElasticsearchSecretsSslKey1 returns the union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key as a UpdateOutputRemoteElasticsearchSecretsSslKey1 +func (t UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) AsUpdateOutputRemoteElasticsearchSecretsSslKey1() (UpdateOutputRemoteElasticsearchSecretsSslKey1, error) { + var body UpdateOutputRemoteElasticsearchSecretsSslKey1 + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputRemoteElasticsearchSecretsSslKey1 overwrites any union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided UpdateOutputRemoteElasticsearchSecretsSslKey1 +func (t *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) FromUpdateOutputRemoteElasticsearchSecretsSslKey1(v UpdateOutputRemoteElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputRemoteElasticsearchSecretsSslKey1 performs a merge with any union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided UpdateOutputRemoteElasticsearchSecretsSslKey1 +func (t *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeUpdateOutputRemoteElasticsearchSecretsSslKey1(v UpdateOutputRemoteElasticsearchSecretsSslKey1) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsUpdateOutputElasticsearch returns the union data inside the UpdateOutputUnion as a UpdateOutputElasticsearch +func (t UpdateOutputUnion) AsUpdateOutputElasticsearch() (UpdateOutputElasticsearch, error) { + var body UpdateOutputElasticsearch + err := 
json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputElasticsearch overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputElasticsearch +func (t *UpdateOutputUnion) FromUpdateOutputElasticsearch(v UpdateOutputElasticsearch) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputElasticsearch performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputElasticsearch +func (t *UpdateOutputUnion) MergeUpdateOutputElasticsearch(v UpdateOutputElasticsearch) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsUpdateOutputRemoteElasticsearch returns the union data inside the UpdateOutputUnion as a UpdateOutputRemoteElasticsearch +func (t UpdateOutputUnion) AsUpdateOutputRemoteElasticsearch() (UpdateOutputRemoteElasticsearch, error) { + var body UpdateOutputRemoteElasticsearch + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputRemoteElasticsearch overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputRemoteElasticsearch +func (t *UpdateOutputUnion) FromUpdateOutputRemoteElasticsearch(v UpdateOutputRemoteElasticsearch) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputRemoteElasticsearch performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputRemoteElasticsearch +func (t *UpdateOutputUnion) MergeUpdateOutputRemoteElasticsearch(v UpdateOutputRemoteElasticsearch) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsUpdateOutputLogstash returns the union data inside the UpdateOutputUnion as a UpdateOutputLogstash +func (t UpdateOutputUnion) AsUpdateOutputLogstash() (UpdateOutputLogstash, error) { + var body UpdateOutputLogstash + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputLogstash overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputLogstash +func (t *UpdateOutputUnion) FromUpdateOutputLogstash(v UpdateOutputLogstash) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputLogstash performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputLogstash +func (t *UpdateOutputUnion) MergeUpdateOutputLogstash(v UpdateOutputLogstash) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsUpdateOutputKafka returns the union data inside the UpdateOutputUnion as a UpdateOutputKafka +func (t UpdateOutputUnion) AsUpdateOutputKafka() (UpdateOutputKafka, error) { + var body UpdateOutputKafka + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromUpdateOutputKafka overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputKafka +func (t *UpdateOutputUnion) FromUpdateOutputKafka(v UpdateOutputKafka) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeUpdateOutputKafka performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputKafka +func (t *UpdateOutputUnion) MergeUpdateOutputKafka(v UpdateOutputKafka) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := 
runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t UpdateOutputUnion) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *UpdateOutputUnion) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesString returns the union data inside the CasesAssigneesFilter as a CasesString +func (t CasesAssigneesFilter) AsCasesString() (CasesString, error) { + var body CasesString + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesString overwrites any union data inside the CasesAssigneesFilter as the provided CasesString +func (t *CasesAssigneesFilter) FromCasesString(v CasesString) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesString performs a merge with any union data inside the CasesAssigneesFilter, using the provided CasesString +func (t *CasesAssigneesFilter) MergeCasesString(v CasesString) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesStringArray returns the union data inside the CasesAssigneesFilter as a CasesStringArray +func (t CasesAssigneesFilter) AsCasesStringArray() (CasesStringArray, error) { + var body CasesStringArray + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesStringArray overwrites any union data inside the CasesAssigneesFilter as the provided CasesStringArray +func (t *CasesAssigneesFilter) FromCasesStringArray(v CasesStringArray) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesStringArray performs a merge with any union data inside the CasesAssigneesFilter, using the provided CasesStringArray +func (t *CasesAssigneesFilter) MergeCasesStringArray(v CasesStringArray) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesAssigneesFilter) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesAssigneesFilter) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesCaseCategory returns the union data inside the CasesCategory as a CasesCaseCategory +func (t CasesCategory) AsCasesCaseCategory() (CasesCaseCategory, error) { + var body CasesCaseCategory + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesCaseCategory overwrites any union data inside the CasesCategory as the provided CasesCaseCategory +func (t *CasesCategory) FromCasesCaseCategory(v CasesCaseCategory) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesCaseCategory performs a merge with any union data inside the CasesCategory, using the provided CasesCaseCategory +func (t *CasesCategory) MergeCasesCaseCategory(v CasesCaseCategory) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesCaseCategories returns the union data inside the CasesCategory as a CasesCaseCategories +func (t CasesCategory) AsCasesCaseCategories() (CasesCaseCategories, error) { + var body CasesCaseCategories + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesCaseCategories overwrites any union data inside the CasesCategory as the provided CasesCaseCategories +func (t *CasesCategory) FromCasesCaseCategories(v 
CasesCaseCategories) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesCaseCategories performs a merge with any union data inside the CasesCategory, using the provided CasesCaseCategories +func (t *CasesCategory) MergeCasesCaseCategories(v CasesCaseCategories) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesCategory) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesCategory) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesOwner returns the union data inside the CasesOwnerFilter as a CasesOwner +func (t CasesOwnerFilter) AsCasesOwner() (CasesOwner, error) { + var body CasesOwner + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesOwner overwrites any union data inside the CasesOwnerFilter as the provided CasesOwner +func (t *CasesOwnerFilter) FromCasesOwner(v CasesOwner) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesOwner performs a merge with any union data inside the CasesOwnerFilter, using the provided CasesOwner +func (t *CasesOwnerFilter) MergeCasesOwner(v CasesOwner) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesOwners returns the union data inside the CasesOwnerFilter as a CasesOwners +func (t CasesOwnerFilter) AsCasesOwners() (CasesOwners, error) { + var body CasesOwners + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesOwners overwrites any union data inside the CasesOwnerFilter as the provided CasesOwners +func (t *CasesOwnerFilter) FromCasesOwners(v CasesOwners) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesOwners performs a merge with any union data inside the CasesOwnerFilter, using the provided CasesOwners +func (t *CasesOwnerFilter) MergeCasesOwners(v CasesOwners) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesOwnerFilter) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesOwnerFilter) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesString returns the union data inside the CasesReporters as a CasesString +func (t CasesReporters) AsCasesString() (CasesString, error) { + var body CasesString + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesString overwrites any union data inside the CasesReporters as the provided CasesString +func (t *CasesReporters) FromCasesString(v CasesString) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesString performs a merge with any union data inside the CasesReporters, using the provided CasesString +func (t *CasesReporters) MergeCasesString(v CasesString) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesStringArray returns the union data inside the CasesReporters as a CasesStringArray +func (t CasesReporters) AsCasesStringArray() (CasesStringArray, error) { + var body CasesStringArray + err := json.Unmarshal(t.union, &body) + return body, err +} + +// 
FromCasesStringArray overwrites any union data inside the CasesReporters as the provided CasesStringArray +func (t *CasesReporters) FromCasesStringArray(v CasesStringArray) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesStringArray performs a merge with any union data inside the CasesReporters, using the provided CasesStringArray +func (t *CasesReporters) MergeCasesStringArray(v CasesStringArray) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesReporters) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesReporters) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesSearchFieldsType returns the union data inside the CasesSearchFields as a CasesSearchFieldsType +func (t CasesSearchFields) AsCasesSearchFieldsType() (CasesSearchFieldsType, error) { + var body CasesSearchFieldsType + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesSearchFieldsType overwrites any union data inside the CasesSearchFields as the provided CasesSearchFieldsType +func (t *CasesSearchFields) FromCasesSearchFieldsType(v CasesSearchFieldsType) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesSearchFieldsType performs a merge with any union data inside the CasesSearchFields, using the provided CasesSearchFieldsType +func (t *CasesSearchFields) MergeCasesSearchFieldsType(v CasesSearchFieldsType) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesSearchFieldsTypeArray returns the union data inside the CasesSearchFields as a CasesSearchFieldsTypeArray +func (t CasesSearchFields) AsCasesSearchFieldsTypeArray() (CasesSearchFieldsTypeArray, error) { + var body CasesSearchFieldsTypeArray + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesSearchFieldsTypeArray overwrites any union data inside the CasesSearchFields as the provided CasesSearchFieldsTypeArray +func (t *CasesSearchFields) FromCasesSearchFieldsTypeArray(v CasesSearchFieldsTypeArray) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesSearchFieldsTypeArray performs a merge with any union data inside the CasesSearchFields, using the provided CasesSearchFieldsTypeArray +func (t *CasesSearchFields) MergeCasesSearchFieldsTypeArray(v CasesSearchFieldsTypeArray) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesSearchFields) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesSearchFields) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// AsCasesString returns the union data inside the CasesTags as a CasesString +func (t CasesTags) AsCasesString() (CasesString, error) { + var body CasesString + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesString overwrites any union data inside the CasesTags as the provided CasesString +func (t *CasesTags) FromCasesString(v CasesString) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesString performs a merge with any union data inside the CasesTags, using the provided CasesString +func (t 
*CasesTags) MergeCasesString(v CasesString) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +// AsCasesStringArray returns the union data inside the CasesTags as a CasesStringArray +func (t CasesTags) AsCasesStringArray() (CasesStringArray, error) { + var body CasesStringArray + err := json.Unmarshal(t.union, &body) + return body, err +} + +// FromCasesStringArray overwrites any union data inside the CasesTags as the provided CasesStringArray +func (t *CasesTags) FromCasesStringArray(v CasesStringArray) error { + b, err := json.Marshal(v) + t.union = b + return err +} + +// MergeCasesStringArray performs a merge with any union data inside the CasesTags, using the provided CasesStringArray +func (t *CasesTags) MergeCasesStringArray(v CasesStringArray) error { + b, err := json.Marshal(v) + if err != nil { + return err + } + + merged, err := runtime.JSONMerge(t.union, b) + t.union = merged + return err +} + +func (t CasesTags) MarshalJSON() ([]byte, error) { + b, err := t.union.MarshalJSON() + return b, err +} + +func (t *CasesTags) UnmarshalJSON(b []byte) error { + err := t.union.UnmarshalJSON(b) + return err +} + +// Override default JSON handling for CreateConnectorConfig to handle AdditionalProperties and union +func (a *CreateConnectorConfig) UnmarshalJSON(b []byte) error { + err := a.union.UnmarshalJSON(b) + if err != nil { + return err + } + object := make(map[string]json.RawMessage) + err = json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for CreateConnectorConfig to handle AdditionalProperties and union +func (a CreateConnectorConfig) MarshalJSON() ([]byte, error) { + var err error + b, err := a.union.MarshalJSON() + if err != nil { + return nil, err + } + object := make(map[string]json.RawMessage) + if a.union != nil { + err = json.Unmarshal(b, &object) + if err != nil { + return nil, err + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Override default JSON handling for CreateConnectorSecrets to handle AdditionalProperties and union +func (a *CreateConnectorSecrets) UnmarshalJSON(b []byte) error { + err := a.union.UnmarshalJSON(b) + if err != nil { + return err + } + object := make(map[string]json.RawMessage) + err = json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for CreateConnectorSecrets to handle AdditionalProperties and union +func (a CreateConnectorSecrets) MarshalJSON() ([]byte, error) { + var err error + b, err := a.union.MarshalJSON() + if err != nil { + return 
nil, err + } + object := make(map[string]json.RawMessage) + if a.union != nil { + err = json.Unmarshal(b, &object) + if err != nil { + return nil, err + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Override default JSON handling for UpdateConnectorConfig to handle AdditionalProperties and union +func (a *UpdateConnectorConfig) UnmarshalJSON(b []byte) error { + err := a.union.UnmarshalJSON(b) + if err != nil { + return err + } + object := make(map[string]json.RawMessage) + err = json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for UpdateConnectorConfig to handle AdditionalProperties and union +func (a UpdateConnectorConfig) MarshalJSON() ([]byte, error) { + var err error + b, err := a.union.MarshalJSON() + if err != nil { + return nil, err + } + object := make(map[string]json.RawMessage) + if a.union != nil { + err = json.Unmarshal(b, &object) + if err != nil { + return nil, err + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// Override default JSON handling for UpdateConnectorSecrets to handle AdditionalProperties and union +func (a *UpdateConnectorSecrets) UnmarshalJSON(b []byte) error { + err := a.union.UnmarshalJSON(b) + if err != nil { + return err + } + object := make(map[string]json.RawMessage) + err = json.Unmarshal(b, &object) + if err != nil { + return err + } + + if len(object) != 0 { + a.AdditionalProperties = make(map[string]interface{}) + for fieldName, fieldBuf := range object { + var fieldVal interface{} + err := json.Unmarshal(fieldBuf, &fieldVal) + if err != nil { + return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) + } + a.AdditionalProperties[fieldName] = fieldVal + } + } + return nil +} + +// Override default JSON handling for UpdateConnectorSecrets to handle AdditionalProperties and union +func (a UpdateConnectorSecrets) MarshalJSON() ([]byte, error) { + var err error + b, err := a.union.MarshalJSON() + if err != nil { + return nil, err + } + object := make(map[string]json.RawMessage) + if a.union != nil { + err = json.Unmarshal(b, &object) + if err != nil { + return nil, err + } + } + + for fieldName, field := range a.AdditionalProperties { + object[fieldName], err = json.Marshal(field) + if err != nil { + return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + } + } + return json.Marshal(object) +} + +// RequestEditorFn is the function signature for the RequestEditor callback function +type RequestEditorFn func(ctx context.Context, req *http.Request) error + +// Doer performs HTTP requests. +// +// The standard http.Client implements this interface. +type HttpRequestDoer interface { + Do(req *http.Request) (*http.Response, error) +} + +// Client which conforms to the OpenAPI3 specification for this service. 
+type Client struct {
+	// The endpoint of the server conforming to this interface, with scheme,
+	// https://api.deepmap.com for example. This can contain a path relative
+	// to the server, such as https://api.deepmap.com/dev-test, and all the
+	// paths in the swagger spec will be appended to the server.
+	Server string
+
+	// Doer for performing requests, typically a *http.Client with any
+	// customized settings, such as certificate chains.
+	Client HttpRequestDoer
+
+	// A list of callbacks for modifying requests which are generated before sending over
+	// the network.
+	RequestEditors []RequestEditorFn
+}
+
+// ClientOption allows setting custom parameters during construction
+type ClientOption func(*Client) error
+
+// Creates a new Client, with reasonable defaults
+func NewClient(server string, opts ...ClientOption) (*Client, error) {
+	// create a client with sane default values
+	client := Client{
+		Server: server,
+	}
+	// mutate client and add all optional params
+	for _, o := range opts {
+		if err := o(&client); err != nil {
+			return nil, err
+		}
+	}
+	// ensure the server URL always has a trailing slash
+	if !strings.HasSuffix(client.Server, "/") {
+		client.Server += "/"
+	}
+	// create httpClient, if not already present
+	if client.Client == nil {
+		client.Client = &http.Client{}
+	}
+	return &client, nil
+}
+
+// WithHTTPClient allows overriding the default Doer, which is
+// automatically created using http.Client. This is useful for tests.
+func WithHTTPClient(doer HttpRequestDoer) ClientOption {
+	return func(c *Client) error {
+		c.Client = doer
+		return nil
+	}
+}
+
+// WithRequestEditorFn allows setting up a callback function, which will be
+// called right before sending the request. This can be used to mutate the request.
+func WithRequestEditorFn(fn RequestEditorFn) ClientOption {
+	return func(c *Client) error {
+		c.RequestEditors = append(c.RequestEditors, fn)
+		return nil
+	}
+}
+
+// The interface specification for the client above.
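Outside the generated file itself, these helpers are typically consumed in two steps: a oneOf union value is populated with one of its From* setters (and read back with the matching As* accessor), and the HTTP client is built with NewClient plus the functional options above. The sketch below is a minimal illustration only; the kbapi import path, the server URL, and the API key are placeholder assumptions, and UpdateOutputElasticsearch is left as an empty literal purely for brevity.

package main

import (
	"context"
	"fmt"
	"net/http"

	// Assumption: placeholder import path for the generated package in this diff.
	kbapi "example.com/generated/kbapi"
)

func main() {
	// Populate a oneOf union via its From* setter; the matching As* accessor
	// decodes the stored JSON back into the concrete variant.
	var out kbapi.UpdateOutputUnion
	if err := out.FromUpdateOutputElasticsearch(kbapi.UpdateOutputElasticsearch{}); err != nil {
		panic(err)
	}
	if _, err := out.AsUpdateOutputElasticsearch(); err != nil {
		panic(err)
	}

	// Build the client; WithRequestEditorFn registers an editor that runs on
	// every request, here adding an Authorization header (placeholder key).
	client, err := kbapi.NewClient(
		"https://kibana.example.com",
		kbapi.WithHTTPClient(&http.Client{}),
		kbapi.WithRequestEditorFn(func(ctx context.Context, req *http.Request) error {
			req.Header.Set("Authorization", "ApiKey <placeholder>")
			return nil
		}),
	)
	if err != nil {
		panic(err)
	}

	// GetAlertingHealth is one of the request helpers declared on the
	// ClientInterface below.
	resp, err := client.GetAlertingHealth(context.Background())
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}

The Merge* variants differ from the From* setters only in that they JSON-merge the provided value into any data already stored in the union instead of overwriting it.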
+type ClientInterface interface { + // PostActionsConnectorIdExecuteWithBody request with any body + PostActionsConnectorIdExecuteWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostActionsConnectorIdExecute(ctx context.Context, id string, body PostActionsConnectorIdExecuteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetActionsConnectorTypes request + GetActionsConnectorTypes(ctx context.Context, params *GetActionsConnectorTypesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetAlertingHealth request + GetAlertingHealth(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteAlertingRuleId request + DeleteAlertingRuleId(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetAlertingRuleId request + GetAlertingRuleId(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostAlertingRuleIdWithBody request with any body + PostAlertingRuleIdWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostAlertingRuleId(ctx context.Context, id string, body PostAlertingRuleIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutAlertingRuleIdWithBody request with any body + PutAlertingRuleIdWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutAlertingRuleId(ctx context.Context, id string, body PutAlertingRuleIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostAlertingRuleIdDisableWithBody request with any body + PostAlertingRuleIdDisableWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostAlertingRuleIdDisable(ctx context.Context, id string, body PostAlertingRuleIdDisableJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostAlertingRuleIdEnable request + PostAlertingRuleIdEnable(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostAlertingRuleIdMuteAll request + PostAlertingRuleIdMuteAll(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostAlertingRuleIdUnmuteAll request + PostAlertingRuleIdUnmuteAll(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostAlertingRuleIdUpdateApiKey request + PostAlertingRuleIdUpdateApiKey(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostAlertingRuleIdSnoozeScheduleWithBody request with any body + PostAlertingRuleIdSnoozeScheduleWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostAlertingRuleIdSnoozeSchedule(ctx context.Context, id string, body PostAlertingRuleIdSnoozeScheduleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteAlertingRuleRuleidSnoozeScheduleScheduleid request + DeleteAlertingRuleRuleidSnoozeScheduleScheduleid(ctx context.Context, ruleId string, scheduleId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostAlertingRuleRuleIdAlertAlertIdMute request + PostAlertingRuleRuleIdAlertAlertIdMute(ctx context.Context, ruleId string, alertId string, 
reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostAlertingRuleRuleIdAlertAlertIdUnmute request + PostAlertingRuleRuleIdAlertAlertIdUnmute(ctx context.Context, ruleId string, alertId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetRuleTypes request + GetRuleTypes(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetAlertingRulesFind request + GetAlertingRulesFind(ctx context.Context, params *GetAlertingRulesFindParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateAgentKeyWithBody request with any body + CreateAgentKeyWithBody(ctx context.Context, params *CreateAgentKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateAgentKey(ctx context.Context, params *CreateAgentKeyParams, body CreateAgentKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // SaveApmServerSchemaWithBody request with any body + SaveApmServerSchemaWithBody(ctx context.Context, params *SaveApmServerSchemaParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + SaveApmServerSchema(ctx context.Context, params *SaveApmServerSchemaParams, body SaveApmServerSchemaJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateAnnotationWithBody request with any body + CreateAnnotationWithBody(ctx context.Context, serviceName string, params *CreateAnnotationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateAnnotation(ctx context.Context, serviceName string, params *CreateAnnotationParams, body CreateAnnotationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetAnnotation request + GetAnnotation(ctx context.Context, serviceName string, params *GetAnnotationParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteAgentConfigurationWithBody request with any body + DeleteAgentConfigurationWithBody(ctx context.Context, params *DeleteAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteAgentConfiguration(ctx context.Context, params *DeleteAgentConfigurationParams, body DeleteAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetAgentConfigurations request + GetAgentConfigurations(ctx context.Context, params *GetAgentConfigurationsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateUpdateAgentConfigurationWithBody request with any body + CreateUpdateAgentConfigurationWithBody(ctx context.Context, params *CreateUpdateAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateUpdateAgentConfiguration(ctx context.Context, params *CreateUpdateAgentConfigurationParams, body CreateUpdateAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetAgentNameForService request + GetAgentNameForService(ctx context.Context, params *GetAgentNameForServiceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetEnvironmentsForService request + GetEnvironmentsForService(ctx context.Context, params *GetEnvironmentsForServiceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // SearchSingleConfigurationWithBody request with any body + SearchSingleConfigurationWithBody(ctx context.Context, params 
*SearchSingleConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + SearchSingleConfiguration(ctx context.Context, params *SearchSingleConfigurationParams, body SearchSingleConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetSingleAgentConfiguration request + GetSingleAgentConfiguration(ctx context.Context, params *GetSingleAgentConfigurationParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetSourceMaps request + GetSourceMaps(ctx context.Context, params *GetSourceMapsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UploadSourceMapWithBody request with any body + UploadSourceMapWithBody(ctx context.Context, params *UploadSourceMapParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteSourceMap request + DeleteSourceMap(ctx context.Context, id string, params *DeleteSourceMapParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteAssetCriticalityRecord request + DeleteAssetCriticalityRecord(ctx context.Context, params *DeleteAssetCriticalityRecordParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetAssetCriticalityRecord request + GetAssetCriticalityRecord(ctx context.Context, params *GetAssetCriticalityRecordParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateAssetCriticalityRecordWithBody request with any body + CreateAssetCriticalityRecordWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateAssetCriticalityRecord(ctx context.Context, body CreateAssetCriticalityRecordJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // BulkUpsertAssetCriticalityRecordsWithBody request with any body + BulkUpsertAssetCriticalityRecordsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + BulkUpsertAssetCriticalityRecords(ctx context.Context, body BulkUpsertAssetCriticalityRecordsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindAssetCriticalityRecords request + FindAssetCriticalityRecords(ctx context.Context, params *FindAssetCriticalityRecordsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteCaseDefaultSpace request + DeleteCaseDefaultSpace(ctx context.Context, params *DeleteCaseDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateCaseDefaultSpaceWithBody request with any body + UpdateCaseDefaultSpaceWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateCaseDefaultSpace(ctx context.Context, body UpdateCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateCaseDefaultSpaceWithBody request with any body + CreateCaseDefaultSpaceWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateCaseDefaultSpace(ctx context.Context, body CreateCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindCasesDefaultSpace request + FindCasesDefaultSpace(ctx context.Context, params *FindCasesDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetCasesByAlertDefaultSpace request + GetCasesByAlertDefaultSpace(ctx context.Context, alertId 
CasesAlertId, params *GetCasesByAlertDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetCaseConfigurationDefaultSpace request + GetCaseConfigurationDefaultSpace(ctx context.Context, params *GetCaseConfigurationDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // SetCaseConfigurationDefaultSpaceWithBody request with any body + SetCaseConfigurationDefaultSpaceWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + SetCaseConfigurationDefaultSpace(ctx context.Context, body SetCaseConfigurationDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindCaseConnectorsDefaultSpace request + FindCaseConnectorsDefaultSpace(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateCaseConfigurationDefaultSpaceWithBody request with any body + UpdateCaseConfigurationDefaultSpaceWithBody(ctx context.Context, configurationId CasesConfigurationId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateCaseConfigurationDefaultSpace(ctx context.Context, configurationId CasesConfigurationId, body UpdateCaseConfigurationDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetCaseReportersDefaultSpace request + GetCaseReportersDefaultSpace(ctx context.Context, params *GetCaseReportersDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetCaseTagsDefaultSpace request + GetCaseTagsDefaultSpace(ctx context.Context, params *GetCaseTagsDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetCaseDefaultSpace request + GetCaseDefaultSpace(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetCaseAlertsDefaultSpace request + GetCaseAlertsDefaultSpace(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteCaseCommentsDefaultSpace request + DeleteCaseCommentsDefaultSpace(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateCaseCommentDefaultSpaceWithBody request with any body + UpdateCaseCommentDefaultSpaceWithBody(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateCaseCommentDefaultSpace(ctx context.Context, caseId CasesCaseId, body UpdateCaseCommentDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // AddCaseCommentDefaultSpaceWithBody request with any body + AddCaseCommentDefaultSpaceWithBody(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + AddCaseCommentDefaultSpace(ctx context.Context, caseId CasesCaseId, body AddCaseCommentDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindCaseCommentsDefaultSpace request + FindCaseCommentsDefaultSpace(ctx context.Context, caseId CasesCaseId, params *FindCaseCommentsDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteCaseCommentDefaultSpace request + DeleteCaseCommentDefaultSpace(ctx context.Context, caseId CasesCaseId, commentId CasesCommentId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetCaseCommentDefaultSpace request + GetCaseCommentDefaultSpace(ctx context.Context, caseId 
CasesCaseId, commentId CasesCommentId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PushCaseDefaultSpaceWithBody request with any body + PushCaseDefaultSpaceWithBody(ctx context.Context, caseId CasesCaseId, connectorId CasesConnectorId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PushCaseDefaultSpace(ctx context.Context, caseId CasesCaseId, connectorId CasesConnectorId, body PushCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // AddCaseFileDefaultSpaceWithBody request with any body + AddCaseFileDefaultSpaceWithBody(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindCaseActivityDefaultSpace request + FindCaseActivityDefaultSpace(ctx context.Context, caseId CasesCaseId, params *FindCaseActivityDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateFieldsMetadataDefaultWithBody request with any body + UpdateFieldsMetadataDefaultWithBody(ctx context.Context, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateFieldsMetadataDefault(ctx context.Context, viewId DataViewsViewId, body UpdateFieldsMetadataDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateRuntimeFieldDefaultWithBody request with any body + CreateRuntimeFieldDefaultWithBody(ctx context.Context, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateRuntimeFieldDefault(ctx context.Context, viewId DataViewsViewId, body CreateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateUpdateRuntimeFieldDefaultWithBody request with any body + CreateUpdateRuntimeFieldDefaultWithBody(ctx context.Context, viewId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateUpdateRuntimeFieldDefault(ctx context.Context, viewId string, body CreateUpdateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteRuntimeFieldDefault request + DeleteRuntimeFieldDefault(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetRuntimeFieldDefault request + GetRuntimeFieldDefault(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateRuntimeFieldDefaultWithBody request with any body + UpdateRuntimeFieldDefaultWithBody(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateRuntimeFieldDefault(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, body UpdateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetDefaultDataViewDefault request + GetDefaultDataViewDefault(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // SetDefaultDatailViewDefaultWithBody request with any body + SetDefaultDatailViewDefaultWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + SetDefaultDatailViewDefault(ctx context.Context, body 
SetDefaultDatailViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // SwapDataViewsDefaultWithBody request with any body + SwapDataViewsDefaultWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + SwapDataViewsDefault(ctx context.Context, body SwapDataViewsDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PreviewSwapDataViewsDefaultWithBody request with any body + PreviewSwapDataViewsDefaultWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PreviewSwapDataViewsDefault(ctx context.Context, body PreviewSwapDataViewsDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteAlertsIndex request + DeleteAlertsIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadAlertsIndex request + ReadAlertsIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateAlertsIndex request + CreateAlertsIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadPrivileges request + ReadPrivileges(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteRule request + DeleteRule(ctx context.Context, params *DeleteRuleParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadRule request + ReadRule(ctx context.Context, params *ReadRuleParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PatchRuleWithBody request with any body + PatchRuleWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PatchRule(ctx context.Context, body PatchRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateRuleWithBody request with any body + CreateRuleWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateRule(ctx context.Context, body CreateRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateRuleWithBody request with any body + UpdateRuleWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateRule(ctx context.Context, body UpdateRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PerformRulesBulkActionWithBody request with any body + PerformRulesBulkActionWithBody(ctx context.Context, params *PerformRulesBulkActionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PerformRulesBulkAction(ctx context.Context, params *PerformRulesBulkActionParams, body PerformRulesBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ExportRulesWithBody request with any body + ExportRulesWithBody(ctx context.Context, params *ExportRulesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + ExportRules(ctx context.Context, params *ExportRulesParams, body ExportRulesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindRules request + FindRules(ctx context.Context, params *FindRulesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ImportRulesWithBody request with any body + ImportRulesWithBody(ctx context.Context, params *ImportRulesParams, 
contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // InstallPrebuiltRulesAndTimelines request + InstallPrebuiltRulesAndTimelines(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadPrebuiltRulesAndTimelinesStatus request + ReadPrebuiltRulesAndTimelinesStatus(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // RulePreviewWithBody request with any body + RulePreviewWithBody(ctx context.Context, params *RulePreviewParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + RulePreview(ctx context.Context, params *RulePreviewParams, body RulePreviewJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateRuleExceptionListItemsWithBody request with any body + CreateRuleExceptionListItemsWithBody(ctx context.Context, id SecurityExceptionsAPIRuleId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateRuleExceptionListItems(ctx context.Context, id SecurityExceptionsAPIRuleId, body CreateRuleExceptionListItemsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // SetAlertAssigneesWithBody request with any body + SetAlertAssigneesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + SetAlertAssignees(ctx context.Context, body SetAlertAssigneesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FinalizeAlertsMigrationWithBody request with any body + FinalizeAlertsMigrationWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + FinalizeAlertsMigration(ctx context.Context, body FinalizeAlertsMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // AlertsMigrationCleanupWithBody request with any body + AlertsMigrationCleanupWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + AlertsMigrationCleanup(ctx context.Context, body AlertsMigrationCleanupJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateAlertsMigrationWithBody request with any body + CreateAlertsMigrationWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateAlertsMigration(ctx context.Context, body CreateAlertsMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadAlertsMigrationStatus request + ReadAlertsMigrationStatus(ctx context.Context, params *ReadAlertsMigrationStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // SearchAlertsWithBody request with any body + SearchAlertsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + SearchAlerts(ctx context.Context, body SearchAlertsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // SetAlertsStatusWithBody request with any body + SetAlertsStatusWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + SetAlertsStatus(ctx context.Context, body SetAlertsStatusJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // SetAlertTagsWithBody request with any body + SetAlertTagsWithBody(ctx context.Context, contentType string, body 
io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + SetAlertTags(ctx context.Context, body SetAlertTagsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadTags request + ReadTags(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // RotateEncryptionKey request + RotateEncryptionKey(ctx context.Context, params *RotateEncryptionKeyParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointGetActionsList request + EndpointGetActionsList(ctx context.Context, params *EndpointGetActionsListParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointExecuteActionWithBody request with any body + EndpointExecuteActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + EndpointExecuteAction(ctx context.Context, body EndpointExecuteActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointGetFileActionWithBody request with any body + EndpointGetFileActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + EndpointGetFileAction(ctx context.Context, body EndpointGetFileActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointIsolateActionWithBody request with any body + EndpointIsolateActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + EndpointIsolateAction(ctx context.Context, body EndpointIsolateActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointKillProcessActionWithBody request with any body + EndpointKillProcessActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + EndpointKillProcessAction(ctx context.Context, body EndpointKillProcessActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointGetProcessesActionWithBody request with any body + EndpointGetProcessesActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + EndpointGetProcessesAction(ctx context.Context, body EndpointGetProcessesActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // RunScriptActionWithBody request with any body + RunScriptActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + RunScriptAction(ctx context.Context, body RunScriptActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointScanActionWithBody request with any body + EndpointScanActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + EndpointScanAction(ctx context.Context, body EndpointScanActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointGetActionsState request + EndpointGetActionsState(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointSuspendProcessActionWithBody request with any body + EndpointSuspendProcessActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + EndpointSuspendProcessAction(ctx context.Context, body EndpointSuspendProcessActionJSONRequestBody, 
reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointUnisolateActionWithBody request with any body + EndpointUnisolateActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + EndpointUnisolateAction(ctx context.Context, body EndpointUnisolateActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointUploadActionWithBody request with any body + EndpointUploadActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointGetActionsDetails request + EndpointGetActionsDetails(ctx context.Context, actionId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointFileInfo request + EndpointFileInfo(ctx context.Context, actionId string, fileId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointFileDownload request + EndpointFileDownload(ctx context.Context, actionId string, fileId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EndpointGetActionsStatus request + EndpointGetActionsStatus(ctx context.Context, params *EndpointGetActionsStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetEndpointMetadataList request + GetEndpointMetadataList(ctx context.Context, params *GetEndpointMetadataListParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetEndpointMetadata request + GetEndpointMetadata(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetPolicyResponse request + GetPolicyResponse(ctx context.Context, params *GetPolicyResponseParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetProtectionUpdatesNote request + GetProtectionUpdatesNote(ctx context.Context, packagePolicyId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateUpdateProtectionUpdatesNoteWithBody request with any body + CreateUpdateProtectionUpdatesNoteWithBody(ctx context.Context, packagePolicyId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateUpdateProtectionUpdatesNote(ctx context.Context, packagePolicyId string, body CreateUpdateProtectionUpdatesNoteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateEndpointList request + CreateEndpointList(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteEndpointListItem request + DeleteEndpointListItem(ctx context.Context, params *DeleteEndpointListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadEndpointListItem request + ReadEndpointListItem(ctx context.Context, params *ReadEndpointListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateEndpointListItemWithBody request with any body + CreateEndpointListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateEndpointListItem(ctx context.Context, body CreateEndpointListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateEndpointListItemWithBody request with any body + UpdateEndpointListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateEndpointListItem(ctx context.Context, body UpdateEndpointListItemJSONRequestBody, reqEditors ...RequestEditorFn) 
(*http.Response, error) + + // FindEndpointListItems request + FindEndpointListItems(ctx context.Context, params *FindEndpointListItemsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteMonitoringEngine request + DeleteMonitoringEngine(ctx context.Context, params *DeleteMonitoringEngineParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DisableMonitoringEngine request + DisableMonitoringEngine(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // InitMonitoringEngine request + InitMonitoringEngine(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ScheduleMonitoringEngine request + ScheduleMonitoringEngine(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PrivMonHealth request + PrivMonHealth(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PrivMonPrivileges request + PrivMonPrivileges(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreatePrivMonUserWithBody request with any body + CreatePrivMonUserWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreatePrivMonUser(ctx context.Context, body CreatePrivMonUserJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PrivmonBulkUploadUsersCSVWithBody request with any body + PrivmonBulkUploadUsersCSVWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ListPrivMonUsers request + ListPrivMonUsers(ctx context.Context, params *ListPrivMonUsersParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeletePrivMonUser request + DeletePrivMonUser(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdatePrivMonUserWithBody request with any body + UpdatePrivMonUserWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdatePrivMonUser(ctx context.Context, id string, body UpdatePrivMonUserJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // InstallPrivilegedAccessDetectionPackage request + InstallPrivilegedAccessDetectionPackage(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetPrivilegedAccessDetectionPackageStatus request + GetPrivilegedAccessDetectionPackageStatus(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // InitEntityStoreWithBody request with any body + InitEntityStoreWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + InitEntityStore(ctx context.Context, body InitEntityStoreJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ListEntityEngines request + ListEntityEngines(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ApplyEntityEngineDataviewIndices request + ApplyEntityEngineDataviewIndices(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteEntityEngine request + DeleteEntityEngine(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, params *DeleteEntityEngineParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetEntityEngine request + GetEntityEngine(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors 
...RequestEditorFn) (*http.Response, error) + + // InitEntityEngineWithBody request with any body + InitEntityEngineWithBody(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + InitEntityEngine(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, body InitEntityEngineJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // StartEntityEngine request + StartEntityEngine(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*http.Response, error) + + // StopEntityEngine request + StopEntityEngine(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ListEntities request + ListEntities(ctx context.Context, params *ListEntitiesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetEntityStoreStatus request + GetEntityStoreStatus(ctx context.Context, params *GetEntityStoreStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteExceptionList request + DeleteExceptionList(ctx context.Context, params *DeleteExceptionListParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadExceptionList request + ReadExceptionList(ctx context.Context, params *ReadExceptionListParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateExceptionListWithBody request with any body + CreateExceptionListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateExceptionList(ctx context.Context, body CreateExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateExceptionListWithBody request with any body + UpdateExceptionListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateExceptionList(ctx context.Context, body UpdateExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DuplicateExceptionList request + DuplicateExceptionList(ctx context.Context, params *DuplicateExceptionListParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ExportExceptionList request + ExportExceptionList(ctx context.Context, params *ExportExceptionListParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindExceptionLists request + FindExceptionLists(ctx context.Context, params *FindExceptionListsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ImportExceptionListWithBody request with any body + ImportExceptionListWithBody(ctx context.Context, params *ImportExceptionListParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteExceptionListItem request + DeleteExceptionListItem(ctx context.Context, params *DeleteExceptionListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadExceptionListItem request + ReadExceptionListItem(ctx context.Context, params *ReadExceptionListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateExceptionListItemWithBody request with any body + CreateExceptionListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateExceptionListItem(ctx context.Context, body CreateExceptionListItemJSONRequestBody, 
reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateExceptionListItemWithBody request with any body + UpdateExceptionListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateExceptionListItem(ctx context.Context, body UpdateExceptionListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindExceptionListItems request + FindExceptionListItems(ctx context.Context, params *FindExceptionListItemsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadExceptionListSummary request + ReadExceptionListSummary(ctx context.Context, params *ReadExceptionListSummaryParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateSharedExceptionListWithBody request with any body + CreateSharedExceptionListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateSharedExceptionList(ctx context.Context, body CreateSharedExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFeatures request + GetFeatures(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentDownloadSources request + GetFleetAgentDownloadSources(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentDownloadSourcesWithBody request with any body + PostFleetAgentDownloadSourcesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentDownloadSources(ctx context.Context, body PostFleetAgentDownloadSourcesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetAgentDownloadSourcesSourceid request + DeleteFleetAgentDownloadSourcesSourceid(ctx context.Context, sourceId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentDownloadSourcesSourceid request + GetFleetAgentDownloadSourcesSourceid(ctx context.Context, sourceId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetAgentDownloadSourcesSourceidWithBody request with any body + PutFleetAgentDownloadSourcesSourceidWithBody(ctx context.Context, sourceId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetAgentDownloadSourcesSourceid(ctx context.Context, sourceId string, body PutFleetAgentDownloadSourcesSourceidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentPolicies request + GetFleetAgentPolicies(ctx context.Context, params *GetFleetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentPoliciesWithBody request with any body + PostFleetAgentPoliciesWithBody(ctx context.Context, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentPolicies(ctx context.Context, params *PostFleetAgentPoliciesParams, body PostFleetAgentPoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentPoliciesBulkGetWithBody request with any body + PostFleetAgentPoliciesBulkGetWithBody(ctx context.Context, params *PostFleetAgentPoliciesBulkGetParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentPoliciesBulkGet(ctx context.Context, params 
*PostFleetAgentPoliciesBulkGetParams, body PostFleetAgentPoliciesBulkGetJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentPoliciesDeleteWithBody request with any body + PostFleetAgentPoliciesDeleteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentPoliciesDelete(ctx context.Context, body PostFleetAgentPoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentPoliciesOutputsWithBody request with any body + PostFleetAgentPoliciesOutputsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentPoliciesOutputs(ctx context.Context, body PostFleetAgentPoliciesOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentPoliciesAgentpolicyid request + GetFleetAgentPoliciesAgentpolicyid(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetAgentPoliciesAgentpolicyidWithBody request with any body + PutFleetAgentPoliciesAgentpolicyidWithBody(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetAgentPoliciesAgentpolicyid(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, body PutFleetAgentPoliciesAgentpolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatus request + GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatus(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentPoliciesAgentpolicyidCopyWithBody request with any body + PostFleetAgentPoliciesAgentpolicyidCopyWithBody(ctx context.Context, agentPolicyId string, params *PostFleetAgentPoliciesAgentpolicyidCopyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentPoliciesAgentpolicyidCopy(ctx context.Context, agentPolicyId string, params *PostFleetAgentPoliciesAgentpolicyidCopyParams, body PostFleetAgentPoliciesAgentpolicyidCopyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentPoliciesAgentpolicyidDownload request + GetFleetAgentPoliciesAgentpolicyidDownload(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidDownloadParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentPoliciesAgentpolicyidFull request + GetFleetAgentPoliciesAgentpolicyidFull(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidFullParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentPoliciesAgentpolicyidOutputs request + GetFleetAgentPoliciesAgentpolicyidOutputs(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentStatus request + GetFleetAgentStatus(ctx context.Context, params *GetFleetAgentStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentStatusData request + GetFleetAgentStatusData(ctx context.Context, params *GetFleetAgentStatusDataParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // 
GetFleetAgents request + GetFleetAgents(ctx context.Context, params *GetFleetAgentsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsWithBody request with any body + PostFleetAgentsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgents(ctx context.Context, body PostFleetAgentsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentsActionStatus request + GetFleetAgentsActionStatus(ctx context.Context, params *GetFleetAgentsActionStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsActionsActionidCancel request + PostFleetAgentsActionsActionidCancel(ctx context.Context, actionId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentsAvailableVersions request + GetFleetAgentsAvailableVersions(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsBulkReassignWithBody request with any body + PostFleetAgentsBulkReassignWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentsBulkReassign(ctx context.Context, body PostFleetAgentsBulkReassignJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsBulkRequestDiagnosticsWithBody request with any body + PostFleetAgentsBulkRequestDiagnosticsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentsBulkRequestDiagnostics(ctx context.Context, body PostFleetAgentsBulkRequestDiagnosticsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsBulkUnenrollWithBody request with any body + PostFleetAgentsBulkUnenrollWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentsBulkUnenroll(ctx context.Context, body PostFleetAgentsBulkUnenrollJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsBulkUpdateAgentTagsWithBody request with any body + PostFleetAgentsBulkUpdateAgentTagsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentsBulkUpdateAgentTags(ctx context.Context, body PostFleetAgentsBulkUpdateAgentTagsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsBulkUpgradeWithBody request with any body + PostFleetAgentsBulkUpgradeWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentsBulkUpgrade(ctx context.Context, body PostFleetAgentsBulkUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetAgentsFilesFileid request + DeleteFleetAgentsFilesFileid(ctx context.Context, fileId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentsFilesFileidFilename request + GetFleetAgentsFilesFileidFilename(ctx context.Context, fileId string, fileName string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentsSetup request + GetFleetAgentsSetup(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsSetup request + PostFleetAgentsSetup(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + 
// GetFleetAgentsTags request + GetFleetAgentsTags(ctx context.Context, params *GetFleetAgentsTagsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetAgentsAgentid request + DeleteFleetAgentsAgentid(ctx context.Context, agentId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentsAgentid request + GetFleetAgentsAgentid(ctx context.Context, agentId string, params *GetFleetAgentsAgentidParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetAgentsAgentidWithBody request with any body + PutFleetAgentsAgentidWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetAgentsAgentid(ctx context.Context, agentId string, body PutFleetAgentsAgentidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsAgentidActionsWithBody request with any body + PostFleetAgentsAgentidActionsWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentsAgentidActions(ctx context.Context, agentId string, body PostFleetAgentsAgentidActionsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsAgentidReassignWithBody request with any body + PostFleetAgentsAgentidReassignWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentsAgentidReassign(ctx context.Context, agentId string, body PostFleetAgentsAgentidReassignJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsAgentidRequestDiagnosticsWithBody request with any body + PostFleetAgentsAgentidRequestDiagnosticsWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentsAgentidRequestDiagnostics(ctx context.Context, agentId string, body PostFleetAgentsAgentidRequestDiagnosticsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsAgentidUnenrollWithBody request with any body + PostFleetAgentsAgentidUnenrollWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentsAgentidUnenroll(ctx context.Context, agentId string, body PostFleetAgentsAgentidUnenrollJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetAgentsAgentidUpgradeWithBody request with any body + PostFleetAgentsAgentidUpgradeWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetAgentsAgentidUpgrade(ctx context.Context, agentId string, body PostFleetAgentsAgentidUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetAgentsAgentidUploads request + GetFleetAgentsAgentidUploads(ctx context.Context, agentId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetCheckPermissions request + GetFleetCheckPermissions(ctx context.Context, params *GetFleetCheckPermissionsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetDataStreams request + GetFleetDataStreams(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEnrollmentApiKeys request + 
GetFleetEnrollmentApiKeys(ctx context.Context, params *GetFleetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetEnrollmentApiKeysWithBody request with any body + PostFleetEnrollmentApiKeysWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetEnrollmentApiKeys(ctx context.Context, body PostFleetEnrollmentApiKeysJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetEnrollmentApiKeysKeyid request + DeleteFleetEnrollmentApiKeysKeyid(ctx context.Context, keyId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEnrollmentApiKeysKeyid request + GetFleetEnrollmentApiKeysKeyid(ctx context.Context, keyId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetEpmBulkAssetsWithBody request with any body + PostFleetEpmBulkAssetsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetEpmBulkAssets(ctx context.Context, body PostFleetEpmBulkAssetsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmCategories request + GetFleetEpmCategories(ctx context.Context, params *GetFleetEpmCategoriesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetEpmCustomIntegrationsWithBody request with any body + PostFleetEpmCustomIntegrationsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetEpmCustomIntegrations(ctx context.Context, body PostFleetEpmCustomIntegrationsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetEpmCustomIntegrationsPkgnameWithBody request with any body + PutFleetEpmCustomIntegrationsPkgnameWithBody(ctx context.Context, pkgName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetEpmCustomIntegrationsPkgname(ctx context.Context, pkgName string, body PutFleetEpmCustomIntegrationsPkgnameJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmDataStreams request + GetFleetEpmDataStreams(ctx context.Context, params *GetFleetEpmDataStreamsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmPackages request + GetFleetEpmPackages(ctx context.Context, params *GetFleetEpmPackagesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetEpmPackagesWithBody request with any body + PostFleetEpmPackagesWithBody(ctx context.Context, params *PostFleetEpmPackagesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetEpmPackagesBulkWithBody request with any body + PostFleetEpmPackagesBulkWithBody(ctx context.Context, params *PostFleetEpmPackagesBulkParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetEpmPackagesBulk(ctx context.Context, params *PostFleetEpmPackagesBulkParams, body PostFleetEpmPackagesBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetEpmPackagesBulkUninstallWithBody request with any body + PostFleetEpmPackagesBulkUninstallWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetEpmPackagesBulkUninstall(ctx context.Context, body 
PostFleetEpmPackagesBulkUninstallJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmPackagesBulkUninstallTaskid request + GetFleetEpmPackagesBulkUninstallTaskid(ctx context.Context, taskId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetEpmPackagesBulkUpgradeWithBody request with any body + PostFleetEpmPackagesBulkUpgradeWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetEpmPackagesBulkUpgrade(ctx context.Context, body PostFleetEpmPackagesBulkUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmPackagesBulkUpgradeTaskid request + GetFleetEpmPackagesBulkUpgradeTaskid(ctx context.Context, taskId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmPackagesInstalled request + GetFleetEpmPackagesInstalled(ctx context.Context, params *GetFleetEpmPackagesInstalledParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmPackagesLimited request + GetFleetEpmPackagesLimited(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmPackagesPkgnameStats request + GetFleetEpmPackagesPkgnameStats(ctx context.Context, pkgName string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetEpmPackagesPkgnamePkgversion request + DeleteFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmPackagesPkgnamePkgversion request + GetFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetEpmPackagesPkgnamePkgversionWithBody request with any body + PostFleetEpmPackagesPkgnamePkgversionWithBody(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, body PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetEpmPackagesPkgnamePkgversionWithBody request with any body + PutFleetEpmPackagesPkgnamePkgversionWithBody(ctx context.Context, pkgName string, pkgVersion string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, body PutFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssets request + DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssets(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssets request + DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssets(ctx context.Context, pkgName string, pkgVersion string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // 
PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithBody request with any body + PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithBody(ctx context.Context, pkgName string, pkgVersion string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetEpmPackagesPkgnamePkgversionKibanaAssets(ctx context.Context, pkgName string, pkgVersion string, body PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithBody request with any body + PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithBody(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorize(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams, body PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmPackagesPkgnamePkgversionFilepath request + GetFleetEpmPackagesPkgnamePkgversionFilepath(ctx context.Context, pkgName string, pkgVersion string, filePath string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmTemplatesPkgnamePkgversionInputs request + GetFleetEpmTemplatesPkgnamePkgversionInputs(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmTemplatesPkgnamePkgversionInputsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetEpmVerificationKeyId request + GetFleetEpmVerificationKeyId(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetFleetServerHosts request + GetFleetFleetServerHosts(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetFleetServerHostsWithBody request with any body + PostFleetFleetServerHostsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetFleetServerHosts(ctx context.Context, body PostFleetFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetFleetServerHostsItemid request + DeleteFleetFleetServerHostsItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetFleetServerHostsItemid request + GetFleetFleetServerHostsItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetFleetServerHostsItemidWithBody request with any body + PutFleetFleetServerHostsItemidWithBody(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetFleetServerHostsItemid(ctx context.Context, itemId string, body PutFleetFleetServerHostsItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetHealthCheckWithBody request with any body + PostFleetHealthCheckWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetHealthCheck(ctx context.Context, body PostFleetHealthCheckJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetKubernetes request + 
GetFleetKubernetes(ctx context.Context, params *GetFleetKubernetesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetKubernetesDownload request + GetFleetKubernetesDownload(ctx context.Context, params *GetFleetKubernetesDownloadParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetLogstashApiKeys request + PostFleetLogstashApiKeys(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetMessageSigningServiceRotateKeyPair request + PostFleetMessageSigningServiceRotateKeyPair(ctx context.Context, params *PostFleetMessageSigningServiceRotateKeyPairParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetOutputs request + GetFleetOutputs(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetOutputsWithBody request with any body + PostFleetOutputsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetOutputs(ctx context.Context, body PostFleetOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetOutputsOutputid request + DeleteFleetOutputsOutputid(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetOutputsOutputid request + GetFleetOutputsOutputid(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetOutputsOutputidWithBody request with any body + PutFleetOutputsOutputidWithBody(ctx context.Context, outputId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetOutputsOutputid(ctx context.Context, outputId string, body PutFleetOutputsOutputidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetOutputsOutputidHealth request + GetFleetOutputsOutputidHealth(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetPackagePolicies request + GetFleetPackagePolicies(ctx context.Context, params *GetFleetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetPackagePoliciesWithBody request with any body + PostFleetPackagePoliciesWithBody(ctx context.Context, params *PostFleetPackagePoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetPackagePolicies(ctx context.Context, params *PostFleetPackagePoliciesParams, body PostFleetPackagePoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetPackagePoliciesBulkGetWithBody request with any body + PostFleetPackagePoliciesBulkGetWithBody(ctx context.Context, params *PostFleetPackagePoliciesBulkGetParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetPackagePoliciesBulkGet(ctx context.Context, params *PostFleetPackagePoliciesBulkGetParams, body PostFleetPackagePoliciesBulkGetJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetPackagePoliciesDeleteWithBody request with any body + PostFleetPackagePoliciesDeleteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetPackagePoliciesDelete(ctx context.Context, body PostFleetPackagePoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // 
PostFleetPackagePoliciesUpgradeWithBody request with any body + PostFleetPackagePoliciesUpgradeWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetPackagePoliciesUpgrade(ctx context.Context, body PostFleetPackagePoliciesUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetPackagePoliciesUpgradeDryrunWithBody request with any body + PostFleetPackagePoliciesUpgradeDryrunWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetPackagePoliciesUpgradeDryrun(ctx context.Context, body PostFleetPackagePoliciesUpgradeDryrunJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetPackagePoliciesPackagepolicyid request + DeleteFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *DeleteFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetPackagePoliciesPackagepolicyid request + GetFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *GetFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetPackagePoliciesPackagepolicyidWithBody request with any body + PutFleetPackagePoliciesPackagepolicyidWithBody(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, body PutFleetPackagePoliciesPackagepolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetProxies request + GetFleetProxies(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetProxiesWithBody request with any body + PostFleetProxiesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetProxies(ctx context.Context, body PostFleetProxiesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteFleetProxiesItemid request + DeleteFleetProxiesItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetProxiesItemid request + GetFleetProxiesItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetProxiesItemidWithBody request with any body + PutFleetProxiesItemidWithBody(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetProxiesItemid(ctx context.Context, itemId string, body PutFleetProxiesItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetRemoteSyncedIntegrationsStatus request + GetFleetRemoteSyncedIntegrationsStatus(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus request + GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetServiceTokensWithBody request with any body + PostFleetServiceTokensWithBody(ctx context.Context, contentType string, body io.Reader, 
reqEditors ...RequestEditorFn) (*http.Response, error) + + PostFleetServiceTokens(ctx context.Context, body PostFleetServiceTokensJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetSettings request + GetFleetSettings(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetSettingsWithBody request with any body + PutFleetSettingsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetSettings(ctx context.Context, body PutFleetSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostFleetSetup request + PostFleetSetup(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetSpaceSettings request + GetFleetSpaceSettings(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutFleetSpaceSettingsWithBody request with any body + PutFleetSpaceSettingsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutFleetSpaceSettings(ctx context.Context, body PutFleetSpaceSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetUninstallTokens request + GetFleetUninstallTokens(ctx context.Context, params *GetFleetUninstallTokensParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetFleetUninstallTokensUninstalltokenid request + GetFleetUninstallTokensUninstalltokenid(ctx context.Context, uninstallTokenId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteList request + DeleteList(ctx context.Context, params *DeleteListParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadList request + ReadList(ctx context.Context, params *ReadListParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PatchListWithBody request with any body + PatchListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PatchList(ctx context.Context, body PatchListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateListWithBody request with any body + CreateListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateList(ctx context.Context, body CreateListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateListWithBody request with any body + UpdateListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateList(ctx context.Context, body UpdateListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindLists request + FindLists(ctx context.Context, params *FindListsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteListIndex request + DeleteListIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadListIndex request + ReadListIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateListIndex request + CreateListIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteListItem request + DeleteListItem(ctx context.Context, params *DeleteListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadListItem request + ReadListItem(ctx context.Context, 
params *ReadListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PatchListItemWithBody request with any body + PatchListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PatchListItem(ctx context.Context, body PatchListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateListItemWithBody request with any body + CreateListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateListItem(ctx context.Context, body CreateListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateListItemWithBody request with any body + UpdateListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateListItem(ctx context.Context, body UpdateListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ExportListItems request + ExportListItems(ctx context.Context, params *ExportListItemsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindListItems request + FindListItems(ctx context.Context, params *FindListItemsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ImportListItemsWithBody request with any body + ImportListItemsWithBody(ctx context.Context, params *ImportListItemsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadListPrivileges request + ReadListPrivileges(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteLogstashPipeline request + DeleteLogstashPipeline(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetLogstashPipeline request + GetLogstashPipeline(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutLogstashPipelineWithBody request with any body + PutLogstashPipelineWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutLogstashPipeline(ctx context.Context, id string, body PutLogstashPipelineJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetLogstashPipelines request + GetLogstashPipelines(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostMaintenanceWindowIdArchive request + PostMaintenanceWindowIdArchive(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostMaintenanceWindowIdUnarchive request + PostMaintenanceWindowIdUnarchive(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // MlSync request + MlSync(ctx context.Context, params *MlSyncParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteNoteWithBody request with any body + DeleteNoteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteNote(ctx context.Context, body DeleteNoteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetNotes request + GetNotes(ctx context.Context, params *GetNotesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PersistNoteRouteWithBody request with any body + PersistNoteRouteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors 
...RequestEditorFn) (*http.Response, error) + + PersistNoteRoute(ctx context.Context, body PersistNoteRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ObservabilityAiAssistantChatCompleteWithBody request with any body + ObservabilityAiAssistantChatCompleteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + ObservabilityAiAssistantChatComplete(ctx context.Context, body ObservabilityAiAssistantChatCompleteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryFindLiveQueries request + OsqueryFindLiveQueries(ctx context.Context, params *OsqueryFindLiveQueriesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryCreateLiveQueryWithBody request with any body + OsqueryCreateLiveQueryWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + OsqueryCreateLiveQuery(ctx context.Context, body OsqueryCreateLiveQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryGetLiveQueryDetails request + OsqueryGetLiveQueryDetails(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryGetLiveQueryResults request + OsqueryGetLiveQueryResults(ctx context.Context, id string, actionId string, params *OsqueryGetLiveQueryResultsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryFindPacks request + OsqueryFindPacks(ctx context.Context, params *OsqueryFindPacksParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryCreatePacksWithBody request with any body + OsqueryCreatePacksWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + OsqueryCreatePacks(ctx context.Context, body OsqueryCreatePacksJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryDeletePacks request + OsqueryDeletePacks(ctx context.Context, id SecurityOsqueryAPIPackId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryGetPacksDetails request + OsqueryGetPacksDetails(ctx context.Context, id SecurityOsqueryAPIPackId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryUpdatePacksWithBody request with any body + OsqueryUpdatePacksWithBody(ctx context.Context, id SecurityOsqueryAPIPackId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + OsqueryUpdatePacks(ctx context.Context, id SecurityOsqueryAPIPackId, body OsqueryUpdatePacksJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryFindSavedQueries request + OsqueryFindSavedQueries(ctx context.Context, params *OsqueryFindSavedQueriesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryCreateSavedQueryWithBody request with any body + OsqueryCreateSavedQueryWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + OsqueryCreateSavedQuery(ctx context.Context, body OsqueryCreateSavedQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryDeleteSavedQuery request + OsqueryDeleteSavedQuery(ctx context.Context, id SecurityOsqueryAPISavedQueryId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryGetSavedQueryDetails request + OsqueryGetSavedQueryDetails(ctx context.Context, id SecurityOsqueryAPISavedQueryId, 
reqEditors ...RequestEditorFn) (*http.Response, error) + + // OsqueryUpdateSavedQueryWithBody request with any body + OsqueryUpdateSavedQueryWithBody(ctx context.Context, id SecurityOsqueryAPISavedQueryId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + OsqueryUpdateSavedQuery(ctx context.Context, id SecurityOsqueryAPISavedQueryId, body OsqueryUpdateSavedQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PersistPinnedEventRouteWithBody request with any body + PersistPinnedEventRouteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PersistPinnedEventRoute(ctx context.Context, body PersistPinnedEventRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CleanUpRiskEngine request + CleanUpRiskEngine(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ConfigureRiskEngineSavedObjectWithBody request with any body + ConfigureRiskEngineSavedObjectWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + ConfigureRiskEngineSavedObject(ctx context.Context, body ConfigureRiskEngineSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ScheduleRiskEngineNowWithBody request with any body + ScheduleRiskEngineNowWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + ScheduleRiskEngineNow(ctx context.Context, body ScheduleRiskEngineNowJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // BulkCreateSavedObjectsWithBody request with any body + BulkCreateSavedObjectsWithBody(ctx context.Context, params *BulkCreateSavedObjectsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + BulkCreateSavedObjects(ctx context.Context, params *BulkCreateSavedObjectsParams, body BulkCreateSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // BulkDeleteSavedObjectsWithBody request with any body + BulkDeleteSavedObjectsWithBody(ctx context.Context, params *BulkDeleteSavedObjectsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + BulkDeleteSavedObjects(ctx context.Context, params *BulkDeleteSavedObjectsParams, body BulkDeleteSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // BulkGetSavedObjectsWithBody request with any body + BulkGetSavedObjectsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + BulkGetSavedObjects(ctx context.Context, body BulkGetSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // BulkResolveSavedObjectsWithBody request with any body + BulkResolveSavedObjectsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + BulkResolveSavedObjects(ctx context.Context, body BulkResolveSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // BulkUpdateSavedObjectsWithBody request with any body + BulkUpdateSavedObjectsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + BulkUpdateSavedObjects(ctx context.Context, body BulkUpdateSavedObjectsJSONRequestBody, 
reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSavedObjectsExportWithBody request with any body + PostSavedObjectsExportWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSavedObjectsExport(ctx context.Context, body PostSavedObjectsExportJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindSavedObjects request + FindSavedObjects(ctx context.Context, params *FindSavedObjectsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSavedObjectsImportWithBody request with any body + PostSavedObjectsImportWithBody(ctx context.Context, params *PostSavedObjectsImportParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ResolveImportErrorsWithBody request with any body + ResolveImportErrorsWithBody(ctx context.Context, params *ResolveImportErrorsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ResolveSavedObject request + ResolveSavedObject(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateSavedObjectWithBody request with any body + CreateSavedObjectWithBody(ctx context.Context, pType SavedObjectsSavedObjectType, params *CreateSavedObjectParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateSavedObject(ctx context.Context, pType SavedObjectsSavedObjectType, params *CreateSavedObjectParams, body CreateSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetSavedObject request + GetSavedObject(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateSavedObjectIdWithBody request with any body + CreateSavedObjectIdWithBody(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, params *CreateSavedObjectIdParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateSavedObjectId(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, params *CreateSavedObjectIdParams, body CreateSavedObjectIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateSavedObjectWithBody request with any body + UpdateSavedObjectWithBody(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateSavedObject(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, body UpdateSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetSecurityRole request + GetSecurityRole(ctx context.Context, params *GetSecurityRoleParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSecurityRoleQueryWithBody request with any body + PostSecurityRoleQueryWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSecurityRoleQuery(ctx context.Context, body PostSecurityRoleQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteSecurityRoleName request + DeleteSecurityRoleName(ctx context.Context, name string, reqEditors ...RequestEditorFn) (*http.Response, error) + 
+ // GetSecurityRoleName request + GetSecurityRoleName(ctx context.Context, name string, params *GetSecurityRoleNameParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutSecurityRoleNameWithBody request with any body + PutSecurityRoleNameWithBody(ctx context.Context, name string, params *PutSecurityRoleNameParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutSecurityRoleName(ctx context.Context, name string, params *PutSecurityRoleNameParams, body PutSecurityRoleNameJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSecurityRolesWithBody request with any body + PostSecurityRolesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSecurityRoles(ctx context.Context, body PostSecurityRolesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSecuritySessionInvalidateWithBody request with any body + PostSecuritySessionInvalidateWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSecuritySessionInvalidate(ctx context.Context, body PostSecuritySessionInvalidateJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PerformAnonymizationFieldsBulkActionWithBody request with any body + PerformAnonymizationFieldsBulkActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PerformAnonymizationFieldsBulkAction(ctx context.Context, body PerformAnonymizationFieldsBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindAnonymizationFields request + FindAnonymizationFields(ctx context.Context, params *FindAnonymizationFieldsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ChatCompleteWithBody request with any body + ChatCompleteWithBody(ctx context.Context, params *ChatCompleteParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + ChatComplete(ctx context.Context, params *ChatCompleteParams, body ChatCompleteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteAllConversationsWithBody request with any body + DeleteAllConversationsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteAllConversations(ctx context.Context, body DeleteAllConversationsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateConversationWithBody request with any body + CreateConversationWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateConversation(ctx context.Context, body CreateConversationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindConversations request + FindConversations(ctx context.Context, params *FindConversationsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteConversation request + DeleteConversation(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadConversation request + ReadConversation(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateConversationWithBody request with any body + 
UpdateConversationWithBody(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateConversation(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, body UpdateConversationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateKnowledgeBaseEntryWithBody request with any body + CreateKnowledgeBaseEntryWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateKnowledgeBaseEntry(ctx context.Context, body CreateKnowledgeBaseEntryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PerformKnowledgeBaseEntryBulkActionWithBody request with any body + PerformKnowledgeBaseEntryBulkActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PerformKnowledgeBaseEntryBulkAction(ctx context.Context, body PerformKnowledgeBaseEntryBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindKnowledgeBaseEntries request + FindKnowledgeBaseEntries(ctx context.Context, params *FindKnowledgeBaseEntriesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteKnowledgeBaseEntry request + DeleteKnowledgeBaseEntry(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadKnowledgeBaseEntry request + ReadKnowledgeBaseEntry(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateKnowledgeBaseEntryWithBody request with any body + UpdateKnowledgeBaseEntryWithBody(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateKnowledgeBaseEntry(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, body UpdateKnowledgeBaseEntryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ReadKnowledgeBase request + ReadKnowledgeBase(ctx context.Context, resource string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateKnowledgeBase request + CreateKnowledgeBase(ctx context.Context, resource string, params *CreateKnowledgeBaseParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PerformPromptsBulkActionWithBody request with any body + PerformPromptsBulkActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PerformPromptsBulkAction(ctx context.Context, body PerformPromptsBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindPrompts request + FindPrompts(ctx context.Context, params *FindPromptsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostUrlWithBody request with any body + PostUrlWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostUrl(ctx context.Context, body PostUrlJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ResolveUrl request + ResolveUrl(ctx context.Context, slug string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteUrl request + DeleteUrl(ctx context.Context, id ShortURLAPIsIdParam, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetUrl request + GetUrl(ctx 
context.Context, id ShortURLAPIsIdParam, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSpacesCopySavedObjectsWithBody request with any body + PostSpacesCopySavedObjectsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSpacesCopySavedObjects(ctx context.Context, body PostSpacesCopySavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSpacesDisableLegacyUrlAliasesWithBody request with any body + PostSpacesDisableLegacyUrlAliasesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSpacesDisableLegacyUrlAliases(ctx context.Context, body PostSpacesDisableLegacyUrlAliasesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSpacesGetShareableReferencesWithBody request with any body + PostSpacesGetShareableReferencesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSpacesGetShareableReferences(ctx context.Context, body PostSpacesGetShareableReferencesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSpacesResolveCopySavedObjectsErrorsWithBody request with any body + PostSpacesResolveCopySavedObjectsErrorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSpacesResolveCopySavedObjectsErrors(ctx context.Context, body PostSpacesResolveCopySavedObjectsErrorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSpacesUpdateObjectsSpacesWithBody request with any body + PostSpacesUpdateObjectsSpacesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSpacesUpdateObjectsSpaces(ctx context.Context, body PostSpacesUpdateObjectsSpacesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetSpacesSpace request + GetSpacesSpace(ctx context.Context, params *GetSpacesSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSpacesSpaceWithBody request with any body + PostSpacesSpaceWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSpacesSpace(ctx context.Context, body PostSpacesSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteSpacesSpaceId request + DeleteSpacesSpaceId(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetSpacesSpaceId request + GetSpacesSpaceId(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutSpacesSpaceIdWithBody request with any body + PutSpacesSpaceIdWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutSpacesSpaceId(ctx context.Context, id string, body PutSpacesSpaceIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetStatus request + GetStatus(ctx context.Context, params *GetStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetStreamsWithBody request with any body + GetStreamsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + GetStreams(ctx context.Context, body GetStreamsJSONRequestBody, 
reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostStreamsDisableWithBody request with any body + PostStreamsDisableWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostStreamsDisable(ctx context.Context, body PostStreamsDisableJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostStreamsEnableWithBody request with any body + PostStreamsEnableWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostStreamsEnable(ctx context.Context, body PostStreamsEnableJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostStreamsResyncWithBody request with any body + PostStreamsResyncWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostStreamsResync(ctx context.Context, body PostStreamsResyncJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteStreamsNameWithBody request with any body + DeleteStreamsNameWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteStreamsName(ctx context.Context, name string, body DeleteStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetStreamsNameWithBody request with any body + GetStreamsNameWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + GetStreamsName(ctx context.Context, name string, body GetStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutStreamsNameWithBody request with any body + PutStreamsNameWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutStreamsName(ctx context.Context, name string, body PutStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostStreamsNameForkWithBody request with any body + PostStreamsNameForkWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostStreamsNameFork(ctx context.Context, name string, body PostStreamsNameForkJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetStreamsNameGroupWithBody request with any body + GetStreamsNameGroupWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + GetStreamsNameGroup(ctx context.Context, name string, body GetStreamsNameGroupJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutStreamsNameGroupWithBody request with any body + PutStreamsNameGroupWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutStreamsNameGroup(ctx context.Context, name string, body PutStreamsNameGroupJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetStreamsNameIngestWithBody request with any body + GetStreamsNameIngestWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + GetStreamsNameIngest(ctx context.Context, name string, body GetStreamsNameIngestJSONRequestBody, reqEditors ...RequestEditorFn) 
(*http.Response, error) + + // PutStreamsNameIngestWithBody request with any body + PutStreamsNameIngestWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutStreamsNameIngest(ctx context.Context, name string, body PutStreamsNameIngestJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostStreamsNameContentExportWithBody request with any body + PostStreamsNameContentExportWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostStreamsNameContentExport(ctx context.Context, name string, body PostStreamsNameContentExportJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostStreamsNameContentImportWithBody request with any body + PostStreamsNameContentImportWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetStreamsNameDashboardsWithBody request with any body + GetStreamsNameDashboardsWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + GetStreamsNameDashboards(ctx context.Context, name string, body GetStreamsNameDashboardsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostStreamsNameDashboardsBulkWithBody request with any body + PostStreamsNameDashboardsBulkWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostStreamsNameDashboardsBulk(ctx context.Context, name string, body PostStreamsNameDashboardsBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteStreamsNameDashboardsDashboardidWithBody request with any body + DeleteStreamsNameDashboardsDashboardidWithBody(ctx context.Context, name string, dashboardId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteStreamsNameDashboardsDashboardid(ctx context.Context, name string, dashboardId string, body DeleteStreamsNameDashboardsDashboardidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutStreamsNameDashboardsDashboardidWithBody request with any body + PutStreamsNameDashboardsDashboardidWithBody(ctx context.Context, name string, dashboardId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutStreamsNameDashboardsDashboardid(ctx context.Context, name string, dashboardId string, body PutStreamsNameDashboardsDashboardidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetStreamsNameQueriesWithBody request with any body + GetStreamsNameQueriesWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + GetStreamsNameQueries(ctx context.Context, name string, body GetStreamsNameQueriesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostStreamsNameQueriesBulkWithBody request with any body + PostStreamsNameQueriesBulkWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostStreamsNameQueriesBulk(ctx context.Context, name string, body PostStreamsNameQueriesBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // 
DeleteStreamsNameQueriesQueryidWithBody request with any body + DeleteStreamsNameQueriesQueryidWithBody(ctx context.Context, name string, queryId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteStreamsNameQueriesQueryid(ctx context.Context, name string, queryId string, body DeleteStreamsNameQueriesQueryidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutStreamsNameQueriesQueryidWithBody request with any body + PutStreamsNameQueriesQueryidWithBody(ctx context.Context, name string, queryId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutStreamsNameQueriesQueryid(ctx context.Context, name string, queryId string, body PutStreamsNameQueriesQueryidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetStreamsNameRulesWithBody request with any body + GetStreamsNameRulesWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + GetStreamsNameRules(ctx context.Context, name string, body GetStreamsNameRulesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteStreamsNameRulesRuleidWithBody request with any body + DeleteStreamsNameRulesRuleidWithBody(ctx context.Context, name string, ruleId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteStreamsNameRulesRuleid(ctx context.Context, name string, ruleId string, body DeleteStreamsNameRulesRuleidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutStreamsNameRulesRuleidWithBody request with any body + PutStreamsNameRulesRuleidWithBody(ctx context.Context, name string, ruleId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutStreamsNameRulesRuleid(ctx context.Context, name string, ruleId string, body PutStreamsNameRulesRuleidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetStreamsNameSignificantEventsWithBody request with any body + GetStreamsNameSignificantEventsWithBody(ctx context.Context, name string, params *GetStreamsNameSignificantEventsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + GetStreamsNameSignificantEvents(ctx context.Context, name string, params *GetStreamsNameSignificantEventsParams, body GetStreamsNameSignificantEventsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetStreamsNameSignificantEventsGenerateWithBody request with any body + GetStreamsNameSignificantEventsGenerateWithBody(ctx context.Context, name string, params *GetStreamsNameSignificantEventsGenerateParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + GetStreamsNameSignificantEventsGenerate(ctx context.Context, name string, params *GetStreamsNameSignificantEventsGenerateParams, body GetStreamsNameSignificantEventsGenerateJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostStreamsNameSignificantEventsPreviewWithBody request with any body + PostStreamsNameSignificantEventsPreviewWithBody(ctx context.Context, name string, params *PostStreamsNameSignificantEventsPreviewParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostStreamsNameSignificantEventsPreview(ctx context.Context, name string, params 
*PostStreamsNameSignificantEventsPreviewParams, body PostStreamsNameSignificantEventsPreviewJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSyntheticsMonitorTest request + PostSyntheticsMonitorTest(ctx context.Context, monitorId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetSyntheticMonitors request + GetSyntheticMonitors(ctx context.Context, params *GetSyntheticMonitorsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostSyntheticMonitorsWithBody request with any body + PostSyntheticMonitorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostSyntheticMonitors(ctx context.Context, body PostSyntheticMonitorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteSyntheticMonitorsWithBody request with any body + DeleteSyntheticMonitorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteSyntheticMonitors(ctx context.Context, body DeleteSyntheticMonitorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteSyntheticMonitor request + DeleteSyntheticMonitor(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetSyntheticMonitor request + GetSyntheticMonitor(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutSyntheticMonitorWithBody request with any body + PutSyntheticMonitorWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutSyntheticMonitor(ctx context.Context, id string, body PutSyntheticMonitorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetParameters request + GetParameters(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostParametersWithBody request with any body + PostParametersWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostParameters(ctx context.Context, body PostParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteParametersWithBody request with any body + DeleteParametersWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteParameters(ctx context.Context, body DeleteParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteParameter request + DeleteParameter(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetParameter request + GetParameter(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutParameterWithBody request with any body + PutParameterWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutParameter(ctx context.Context, id string, body PutParameterJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetPrivateLocations request + GetPrivateLocations(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostPrivateLocationWithBody request with any body + PostPrivateLocationWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, 
error) + + PostPrivateLocation(ctx context.Context, body PostPrivateLocationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeletePrivateLocation request + DeletePrivateLocation(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetPrivateLocation request + GetPrivateLocation(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutPrivateLocationWithBody request with any body + PutPrivateLocationWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutPrivateLocation(ctx context.Context, id string, body PutPrivateLocationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // TaskManagerHealth request + TaskManagerHealth(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteTimelinesWithBody request with any body + DeleteTimelinesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteTimelines(ctx context.Context, body DeleteTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetTimeline request + GetTimeline(ctx context.Context, params *GetTimelineParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PatchTimelineWithBody request with any body + PatchTimelineWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PatchTimeline(ctx context.Context, body PatchTimelineJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateTimelinesWithBody request with any body + CreateTimelinesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateTimelines(ctx context.Context, body CreateTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CopyTimelineWithBody request with any body + CopyTimelineWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CopyTimeline(ctx context.Context, body CopyTimelineJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetDraftTimelines request + GetDraftTimelines(ctx context.Context, params *GetDraftTimelinesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CleanDraftTimelinesWithBody request with any body + CleanDraftTimelinesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CleanDraftTimelines(ctx context.Context, body CleanDraftTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ExportTimelinesWithBody request with any body + ExportTimelinesWithBody(ctx context.Context, params *ExportTimelinesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + ExportTimelines(ctx context.Context, params *ExportTimelinesParams, body ExportTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PersistFavoriteRouteWithBody request with any body + PersistFavoriteRouteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PersistFavoriteRoute(ctx context.Context, body PersistFavoriteRouteJSONRequestBody, reqEditors ...RequestEditorFn) 
(*http.Response, error) + + // ImportTimelinesWithBody request with any body + ImportTimelinesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + ImportTimelines(ctx context.Context, body ImportTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // InstallPrepackedTimelinesWithBody request with any body + InstallPrepackedTimelinesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + InstallPrepackedTimelines(ctx context.Context, body InstallPrepackedTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ResolveTimeline request + ResolveTimeline(ctx context.Context, params *ResolveTimelineParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetTimelines request + GetTimelines(ctx context.Context, params *GetTimelinesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetUpgradeStatus request + GetUpgradeStatus(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetUptimeSettings request + GetUptimeSettings(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutUptimeSettingsWithBody request with any body + PutUptimeSettingsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutUptimeSettings(ctx context.Context, body PutUptimeSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteActionsConnectorId request + DeleteActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetActionsConnectorId request + GetActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostActionsConnectorIdWithBody request with any body + PostActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PutActionsConnectorIdWithBody request with any body + PutActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PutActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetActionsConnectors request + GetActionsConnectors(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetAllDataViewsDefault request + GetAllDataViewsDefault(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateDataViewDefaultwWithBody request with any body + CreateDataViewDefaultwWithBody(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateDataViewDefaultw(ctx context.Context, spaceId SpaceId, body CreateDataViewDefaultwJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteDataViewDefault request + DeleteDataViewDefault(ctx context.Context, spaceId SpaceId, viewId 
DataViewsViewId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetDataViewDefault request + GetDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateDataViewDefaultWithBody request with any body + UpdateDataViewDefaultWithBody(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PostMaintenanceWindowWithBody request with any body + PostMaintenanceWindowWithBody(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PostMaintenanceWindow(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteMaintenanceWindowId request + DeleteMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetMaintenanceWindowId request + GetMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // PatchMaintenanceWindowIdWithBody request with any body + PatchMaintenanceWindowIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + PatchMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // FindSlosOp request + FindSlosOp(ctx context.Context, spaceId SLOsSpaceId, params *FindSlosOpParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // CreateSloOpWithBody request with any body + CreateSloOpWithBody(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + CreateSloOp(ctx context.Context, spaceId SLOsSpaceId, body CreateSloOpJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // BulkDeleteOpWithBody request with any body + BulkDeleteOpWithBody(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + BulkDeleteOp(ctx context.Context, spaceId SLOsSpaceId, body BulkDeleteOpJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // BulkDeleteStatusOp request + BulkDeleteStatusOp(ctx context.Context, spaceId SLOsSpaceId, taskId string, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteRollupDataOpWithBody request with any body + DeleteRollupDataOpWithBody(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteRollupDataOp(ctx context.Context, spaceId SLOsSpaceId, body DeleteRollupDataOpJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteSloInstancesOpWithBody request with any body + DeleteSloInstancesOpWithBody(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + DeleteSloInstancesOp(ctx context.Context, spaceId SLOsSpaceId, body DeleteSloInstancesOpJSONRequestBody, 
reqEditors ...RequestEditorFn) (*http.Response, error) + + // DeleteSloOp request + DeleteSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetSloOp request + GetSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, params *GetSloOpParams, reqEditors ...RequestEditorFn) (*http.Response, error) + + // UpdateSloOpWithBody request with any body + UpdateSloOpWithBody(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + + UpdateSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, body UpdateSloOpJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + + // ResetSloOp request + ResetSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // DisableSloOp request + DisableSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // EnableSloOp request + EnableSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*http.Response, error) + + // GetDefinitionsOp request + GetDefinitionsOp(ctx context.Context, spaceId SLOsSpaceId, params *GetDefinitionsOpParams, reqEditors ...RequestEditorFn) (*http.Response, error) +} + +func (c *Client) PostActionsConnectorIdExecuteWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostActionsConnectorIdExecuteRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostActionsConnectorIdExecute(ctx context.Context, id string, body PostActionsConnectorIdExecuteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostActionsConnectorIdExecuteRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetActionsConnectorTypes(ctx context.Context, params *GetActionsConnectorTypesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetActionsConnectorTypesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetAlertingHealth(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAlertingHealthRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteAlertingRuleId(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteAlertingRuleIdRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetAlertingRuleId(ctx context.Context, id string, reqEditors 
...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAlertingRuleIdRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleIdWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleIdRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleId(ctx context.Context, id string, body PostAlertingRuleIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleIdRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutAlertingRuleIdWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutAlertingRuleIdRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutAlertingRuleId(ctx context.Context, id string, body PutAlertingRuleIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutAlertingRuleIdRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleIdDisableWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleIdDisableRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleIdDisable(ctx context.Context, id string, body PostAlertingRuleIdDisableJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleIdDisableRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleIdEnable(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleIdEnableRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleIdMuteAll(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleIdMuteAllRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, 
req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleIdUnmuteAll(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleIdUnmuteAllRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleIdUpdateApiKey(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleIdUpdateApiKeyRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleIdSnoozeScheduleWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleIdSnoozeScheduleRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleIdSnoozeSchedule(ctx context.Context, id string, body PostAlertingRuleIdSnoozeScheduleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleIdSnoozeScheduleRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteAlertingRuleRuleidSnoozeScheduleScheduleid(ctx context.Context, ruleId string, scheduleId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteAlertingRuleRuleidSnoozeScheduleScheduleidRequest(c.Server, ruleId, scheduleId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleRuleIdAlertAlertIdMute(ctx context.Context, ruleId string, alertId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleRuleIdAlertAlertIdMuteRequest(c.Server, ruleId, alertId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostAlertingRuleRuleIdAlertAlertIdUnmute(ctx context.Context, ruleId string, alertId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostAlertingRuleRuleIdAlertAlertIdUnmuteRequest(c.Server, ruleId, alertId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetRuleTypes(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetRuleTypesRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetAlertingRulesFind(ctx context.Context, params 
*GetAlertingRulesFindParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAlertingRulesFindRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAgentKeyWithBody(ctx context.Context, params *CreateAgentKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAgentKeyRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAgentKey(ctx context.Context, params *CreateAgentKeyParams, body CreateAgentKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAgentKeyRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SaveApmServerSchemaWithBody(ctx context.Context, params *SaveApmServerSchemaParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSaveApmServerSchemaRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SaveApmServerSchema(ctx context.Context, params *SaveApmServerSchemaParams, body SaveApmServerSchemaJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSaveApmServerSchemaRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAnnotationWithBody(ctx context.Context, serviceName string, params *CreateAnnotationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAnnotationRequestWithBody(c.Server, serviceName, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAnnotation(ctx context.Context, serviceName string, params *CreateAnnotationParams, body CreateAnnotationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAnnotationRequest(c.Server, serviceName, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetAnnotation(ctx context.Context, serviceName string, params *GetAnnotationParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAnnotationRequest(c.Server, serviceName, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteAgentConfigurationWithBody(ctx 
context.Context, params *DeleteAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteAgentConfigurationRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteAgentConfiguration(ctx context.Context, params *DeleteAgentConfigurationParams, body DeleteAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteAgentConfigurationRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetAgentConfigurations(ctx context.Context, params *GetAgentConfigurationsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAgentConfigurationsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateUpdateAgentConfigurationWithBody(ctx context.Context, params *CreateUpdateAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateUpdateAgentConfigurationRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateUpdateAgentConfiguration(ctx context.Context, params *CreateUpdateAgentConfigurationParams, body CreateUpdateAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateUpdateAgentConfigurationRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetAgentNameForService(ctx context.Context, params *GetAgentNameForServiceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAgentNameForServiceRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetEnvironmentsForService(ctx context.Context, params *GetEnvironmentsForServiceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetEnvironmentsForServiceRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SearchSingleConfigurationWithBody(ctx context.Context, params *SearchSingleConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSearchSingleConfigurationRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + 
return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SearchSingleConfiguration(ctx context.Context, params *SearchSingleConfigurationParams, body SearchSingleConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSearchSingleConfigurationRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetSingleAgentConfiguration(ctx context.Context, params *GetSingleAgentConfigurationParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetSingleAgentConfigurationRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetSourceMaps(ctx context.Context, params *GetSourceMapsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetSourceMapsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UploadSourceMapWithBody(ctx context.Context, params *UploadSourceMapParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUploadSourceMapRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteSourceMap(ctx context.Context, id string, params *DeleteSourceMapParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteSourceMapRequest(c.Server, id, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteAssetCriticalityRecord(ctx context.Context, params *DeleteAssetCriticalityRecordParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteAssetCriticalityRecordRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetAssetCriticalityRecord(ctx context.Context, params *GetAssetCriticalityRecordParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAssetCriticalityRecordRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAssetCriticalityRecordWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAssetCriticalityRecordRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAssetCriticalityRecord(ctx context.Context, body 
CreateAssetCriticalityRecordJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAssetCriticalityRecordRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkUpsertAssetCriticalityRecordsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkUpsertAssetCriticalityRecordsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkUpsertAssetCriticalityRecords(ctx context.Context, body BulkUpsertAssetCriticalityRecordsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkUpsertAssetCriticalityRecordsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindAssetCriticalityRecords(ctx context.Context, params *FindAssetCriticalityRecordsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindAssetCriticalityRecordsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteCaseDefaultSpace(ctx context.Context, params *DeleteCaseDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteCaseDefaultSpaceRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateCaseDefaultSpaceWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateCaseDefaultSpaceRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateCaseDefaultSpace(ctx context.Context, body UpdateCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateCaseDefaultSpaceRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateCaseDefaultSpaceWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateCaseDefaultSpaceRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateCaseDefaultSpace(ctx context.Context, body CreateCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewCreateCaseDefaultSpaceRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindCasesDefaultSpace(ctx context.Context, params *FindCasesDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindCasesDefaultSpaceRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetCasesByAlertDefaultSpace(ctx context.Context, alertId CasesAlertId, params *GetCasesByAlertDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetCasesByAlertDefaultSpaceRequest(c.Server, alertId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetCaseConfigurationDefaultSpace(ctx context.Context, params *GetCaseConfigurationDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetCaseConfigurationDefaultSpaceRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SetCaseConfigurationDefaultSpaceWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSetCaseConfigurationDefaultSpaceRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SetCaseConfigurationDefaultSpace(ctx context.Context, body SetCaseConfigurationDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSetCaseConfigurationDefaultSpaceRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindCaseConnectorsDefaultSpace(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindCaseConnectorsDefaultSpaceRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateCaseConfigurationDefaultSpaceWithBody(ctx context.Context, configurationId CasesConfigurationId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateCaseConfigurationDefaultSpaceRequestWithBody(c.Server, configurationId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateCaseConfigurationDefaultSpace(ctx context.Context, configurationId CasesConfigurationId, body UpdateCaseConfigurationDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewUpdateCaseConfigurationDefaultSpaceRequest(c.Server, configurationId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetCaseReportersDefaultSpace(ctx context.Context, params *GetCaseReportersDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetCaseReportersDefaultSpaceRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetCaseTagsDefaultSpace(ctx context.Context, params *GetCaseTagsDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetCaseTagsDefaultSpaceRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetCaseDefaultSpace(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetCaseDefaultSpaceRequest(c.Server, caseId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetCaseAlertsDefaultSpace(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetCaseAlertsDefaultSpaceRequest(c.Server, caseId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteCaseCommentsDefaultSpace(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteCaseCommentsDefaultSpaceRequest(c.Server, caseId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateCaseCommentDefaultSpaceWithBody(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateCaseCommentDefaultSpaceRequestWithBody(c.Server, caseId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateCaseCommentDefaultSpace(ctx context.Context, caseId CasesCaseId, body UpdateCaseCommentDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateCaseCommentDefaultSpaceRequest(c.Server, caseId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) AddCaseCommentDefaultSpaceWithBody(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewAddCaseCommentDefaultSpaceRequestWithBody(c.Server, caseId, contentType, body) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) AddCaseCommentDefaultSpace(ctx context.Context, caseId CasesCaseId, body AddCaseCommentDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewAddCaseCommentDefaultSpaceRequest(c.Server, caseId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindCaseCommentsDefaultSpace(ctx context.Context, caseId CasesCaseId, params *FindCaseCommentsDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindCaseCommentsDefaultSpaceRequest(c.Server, caseId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteCaseCommentDefaultSpace(ctx context.Context, caseId CasesCaseId, commentId CasesCommentId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteCaseCommentDefaultSpaceRequest(c.Server, caseId, commentId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetCaseCommentDefaultSpace(ctx context.Context, caseId CasesCaseId, commentId CasesCommentId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetCaseCommentDefaultSpaceRequest(c.Server, caseId, commentId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PushCaseDefaultSpaceWithBody(ctx context.Context, caseId CasesCaseId, connectorId CasesConnectorId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPushCaseDefaultSpaceRequestWithBody(c.Server, caseId, connectorId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PushCaseDefaultSpace(ctx context.Context, caseId CasesCaseId, connectorId CasesConnectorId, body PushCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPushCaseDefaultSpaceRequest(c.Server, caseId, connectorId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) AddCaseFileDefaultSpaceWithBody(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewAddCaseFileDefaultSpaceRequestWithBody(c.Server, caseId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindCaseActivityDefaultSpace(ctx context.Context, caseId CasesCaseId, params *FindCaseActivityDefaultSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewFindCaseActivityDefaultSpaceRequest(c.Server, caseId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateFieldsMetadataDefaultWithBody(ctx context.Context, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateFieldsMetadataDefaultRequestWithBody(c.Server, viewId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateFieldsMetadataDefault(ctx context.Context, viewId DataViewsViewId, body UpdateFieldsMetadataDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateFieldsMetadataDefaultRequest(c.Server, viewId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateRuntimeFieldDefaultWithBody(ctx context.Context, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateRuntimeFieldDefaultRequestWithBody(c.Server, viewId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateRuntimeFieldDefault(ctx context.Context, viewId DataViewsViewId, body CreateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateRuntimeFieldDefaultRequest(c.Server, viewId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateUpdateRuntimeFieldDefaultWithBody(ctx context.Context, viewId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateUpdateRuntimeFieldDefaultRequestWithBody(c.Server, viewId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateUpdateRuntimeFieldDefault(ctx context.Context, viewId string, body CreateUpdateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateUpdateRuntimeFieldDefaultRequest(c.Server, viewId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteRuntimeFieldDefault(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteRuntimeFieldDefaultRequest(c.Server, viewId, fieldName) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetRuntimeFieldDefault(ctx context.Context, viewId 
DataViewsViewId, fieldName DataViewsFieldName, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetRuntimeFieldDefaultRequest(c.Server, viewId, fieldName) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateRuntimeFieldDefaultWithBody(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateRuntimeFieldDefaultRequestWithBody(c.Server, viewId, fieldName, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateRuntimeFieldDefault(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, body UpdateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateRuntimeFieldDefaultRequest(c.Server, viewId, fieldName, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetDefaultDataViewDefault(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetDefaultDataViewDefaultRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SetDefaultDatailViewDefaultWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSetDefaultDatailViewDefaultRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SetDefaultDatailViewDefault(ctx context.Context, body SetDefaultDatailViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSetDefaultDatailViewDefaultRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SwapDataViewsDefaultWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSwapDataViewsDefaultRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SwapDataViewsDefault(ctx context.Context, body SwapDataViewsDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSwapDataViewsDefaultRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PreviewSwapDataViewsDefaultWithBody(ctx context.Context, contentType string, body io.Reader, 
reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPreviewSwapDataViewsDefaultRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PreviewSwapDataViewsDefault(ctx context.Context, body PreviewSwapDataViewsDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPreviewSwapDataViewsDefaultRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteAlertsIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteAlertsIndexRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadAlertsIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadAlertsIndexRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAlertsIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAlertsIndexRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadPrivileges(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadPrivilegesRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteRule(ctx context.Context, params *DeleteRuleParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteRuleRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadRule(ctx context.Context, params *ReadRuleParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadRuleRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchRuleWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchRuleRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchRule(ctx context.Context, body PatchRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchRuleRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if 
err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateRuleWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateRuleRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateRule(ctx context.Context, body CreateRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateRuleRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateRuleWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateRuleRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateRule(ctx context.Context, body UpdateRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateRuleRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PerformRulesBulkActionWithBody(ctx context.Context, params *PerformRulesBulkActionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPerformRulesBulkActionRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PerformRulesBulkAction(ctx context.Context, params *PerformRulesBulkActionParams, body PerformRulesBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPerformRulesBulkActionRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ExportRulesWithBody(ctx context.Context, params *ExportRulesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewExportRulesRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ExportRules(ctx context.Context, params *ExportRulesParams, body ExportRulesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewExportRulesRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindRules(ctx context.Context, params *FindRulesParams, reqEditors 
...RequestEditorFn) (*http.Response, error) { + req, err := NewFindRulesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ImportRulesWithBody(ctx context.Context, params *ImportRulesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewImportRulesRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) InstallPrebuiltRulesAndTimelines(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewInstallPrebuiltRulesAndTimelinesRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadPrebuiltRulesAndTimelinesStatus(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadPrebuiltRulesAndTimelinesStatusRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) RulePreviewWithBody(ctx context.Context, params *RulePreviewParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewRulePreviewRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) RulePreview(ctx context.Context, params *RulePreviewParams, body RulePreviewJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewRulePreviewRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateRuleExceptionListItemsWithBody(ctx context.Context, id SecurityExceptionsAPIRuleId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateRuleExceptionListItemsRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateRuleExceptionListItems(ctx context.Context, id SecurityExceptionsAPIRuleId, body CreateRuleExceptionListItemsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateRuleExceptionListItemsRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SetAlertAssigneesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSetAlertAssigneesRequestWithBody(c.Server, contentType, body) + if err != nil { 
+ return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SetAlertAssignees(ctx context.Context, body SetAlertAssigneesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSetAlertAssigneesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FinalizeAlertsMigrationWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFinalizeAlertsMigrationRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FinalizeAlertsMigration(ctx context.Context, body FinalizeAlertsMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFinalizeAlertsMigrationRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) AlertsMigrationCleanupWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewAlertsMigrationCleanupRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) AlertsMigrationCleanup(ctx context.Context, body AlertsMigrationCleanupJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewAlertsMigrationCleanupRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAlertsMigrationWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAlertsMigrationRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateAlertsMigration(ctx context.Context, body CreateAlertsMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateAlertsMigrationRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadAlertsMigrationStatus(ctx context.Context, params *ReadAlertsMigrationStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadAlertsMigrationStatusRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SearchAlertsWithBody(ctx 
context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSearchAlertsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SearchAlerts(ctx context.Context, body SearchAlertsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSearchAlertsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SetAlertsStatusWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSetAlertsStatusRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SetAlertsStatus(ctx context.Context, body SetAlertsStatusJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSetAlertsStatusRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SetAlertTagsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSetAlertTagsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) SetAlertTags(ctx context.Context, body SetAlertTagsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewSetAlertTagsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadTags(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadTagsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) RotateEncryptionKey(ctx context.Context, params *RotateEncryptionKeyParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewRotateEncryptionKeyRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointGetActionsList(ctx context.Context, params *EndpointGetActionsListParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointGetActionsListRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointExecuteActionWithBody(ctx 
context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointExecuteActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointExecuteAction(ctx context.Context, body EndpointExecuteActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointExecuteActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointGetFileActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointGetFileActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointGetFileAction(ctx context.Context, body EndpointGetFileActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointGetFileActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointIsolateActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointIsolateActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointIsolateAction(ctx context.Context, body EndpointIsolateActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointIsolateActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointKillProcessActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointKillProcessActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointKillProcessAction(ctx context.Context, body EndpointKillProcessActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointKillProcessActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointGetProcessesActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewEndpointGetProcessesActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointGetProcessesAction(ctx context.Context, body EndpointGetProcessesActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointGetProcessesActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) RunScriptActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewRunScriptActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) RunScriptAction(ctx context.Context, body RunScriptActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewRunScriptActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointScanActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointScanActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointScanAction(ctx context.Context, body EndpointScanActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointScanActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointGetActionsState(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointGetActionsStateRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointSuspendProcessActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointSuspendProcessActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointSuspendProcessAction(ctx context.Context, body EndpointSuspendProcessActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointSuspendProcessActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func 
(c *Client) EndpointUnisolateActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointUnisolateActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointUnisolateAction(ctx context.Context, body EndpointUnisolateActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointUnisolateActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointUploadActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointUploadActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointGetActionsDetails(ctx context.Context, actionId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointGetActionsDetailsRequest(c.Server, actionId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointFileInfo(ctx context.Context, actionId string, fileId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointFileInfoRequest(c.Server, actionId, fileId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointFileDownload(ctx context.Context, actionId string, fileId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointFileDownloadRequest(c.Server, actionId, fileId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EndpointGetActionsStatus(ctx context.Context, params *EndpointGetActionsStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEndpointGetActionsStatusRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetEndpointMetadataList(ctx context.Context, params *GetEndpointMetadataListParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetEndpointMetadataListRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetEndpointMetadata(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetEndpointMetadataRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if 
err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetPolicyResponse(ctx context.Context, params *GetPolicyResponseParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetPolicyResponseRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetProtectionUpdatesNote(ctx context.Context, packagePolicyId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetProtectionUpdatesNoteRequest(c.Server, packagePolicyId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateUpdateProtectionUpdatesNoteWithBody(ctx context.Context, packagePolicyId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateUpdateProtectionUpdatesNoteRequestWithBody(c.Server, packagePolicyId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateUpdateProtectionUpdatesNote(ctx context.Context, packagePolicyId string, body CreateUpdateProtectionUpdatesNoteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateUpdateProtectionUpdatesNoteRequest(c.Server, packagePolicyId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateEndpointList(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateEndpointListRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteEndpointListItem(ctx context.Context, params *DeleteEndpointListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteEndpointListItemRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadEndpointListItem(ctx context.Context, params *ReadEndpointListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadEndpointListItemRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateEndpointListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateEndpointListItemRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateEndpointListItem(ctx context.Context, body 
CreateEndpointListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateEndpointListItemRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateEndpointListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateEndpointListItemRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateEndpointListItem(ctx context.Context, body UpdateEndpointListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateEndpointListItemRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindEndpointListItems(ctx context.Context, params *FindEndpointListItemsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindEndpointListItemsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteMonitoringEngine(ctx context.Context, params *DeleteMonitoringEngineParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteMonitoringEngineRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DisableMonitoringEngine(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDisableMonitoringEngineRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) InitMonitoringEngine(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewInitMonitoringEngineRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ScheduleMonitoringEngine(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewScheduleMonitoringEngineRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PrivMonHealth(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPrivMonHealthRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PrivMonPrivileges(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewPrivMonPrivilegesRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreatePrivMonUserWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreatePrivMonUserRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreatePrivMonUser(ctx context.Context, body CreatePrivMonUserJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreatePrivMonUserRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PrivmonBulkUploadUsersCSVWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPrivmonBulkUploadUsersCSVRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ListPrivMonUsers(ctx context.Context, params *ListPrivMonUsersParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewListPrivMonUsersRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeletePrivMonUser(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeletePrivMonUserRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdatePrivMonUserWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdatePrivMonUserRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdatePrivMonUser(ctx context.Context, id string, body UpdatePrivMonUserJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdatePrivMonUserRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) InstallPrivilegedAccessDetectionPackage(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewInstallPrivilegedAccessDetectionPackageRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetPrivilegedAccessDetectionPackageStatus(ctx 
context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetPrivilegedAccessDetectionPackageStatusRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) InitEntityStoreWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewInitEntityStoreRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) InitEntityStore(ctx context.Context, body InitEntityStoreJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewInitEntityStoreRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ListEntityEngines(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewListEntityEnginesRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ApplyEntityEngineDataviewIndices(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewApplyEntityEngineDataviewIndicesRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteEntityEngine(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, params *DeleteEntityEngineParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteEntityEngineRequest(c.Server, entityType, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetEntityEngine(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetEntityEngineRequest(c.Server, entityType) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) InitEntityEngineWithBody(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewInitEntityEngineRequestWithBody(c.Server, entityType, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) InitEntityEngine(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, body InitEntityEngineJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewInitEntityEngineRequest(c.Server, entityType, body) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) StartEntityEngine(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewStartEntityEngineRequest(c.Server, entityType) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) StopEntityEngine(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewStopEntityEngineRequest(c.Server, entityType) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ListEntities(ctx context.Context, params *ListEntitiesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewListEntitiesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetEntityStoreStatus(ctx context.Context, params *GetEntityStoreStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetEntityStoreStatusRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteExceptionList(ctx context.Context, params *DeleteExceptionListParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteExceptionListRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadExceptionList(ctx context.Context, params *ReadExceptionListParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadExceptionListRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateExceptionListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateExceptionListRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateExceptionList(ctx context.Context, body CreateExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateExceptionListRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateExceptionListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewUpdateExceptionListRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateExceptionList(ctx context.Context, body UpdateExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateExceptionListRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DuplicateExceptionList(ctx context.Context, params *DuplicateExceptionListParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDuplicateExceptionListRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ExportExceptionList(ctx context.Context, params *ExportExceptionListParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewExportExceptionListRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindExceptionLists(ctx context.Context, params *FindExceptionListsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindExceptionListsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ImportExceptionListWithBody(ctx context.Context, params *ImportExceptionListParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewImportExceptionListRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteExceptionListItem(ctx context.Context, params *DeleteExceptionListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteExceptionListItemRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadExceptionListItem(ctx context.Context, params *ReadExceptionListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadExceptionListItemRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateExceptionListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateExceptionListItemRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return 
c.Client.Do(req) +} + +func (c *Client) CreateExceptionListItem(ctx context.Context, body CreateExceptionListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateExceptionListItemRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateExceptionListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateExceptionListItemRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateExceptionListItem(ctx context.Context, body UpdateExceptionListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateExceptionListItemRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindExceptionListItems(ctx context.Context, params *FindExceptionListItemsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindExceptionListItemsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadExceptionListSummary(ctx context.Context, params *ReadExceptionListSummaryParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadExceptionListSummaryRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateSharedExceptionListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateSharedExceptionListRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateSharedExceptionList(ctx context.Context, body CreateSharedExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateSharedExceptionListRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFeatures(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFeaturesRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentDownloadSources(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentDownloadSourcesRequest(c.Server) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentDownloadSourcesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentDownloadSourcesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentDownloadSources(ctx context.Context, body PostFleetAgentDownloadSourcesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentDownloadSourcesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetAgentDownloadSourcesSourceid(ctx context.Context, sourceId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetAgentDownloadSourcesSourceidRequest(c.Server, sourceId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentDownloadSourcesSourceid(ctx context.Context, sourceId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentDownloadSourcesSourceidRequest(c.Server, sourceId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetAgentDownloadSourcesSourceidWithBody(ctx context.Context, sourceId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetAgentDownloadSourcesSourceidRequestWithBody(c.Server, sourceId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetAgentDownloadSourcesSourceid(ctx context.Context, sourceId string, body PutFleetAgentDownloadSourcesSourceidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetAgentDownloadSourcesSourceidRequest(c.Server, sourceId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentPolicies(ctx context.Context, params *GetFleetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentPoliciesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentPoliciesWithBody(ctx context.Context, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentPoliciesRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + 
} + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentPolicies(ctx context.Context, params *PostFleetAgentPoliciesParams, body PostFleetAgentPoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentPoliciesRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentPoliciesBulkGetWithBody(ctx context.Context, params *PostFleetAgentPoliciesBulkGetParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentPoliciesBulkGetRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentPoliciesBulkGet(ctx context.Context, params *PostFleetAgentPoliciesBulkGetParams, body PostFleetAgentPoliciesBulkGetJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentPoliciesBulkGetRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentPoliciesDeleteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentPoliciesDeleteRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentPoliciesDelete(ctx context.Context, body PostFleetAgentPoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentPoliciesDeleteRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentPoliciesOutputsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentPoliciesOutputsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentPoliciesOutputs(ctx context.Context, body PostFleetAgentPoliciesOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentPoliciesOutputsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentPoliciesAgentpolicyid(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err 
:= NewGetFleetAgentPoliciesAgentpolicyidRequest(c.Server, agentPolicyId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetAgentPoliciesAgentpolicyidWithBody(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetAgentPoliciesAgentpolicyidRequestWithBody(c.Server, agentPolicyId, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetAgentPoliciesAgentpolicyid(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, body PutFleetAgentPoliciesAgentpolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetAgentPoliciesAgentpolicyidRequest(c.Server, agentPolicyId, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatus(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusRequest(c.Server, agentPolicyId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentPoliciesAgentpolicyidCopyWithBody(ctx context.Context, agentPolicyId string, params *PostFleetAgentPoliciesAgentpolicyidCopyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentPoliciesAgentpolicyidCopyRequestWithBody(c.Server, agentPolicyId, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentPoliciesAgentpolicyidCopy(ctx context.Context, agentPolicyId string, params *PostFleetAgentPoliciesAgentpolicyidCopyParams, body PostFleetAgentPoliciesAgentpolicyidCopyJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentPoliciesAgentpolicyidCopyRequest(c.Server, agentPolicyId, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentPoliciesAgentpolicyidDownload(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidDownloadParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentPoliciesAgentpolicyidDownloadRequest(c.Server, agentPolicyId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentPoliciesAgentpolicyidFull(ctx context.Context, agentPolicyId 
string, params *GetFleetAgentPoliciesAgentpolicyidFullParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentPoliciesAgentpolicyidFullRequest(c.Server, agentPolicyId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentPoliciesAgentpolicyidOutputs(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentPoliciesAgentpolicyidOutputsRequest(c.Server, agentPolicyId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentStatus(ctx context.Context, params *GetFleetAgentStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentStatusRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentStatusData(ctx context.Context, params *GetFleetAgentStatusDataParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentStatusDataRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgents(ctx context.Context, params *GetFleetAgentsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgents(ctx context.Context, body PostFleetAgentsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentsActionStatus(ctx context.Context, params *GetFleetAgentsActionStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentsActionStatusRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsActionsActionidCancel(ctx context.Context, actionId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsActionsActionidCancelRequest(c.Server, actionId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) 
+ if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentsAvailableVersions(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentsAvailableVersionsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsBulkReassignWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsBulkReassignRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsBulkReassign(ctx context.Context, body PostFleetAgentsBulkReassignJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsBulkReassignRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsBulkRequestDiagnosticsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsBulkRequestDiagnosticsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsBulkRequestDiagnostics(ctx context.Context, body PostFleetAgentsBulkRequestDiagnosticsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsBulkRequestDiagnosticsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsBulkUnenrollWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsBulkUnenrollRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsBulkUnenroll(ctx context.Context, body PostFleetAgentsBulkUnenrollJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsBulkUnenrollRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsBulkUpdateAgentTagsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsBulkUpdateAgentTagsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + 
return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsBulkUpdateAgentTags(ctx context.Context, body PostFleetAgentsBulkUpdateAgentTagsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsBulkUpdateAgentTagsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsBulkUpgradeWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsBulkUpgradeRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsBulkUpgrade(ctx context.Context, body PostFleetAgentsBulkUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsBulkUpgradeRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetAgentsFilesFileid(ctx context.Context, fileId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetAgentsFilesFileidRequest(c.Server, fileId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentsFilesFileidFilename(ctx context.Context, fileId string, fileName string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentsFilesFileidFilenameRequest(c.Server, fileId, fileName) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentsSetup(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentsSetupRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsSetup(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsSetupRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentsTags(ctx context.Context, params *GetFleetAgentsTagsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentsTagsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetAgentsAgentid(ctx context.Context, agentId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetAgentsAgentidRequest(c.Server, agentId) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentsAgentid(ctx context.Context, agentId string, params *GetFleetAgentsAgentidParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentsAgentidRequest(c.Server, agentId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetAgentsAgentidWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetAgentsAgentidRequestWithBody(c.Server, agentId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetAgentsAgentid(ctx context.Context, agentId string, body PutFleetAgentsAgentidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetAgentsAgentidRequest(c.Server, agentId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsAgentidActionsWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsAgentidActionsRequestWithBody(c.Server, agentId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsAgentidActions(ctx context.Context, agentId string, body PostFleetAgentsAgentidActionsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsAgentidActionsRequest(c.Server, agentId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsAgentidReassignWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsAgentidReassignRequestWithBody(c.Server, agentId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsAgentidReassign(ctx context.Context, agentId string, body PostFleetAgentsAgentidReassignJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsAgentidReassignRequest(c.Server, agentId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsAgentidRequestDiagnosticsWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewPostFleetAgentsAgentidRequestDiagnosticsRequestWithBody(c.Server, agentId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsAgentidRequestDiagnostics(ctx context.Context, agentId string, body PostFleetAgentsAgentidRequestDiagnosticsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsAgentidRequestDiagnosticsRequest(c.Server, agentId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsAgentidUnenrollWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsAgentidUnenrollRequestWithBody(c.Server, agentId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsAgentidUnenroll(ctx context.Context, agentId string, body PostFleetAgentsAgentidUnenrollJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsAgentidUnenrollRequest(c.Server, agentId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsAgentidUpgradeWithBody(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsAgentidUpgradeRequestWithBody(c.Server, agentId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetAgentsAgentidUpgrade(ctx context.Context, agentId string, body PostFleetAgentsAgentidUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetAgentsAgentidUpgradeRequest(c.Server, agentId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetAgentsAgentidUploads(ctx context.Context, agentId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetAgentsAgentidUploadsRequest(c.Server, agentId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetCheckPermissions(ctx context.Context, params *GetFleetCheckPermissionsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetCheckPermissionsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetDataStreams(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, 
error) { + req, err := NewGetFleetDataStreamsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEnrollmentApiKeys(ctx context.Context, params *GetFleetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEnrollmentApiKeysRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEnrollmentApiKeysWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEnrollmentApiKeysRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEnrollmentApiKeys(ctx context.Context, body PostFleetEnrollmentApiKeysJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEnrollmentApiKeysRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetEnrollmentApiKeysKeyid(ctx context.Context, keyId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetEnrollmentApiKeysKeyidRequest(c.Server, keyId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEnrollmentApiKeysKeyid(ctx context.Context, keyId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEnrollmentApiKeysKeyidRequest(c.Server, keyId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmBulkAssetsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmBulkAssetsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmBulkAssets(ctx context.Context, body PostFleetEpmBulkAssetsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmBulkAssetsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmCategories(ctx context.Context, params *GetFleetEpmCategoriesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmCategoriesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return 
c.Client.Do(req) +} + +func (c *Client) PostFleetEpmCustomIntegrationsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmCustomIntegrationsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmCustomIntegrations(ctx context.Context, body PostFleetEpmCustomIntegrationsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmCustomIntegrationsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetEpmCustomIntegrationsPkgnameWithBody(ctx context.Context, pkgName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetEpmCustomIntegrationsPkgnameRequestWithBody(c.Server, pkgName, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetEpmCustomIntegrationsPkgname(ctx context.Context, pkgName string, body PutFleetEpmCustomIntegrationsPkgnameJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetEpmCustomIntegrationsPkgnameRequest(c.Server, pkgName, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmDataStreams(ctx context.Context, params *GetFleetEpmDataStreamsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmDataStreamsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmPackages(ctx context.Context, params *GetFleetEpmPackagesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmPackagesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesWithBody(ctx context.Context, params *PostFleetEpmPackagesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesBulkWithBody(ctx context.Context, params *PostFleetEpmPackagesBulkParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesBulkRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, 
reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesBulk(ctx context.Context, params *PostFleetEpmPackagesBulkParams, body PostFleetEpmPackagesBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesBulkRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesBulkUninstallWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesBulkUninstallRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesBulkUninstall(ctx context.Context, body PostFleetEpmPackagesBulkUninstallJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesBulkUninstallRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmPackagesBulkUninstallTaskid(ctx context.Context, taskId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmPackagesBulkUninstallTaskidRequest(c.Server, taskId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesBulkUpgradeWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesBulkUpgradeRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesBulkUpgrade(ctx context.Context, body PostFleetEpmPackagesBulkUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesBulkUpgradeRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmPackagesBulkUpgradeTaskid(ctx context.Context, taskId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmPackagesBulkUpgradeTaskidRequest(c.Server, taskId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmPackagesInstalled(ctx context.Context, params *GetFleetEpmPackagesInstalledParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmPackagesInstalledRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + 
return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmPackagesLimited(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmPackagesLimitedRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmPackagesPkgnameStats(ctx context.Context, pkgName string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmPackagesPkgnameStatsRequest(c.Server, pkgName) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetEpmPackagesPkgnamePkgversionRequest(c.Server, pkgName, pkgVersion, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmPackagesPkgnamePkgversionRequest(c.Server, pkgName, pkgVersion, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesPkgnamePkgversionWithBody(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesPkgnamePkgversionRequestWithBody(c.Server, pkgName, pkgVersion, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, body PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesPkgnamePkgversionRequest(c.Server, pkgName, pkgVersion, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetEpmPackagesPkgnamePkgversionWithBody(ctx context.Context, pkgName string, pkgVersion string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetEpmPackagesPkgnamePkgversionRequestWithBody(c.Server, pkgName, pkgVersion, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetEpmPackagesPkgnamePkgversion(ctx 
context.Context, pkgName string, pkgVersion string, body PutFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetEpmPackagesPkgnamePkgversionRequest(c.Server, pkgName, pkgVersion, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssets(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsRequest(c.Server, pkgName, pkgVersion, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssets(ctx context.Context, pkgName string, pkgVersion string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsRequest(c.Server, pkgName, pkgVersion) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithBody(ctx context.Context, pkgName string, pkgVersion string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesPkgnamePkgversionKibanaAssetsRequestWithBody(c.Server, pkgName, pkgVersion, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesPkgnamePkgversionKibanaAssets(ctx context.Context, pkgName string, pkgVersion string, body PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesPkgnamePkgversionKibanaAssetsRequest(c.Server, pkgName, pkgVersion, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithBody(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeRequestWithBody(c.Server, pkgName, pkgVersion, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorize(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams, body PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + 
req, err := NewPostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeRequest(c.Server, pkgName, pkgVersion, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmPackagesPkgnamePkgversionFilepath(ctx context.Context, pkgName string, pkgVersion string, filePath string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmPackagesPkgnamePkgversionFilepathRequest(c.Server, pkgName, pkgVersion, filePath) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmTemplatesPkgnamePkgversionInputs(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmTemplatesPkgnamePkgversionInputsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmTemplatesPkgnamePkgversionInputsRequest(c.Server, pkgName, pkgVersion, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetEpmVerificationKeyId(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetEpmVerificationKeyIdRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetFleetServerHosts(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetFleetServerHostsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetFleetServerHostsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetFleetServerHostsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetFleetServerHosts(ctx context.Context, body PostFleetFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetFleetServerHostsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetFleetServerHostsItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetFleetServerHostsItemidRequest(c.Server, itemId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetFleetServerHostsItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetFleetServerHostsItemidRequest(c.Server, itemId) + if err != 
nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetFleetServerHostsItemidWithBody(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetFleetServerHostsItemidRequestWithBody(c.Server, itemId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetFleetServerHostsItemid(ctx context.Context, itemId string, body PutFleetFleetServerHostsItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetFleetServerHostsItemidRequest(c.Server, itemId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetHealthCheckWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetHealthCheckRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetHealthCheck(ctx context.Context, body PostFleetHealthCheckJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetHealthCheckRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetKubernetes(ctx context.Context, params *GetFleetKubernetesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetKubernetesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetKubernetesDownload(ctx context.Context, params *GetFleetKubernetesDownloadParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetKubernetesDownloadRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetLogstashApiKeys(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetLogstashApiKeysRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetMessageSigningServiceRotateKeyPair(ctx context.Context, params *PostFleetMessageSigningServiceRotateKeyPairParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetMessageSigningServiceRotateKeyPairRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + 
return c.Client.Do(req) +} + +func (c *Client) GetFleetOutputs(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetOutputsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetOutputsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetOutputsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetOutputs(ctx context.Context, body PostFleetOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetOutputsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetOutputsOutputid(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetOutputsOutputidRequest(c.Server, outputId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetOutputsOutputid(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetOutputsOutputidRequest(c.Server, outputId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetOutputsOutputidWithBody(ctx context.Context, outputId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetOutputsOutputidRequestWithBody(c.Server, outputId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetOutputsOutputid(ctx context.Context, outputId string, body PutFleetOutputsOutputidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetOutputsOutputidRequest(c.Server, outputId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetOutputsOutputidHealth(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetOutputsOutputidHealthRequest(c.Server, outputId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetPackagePolicies(ctx context.Context, params *GetFleetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetPackagePoliciesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetPackagePoliciesWithBody(ctx context.Context, params *PostFleetPackagePoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetPackagePoliciesRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetPackagePolicies(ctx context.Context, params *PostFleetPackagePoliciesParams, body PostFleetPackagePoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetPackagePoliciesRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetPackagePoliciesBulkGetWithBody(ctx context.Context, params *PostFleetPackagePoliciesBulkGetParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetPackagePoliciesBulkGetRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetPackagePoliciesBulkGet(ctx context.Context, params *PostFleetPackagePoliciesBulkGetParams, body PostFleetPackagePoliciesBulkGetJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetPackagePoliciesBulkGetRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetPackagePoliciesDeleteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetPackagePoliciesDeleteRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetPackagePoliciesDelete(ctx context.Context, body PostFleetPackagePoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetPackagePoliciesDeleteRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetPackagePoliciesUpgradeWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetPackagePoliciesUpgradeRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetPackagePoliciesUpgrade(ctx context.Context, body PostFleetPackagePoliciesUpgradeJSONRequestBody, reqEditors 
...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetPackagePoliciesUpgradeRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetPackagePoliciesUpgradeDryrunWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetPackagePoliciesUpgradeDryrunRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetPackagePoliciesUpgradeDryrun(ctx context.Context, body PostFleetPackagePoliciesUpgradeDryrunJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetPackagePoliciesUpgradeDryrunRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *DeleteFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetPackagePoliciesPackagepolicyidRequest(c.Server, packagePolicyId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *GetFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetPackagePoliciesPackagepolicyidRequest(c.Server, packagePolicyId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetPackagePoliciesPackagepolicyidWithBody(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetPackagePoliciesPackagepolicyidRequestWithBody(c.Server, packagePolicyId, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, body PutFleetPackagePoliciesPackagepolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetPackagePoliciesPackagepolicyidRequest(c.Server, packagePolicyId, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetProxies(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetProxiesRequest(c.Server) + if err != nil { + return nil, err + } + 
req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetProxiesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetProxiesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetProxies(ctx context.Context, body PostFleetProxiesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetProxiesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteFleetProxiesItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteFleetProxiesItemidRequest(c.Server, itemId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetProxiesItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetProxiesItemidRequest(c.Server, itemId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetProxiesItemidWithBody(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetProxiesItemidRequestWithBody(c.Server, itemId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetProxiesItemid(ctx context.Context, itemId string, body PutFleetProxiesItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetProxiesItemidRequest(c.Server, itemId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetRemoteSyncedIntegrationsStatus(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetRemoteSyncedIntegrationsStatusRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetRemoteSyncedIntegrationsOutputidRemoteStatusRequest(c.Server, outputId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetServiceTokensWithBody(ctx context.Context, contentType string, body 
io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetServiceTokensRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetServiceTokens(ctx context.Context, body PostFleetServiceTokensJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetServiceTokensRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetSettings(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetSettingsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetSettingsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetSettingsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetSettings(ctx context.Context, body PutFleetSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetSettingsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostFleetSetup(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostFleetSetupRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetSpaceSettings(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetSpaceSettingsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetSpaceSettingsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetSpaceSettingsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutFleetSpaceSettings(ctx context.Context, body PutFleetSpaceSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutFleetSpaceSettingsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetUninstallTokens(ctx context.Context, params *GetFleetUninstallTokensParams, 
reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetUninstallTokensRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetFleetUninstallTokensUninstalltokenid(ctx context.Context, uninstallTokenId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetFleetUninstallTokensUninstalltokenidRequest(c.Server, uninstallTokenId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteList(ctx context.Context, params *DeleteListParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteListRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadList(ctx context.Context, params *ReadListParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadListRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchListRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchList(ctx context.Context, body PatchListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchListRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateListRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateList(ctx context.Context, body CreateListJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateListRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateListWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateListRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateList(ctx context.Context, body UpdateListJSONRequestBody, reqEditors 
...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateListRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindLists(ctx context.Context, params *FindListsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindListsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteListIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteListIndexRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadListIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadListIndexRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateListIndex(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateListIndexRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteListItem(ctx context.Context, params *DeleteListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteListItemRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadListItem(ctx context.Context, params *ReadListItemParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadListItemRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchListItemRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchListItem(ctx context.Context, body PatchListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchListItemRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateListItemRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err 
:= c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateListItem(ctx context.Context, body CreateListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateListItemRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateListItemWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateListItemRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateListItem(ctx context.Context, body UpdateListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateListItemRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ExportListItems(ctx context.Context, params *ExportListItemsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewExportListItemsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindListItems(ctx context.Context, params *FindListItemsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindListItemsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ImportListItemsWithBody(ctx context.Context, params *ImportListItemsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewImportListItemsRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadListPrivileges(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadListPrivilegesRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteLogstashPipeline(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteLogstashPipelineRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetLogstashPipeline(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetLogstashPipelineRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); 
err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutLogstashPipelineWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutLogstashPipelineRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutLogstashPipeline(ctx context.Context, id string, body PutLogstashPipelineJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutLogstashPipelineRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetLogstashPipelines(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetLogstashPipelinesRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostMaintenanceWindowIdArchive(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostMaintenanceWindowIdArchiveRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostMaintenanceWindowIdUnarchive(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostMaintenanceWindowIdUnarchiveRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) MlSync(ctx context.Context, params *MlSyncParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewMlSyncRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteNoteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteNoteRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteNote(ctx context.Context, body DeleteNoteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteNoteRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetNotes(ctx context.Context, params *GetNotesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetNotesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return 
c.Client.Do(req) +} + +func (c *Client) PersistNoteRouteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPersistNoteRouteRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PersistNoteRoute(ctx context.Context, body PersistNoteRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPersistNoteRouteRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ObservabilityAiAssistantChatCompleteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewObservabilityAiAssistantChatCompleteRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ObservabilityAiAssistantChatComplete(ctx context.Context, body ObservabilityAiAssistantChatCompleteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewObservabilityAiAssistantChatCompleteRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryFindLiveQueries(ctx context.Context, params *OsqueryFindLiveQueriesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryFindLiveQueriesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryCreateLiveQueryWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryCreateLiveQueryRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryCreateLiveQuery(ctx context.Context, body OsqueryCreateLiveQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryCreateLiveQueryRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryGetLiveQueryDetails(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryGetLiveQueryDetailsRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryGetLiveQueryResults(ctx context.Context, id string, actionId string, params *OsqueryGetLiveQueryResultsParams, reqEditors ...RequestEditorFn) 
(*http.Response, error) { + req, err := NewOsqueryGetLiveQueryResultsRequest(c.Server, id, actionId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryFindPacks(ctx context.Context, params *OsqueryFindPacksParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryFindPacksRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryCreatePacksWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryCreatePacksRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryCreatePacks(ctx context.Context, body OsqueryCreatePacksJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryCreatePacksRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryDeletePacks(ctx context.Context, id SecurityOsqueryAPIPackId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryDeletePacksRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryGetPacksDetails(ctx context.Context, id SecurityOsqueryAPIPackId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryGetPacksDetailsRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryUpdatePacksWithBody(ctx context.Context, id SecurityOsqueryAPIPackId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryUpdatePacksRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryUpdatePacks(ctx context.Context, id SecurityOsqueryAPIPackId, body OsqueryUpdatePacksJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryUpdatePacksRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryFindSavedQueries(ctx context.Context, params *OsqueryFindSavedQueriesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryFindSavedQueriesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } 
+ return c.Client.Do(req) +} + +func (c *Client) OsqueryCreateSavedQueryWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryCreateSavedQueryRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryCreateSavedQuery(ctx context.Context, body OsqueryCreateSavedQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryCreateSavedQueryRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryDeleteSavedQuery(ctx context.Context, id SecurityOsqueryAPISavedQueryId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryDeleteSavedQueryRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryGetSavedQueryDetails(ctx context.Context, id SecurityOsqueryAPISavedQueryId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryGetSavedQueryDetailsRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryUpdateSavedQueryWithBody(ctx context.Context, id SecurityOsqueryAPISavedQueryId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryUpdateSavedQueryRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) OsqueryUpdateSavedQuery(ctx context.Context, id SecurityOsqueryAPISavedQueryId, body OsqueryUpdateSavedQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewOsqueryUpdateSavedQueryRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PersistPinnedEventRouteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPersistPinnedEventRouteRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PersistPinnedEventRoute(ctx context.Context, body PersistPinnedEventRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPersistPinnedEventRouteRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CleanUpRiskEngine(ctx context.Context, reqEditors ...RequestEditorFn) 
(*http.Response, error) { + req, err := NewCleanUpRiskEngineRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ConfigureRiskEngineSavedObjectWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewConfigureRiskEngineSavedObjectRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ConfigureRiskEngineSavedObject(ctx context.Context, body ConfigureRiskEngineSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewConfigureRiskEngineSavedObjectRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ScheduleRiskEngineNowWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewScheduleRiskEngineNowRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ScheduleRiskEngineNow(ctx context.Context, body ScheduleRiskEngineNowJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewScheduleRiskEngineNowRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkCreateSavedObjectsWithBody(ctx context.Context, params *BulkCreateSavedObjectsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkCreateSavedObjectsRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkCreateSavedObjects(ctx context.Context, params *BulkCreateSavedObjectsParams, body BulkCreateSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkCreateSavedObjectsRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkDeleteSavedObjectsWithBody(ctx context.Context, params *BulkDeleteSavedObjectsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkDeleteSavedObjectsRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkDeleteSavedObjects(ctx context.Context, params *BulkDeleteSavedObjectsParams, body BulkDeleteSavedObjectsJSONRequestBody, reqEditors 
...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkDeleteSavedObjectsRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkGetSavedObjectsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkGetSavedObjectsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkGetSavedObjects(ctx context.Context, body BulkGetSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkGetSavedObjectsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkResolveSavedObjectsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkResolveSavedObjectsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkResolveSavedObjects(ctx context.Context, body BulkResolveSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkResolveSavedObjectsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkUpdateSavedObjectsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkUpdateSavedObjectsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkUpdateSavedObjects(ctx context.Context, body BulkUpdateSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkUpdateSavedObjectsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSavedObjectsExportWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSavedObjectsExportRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSavedObjectsExport(ctx context.Context, body PostSavedObjectsExportJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSavedObjectsExportRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if 
err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindSavedObjects(ctx context.Context, params *FindSavedObjectsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindSavedObjectsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSavedObjectsImportWithBody(ctx context.Context, params *PostSavedObjectsImportParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSavedObjectsImportRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ResolveImportErrorsWithBody(ctx context.Context, params *ResolveImportErrorsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewResolveImportErrorsRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ResolveSavedObject(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewResolveSavedObjectRequest(c.Server, pType, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateSavedObjectWithBody(ctx context.Context, pType SavedObjectsSavedObjectType, params *CreateSavedObjectParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateSavedObjectRequestWithBody(c.Server, pType, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateSavedObject(ctx context.Context, pType SavedObjectsSavedObjectType, params *CreateSavedObjectParams, body CreateSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateSavedObjectRequest(c.Server, pType, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetSavedObject(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetSavedObjectRequest(c.Server, pType, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateSavedObjectIdWithBody(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, params *CreateSavedObjectIdParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewCreateSavedObjectIdRequestWithBody(c.Server, pType, id, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateSavedObjectId(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, params *CreateSavedObjectIdParams, body CreateSavedObjectIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateSavedObjectIdRequest(c.Server, pType, id, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateSavedObjectWithBody(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateSavedObjectRequestWithBody(c.Server, pType, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateSavedObject(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, body UpdateSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateSavedObjectRequest(c.Server, pType, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetSecurityRole(ctx context.Context, params *GetSecurityRoleParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetSecurityRoleRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSecurityRoleQueryWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSecurityRoleQueryRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSecurityRoleQuery(ctx context.Context, body PostSecurityRoleQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSecurityRoleQueryRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteSecurityRoleName(ctx context.Context, name string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteSecurityRoleNameRequest(c.Server, name) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetSecurityRoleName(ctx context.Context, name string, params *GetSecurityRoleNameParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewGetSecurityRoleNameRequest(c.Server, name, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutSecurityRoleNameWithBody(ctx context.Context, name string, params *PutSecurityRoleNameParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutSecurityRoleNameRequestWithBody(c.Server, name, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutSecurityRoleName(ctx context.Context, name string, params *PutSecurityRoleNameParams, body PutSecurityRoleNameJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutSecurityRoleNameRequest(c.Server, name, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSecurityRolesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSecurityRolesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSecurityRoles(ctx context.Context, body PostSecurityRolesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSecurityRolesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSecuritySessionInvalidateWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSecuritySessionInvalidateRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSecuritySessionInvalidate(ctx context.Context, body PostSecuritySessionInvalidateJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSecuritySessionInvalidateRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PerformAnonymizationFieldsBulkActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPerformAnonymizationFieldsBulkActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PerformAnonymizationFieldsBulkAction(ctx context.Context, body PerformAnonymizationFieldsBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewPerformAnonymizationFieldsBulkActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindAnonymizationFields(ctx context.Context, params *FindAnonymizationFieldsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindAnonymizationFieldsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ChatCompleteWithBody(ctx context.Context, params *ChatCompleteParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewChatCompleteRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ChatComplete(ctx context.Context, params *ChatCompleteParams, body ChatCompleteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewChatCompleteRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteAllConversationsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteAllConversationsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteAllConversations(ctx context.Context, body DeleteAllConversationsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteAllConversationsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateConversationWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateConversationRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateConversation(ctx context.Context, body CreateConversationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateConversationRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindConversations(ctx context.Context, params *FindConversationsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindConversationsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + 
return c.Client.Do(req) +} + +func (c *Client) DeleteConversation(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteConversationRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadConversation(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadConversationRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateConversationWithBody(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateConversationRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateConversation(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, body UpdateConversationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateConversationRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateKnowledgeBaseEntryWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateKnowledgeBaseEntryRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateKnowledgeBaseEntry(ctx context.Context, body CreateKnowledgeBaseEntryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateKnowledgeBaseEntryRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PerformKnowledgeBaseEntryBulkActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPerformKnowledgeBaseEntryBulkActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PerformKnowledgeBaseEntryBulkAction(ctx context.Context, body PerformKnowledgeBaseEntryBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPerformKnowledgeBaseEntryBulkActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindKnowledgeBaseEntries(ctx 
context.Context, params *FindKnowledgeBaseEntriesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindKnowledgeBaseEntriesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteKnowledgeBaseEntry(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteKnowledgeBaseEntryRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadKnowledgeBaseEntry(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadKnowledgeBaseEntryRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateKnowledgeBaseEntryWithBody(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateKnowledgeBaseEntryRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateKnowledgeBaseEntry(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, body UpdateKnowledgeBaseEntryJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateKnowledgeBaseEntryRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ReadKnowledgeBase(ctx context.Context, resource string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewReadKnowledgeBaseRequest(c.Server, resource) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateKnowledgeBase(ctx context.Context, resource string, params *CreateKnowledgeBaseParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateKnowledgeBaseRequest(c.Server, resource, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PerformPromptsBulkActionWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPerformPromptsBulkActionRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PerformPromptsBulkAction(ctx context.Context, body PerformPromptsBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewPerformPromptsBulkActionRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindPrompts(ctx context.Context, params *FindPromptsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindPromptsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostUrlWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostUrlRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostUrl(ctx context.Context, body PostUrlJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostUrlRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ResolveUrl(ctx context.Context, slug string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewResolveUrlRequest(c.Server, slug) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteUrl(ctx context.Context, id ShortURLAPIsIdParam, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteUrlRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetUrl(ctx context.Context, id ShortURLAPIsIdParam, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetUrlRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesCopySavedObjectsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesCopySavedObjectsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesCopySavedObjects(ctx context.Context, body PostSpacesCopySavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesCopySavedObjectsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesDisableLegacyUrlAliasesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewPostSpacesDisableLegacyUrlAliasesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesDisableLegacyUrlAliases(ctx context.Context, body PostSpacesDisableLegacyUrlAliasesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesDisableLegacyUrlAliasesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesGetShareableReferencesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesGetShareableReferencesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesGetShareableReferences(ctx context.Context, body PostSpacesGetShareableReferencesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesGetShareableReferencesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesResolveCopySavedObjectsErrorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesResolveCopySavedObjectsErrorsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesResolveCopySavedObjectsErrors(ctx context.Context, body PostSpacesResolveCopySavedObjectsErrorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesResolveCopySavedObjectsErrorsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesUpdateObjectsSpacesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesUpdateObjectsSpacesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesUpdateObjectsSpaces(ctx context.Context, body PostSpacesUpdateObjectsSpacesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesUpdateObjectsSpacesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetSpacesSpace(ctx context.Context, params *GetSpacesSpaceParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + 
req, err := NewGetSpacesSpaceRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesSpaceWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesSpaceRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSpacesSpace(ctx context.Context, body PostSpacesSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSpacesSpaceRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteSpacesSpaceId(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteSpacesSpaceIdRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetSpacesSpaceId(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetSpacesSpaceIdRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutSpacesSpaceIdWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutSpacesSpaceIdRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutSpacesSpaceId(ctx context.Context, id string, body PutSpacesSpaceIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutSpacesSpaceIdRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStatus(ctx context.Context, params *GetStatusParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStatusRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreams(ctx context.Context, body GetStreamsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewGetStreamsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsDisableWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsDisableRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsDisable(ctx context.Context, body PostStreamsDisableJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsDisableRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsEnableWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsEnableRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsEnable(ctx context.Context, body PostStreamsEnableJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsEnableRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsResyncWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsResyncRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsResync(ctx context.Context, body PostStreamsResyncJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsResyncRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteStreamsNameWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteStreamsNameRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteStreamsName(ctx context.Context, name string, body DeleteStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteStreamsNameRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) 
GetStreamsNameWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsName(ctx context.Context, name string, body GetStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutStreamsNameRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsName(ctx context.Context, name string, body PutStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutStreamsNameRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameForkWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsNameForkRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameFork(ctx context.Context, name string, body PostStreamsNameForkJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsNameForkRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameGroupWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameGroupRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameGroup(ctx context.Context, name string, body GetStreamsNameGroupJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameGroupRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameGroupWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) 
(*http.Response, error) { + req, err := NewPutStreamsNameGroupRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameGroup(ctx context.Context, name string, body PutStreamsNameGroupJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutStreamsNameGroupRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameIngestWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameIngestRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameIngest(ctx context.Context, name string, body GetStreamsNameIngestJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameIngestRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameIngestWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutStreamsNameIngestRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameIngest(ctx context.Context, name string, body PutStreamsNameIngestJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutStreamsNameIngestRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameContentExportWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsNameContentExportRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameContentExport(ctx context.Context, name string, body PostStreamsNameContentExportJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsNameContentExportRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameContentImportWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { 
+ req, err := NewPostStreamsNameContentImportRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameDashboardsWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameDashboardsRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameDashboards(ctx context.Context, name string, body GetStreamsNameDashboardsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameDashboardsRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameDashboardsBulkWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsNameDashboardsBulkRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameDashboardsBulk(ctx context.Context, name string, body PostStreamsNameDashboardsBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsNameDashboardsBulkRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteStreamsNameDashboardsDashboardidWithBody(ctx context.Context, name string, dashboardId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteStreamsNameDashboardsDashboardidRequestWithBody(c.Server, name, dashboardId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteStreamsNameDashboardsDashboardid(ctx context.Context, name string, dashboardId string, body DeleteStreamsNameDashboardsDashboardidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteStreamsNameDashboardsDashboardidRequest(c.Server, name, dashboardId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameDashboardsDashboardidWithBody(ctx context.Context, name string, dashboardId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutStreamsNameDashboardsDashboardidRequestWithBody(c.Server, name, dashboardId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, 
reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameDashboardsDashboardid(ctx context.Context, name string, dashboardId string, body PutStreamsNameDashboardsDashboardidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutStreamsNameDashboardsDashboardidRequest(c.Server, name, dashboardId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameQueriesWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameQueriesRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameQueries(ctx context.Context, name string, body GetStreamsNameQueriesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameQueriesRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameQueriesBulkWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsNameQueriesBulkRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameQueriesBulk(ctx context.Context, name string, body PostStreamsNameQueriesBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsNameQueriesBulkRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteStreamsNameQueriesQueryidWithBody(ctx context.Context, name string, queryId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteStreamsNameQueriesQueryidRequestWithBody(c.Server, name, queryId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteStreamsNameQueriesQueryid(ctx context.Context, name string, queryId string, body DeleteStreamsNameQueriesQueryidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteStreamsNameQueriesQueryidRequest(c.Server, name, queryId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameQueriesQueryidWithBody(ctx context.Context, name string, queryId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewPutStreamsNameQueriesQueryidRequestWithBody(c.Server, name, queryId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameQueriesQueryid(ctx context.Context, name string, queryId string, body PutStreamsNameQueriesQueryidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutStreamsNameQueriesQueryidRequest(c.Server, name, queryId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameRulesWithBody(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameRulesRequestWithBody(c.Server, name, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameRules(ctx context.Context, name string, body GetStreamsNameRulesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameRulesRequest(c.Server, name, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteStreamsNameRulesRuleidWithBody(ctx context.Context, name string, ruleId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteStreamsNameRulesRuleidRequestWithBody(c.Server, name, ruleId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteStreamsNameRulesRuleid(ctx context.Context, name string, ruleId string, body DeleteStreamsNameRulesRuleidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteStreamsNameRulesRuleidRequest(c.Server, name, ruleId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameRulesRuleidWithBody(ctx context.Context, name string, ruleId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutStreamsNameRulesRuleidRequestWithBody(c.Server, name, ruleId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutStreamsNameRulesRuleid(ctx context.Context, name string, ruleId string, body PutStreamsNameRulesRuleidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutStreamsNameRulesRuleidRequest(c.Server, name, ruleId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) 
GetStreamsNameSignificantEventsWithBody(ctx context.Context, name string, params *GetStreamsNameSignificantEventsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameSignificantEventsRequestWithBody(c.Server, name, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameSignificantEvents(ctx context.Context, name string, params *GetStreamsNameSignificantEventsParams, body GetStreamsNameSignificantEventsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameSignificantEventsRequest(c.Server, name, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameSignificantEventsGenerateWithBody(ctx context.Context, name string, params *GetStreamsNameSignificantEventsGenerateParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameSignificantEventsGenerateRequestWithBody(c.Server, name, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetStreamsNameSignificantEventsGenerate(ctx context.Context, name string, params *GetStreamsNameSignificantEventsGenerateParams, body GetStreamsNameSignificantEventsGenerateJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetStreamsNameSignificantEventsGenerateRequest(c.Server, name, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameSignificantEventsPreviewWithBody(ctx context.Context, name string, params *PostStreamsNameSignificantEventsPreviewParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsNameSignificantEventsPreviewRequestWithBody(c.Server, name, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostStreamsNameSignificantEventsPreview(ctx context.Context, name string, params *PostStreamsNameSignificantEventsPreviewParams, body PostStreamsNameSignificantEventsPreviewJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostStreamsNameSignificantEventsPreviewRequest(c.Server, name, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSyntheticsMonitorTest(ctx context.Context, monitorId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSyntheticsMonitorTestRequest(c.Server, monitorId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return 
nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetSyntheticMonitors(ctx context.Context, params *GetSyntheticMonitorsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetSyntheticMonitorsRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSyntheticMonitorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSyntheticMonitorsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostSyntheticMonitors(ctx context.Context, body PostSyntheticMonitorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostSyntheticMonitorsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteSyntheticMonitorsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteSyntheticMonitorsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteSyntheticMonitors(ctx context.Context, body DeleteSyntheticMonitorsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteSyntheticMonitorsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteSyntheticMonitor(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteSyntheticMonitorRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetSyntheticMonitor(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetSyntheticMonitorRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutSyntheticMonitorWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutSyntheticMonitorRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutSyntheticMonitor(ctx context.Context, id string, body PutSyntheticMonitorJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutSyntheticMonitorRequest(c.Server, id, body) + if 
err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetParameters(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetParametersRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostParametersWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostParametersRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostParameters(ctx context.Context, body PostParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostParametersRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteParametersWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteParametersRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteParameters(ctx context.Context, body DeleteParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteParametersRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteParameter(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteParameterRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetParameter(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetParameterRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutParameterWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutParameterRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutParameter(ctx context.Context, id string, body PutParameterJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutParameterRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetPrivateLocations(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetPrivateLocationsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostPrivateLocationWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostPrivateLocationRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostPrivateLocation(ctx context.Context, body PostPrivateLocationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostPrivateLocationRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeletePrivateLocation(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeletePrivateLocationRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetPrivateLocation(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetPrivateLocationRequest(c.Server, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutPrivateLocationWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutPrivateLocationRequestWithBody(c.Server, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutPrivateLocation(ctx context.Context, id string, body PutPrivateLocationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutPrivateLocationRequest(c.Server, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) TaskManagerHealth(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewTaskManagerHealthRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteTimelinesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteTimelinesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteTimelines(ctx context.Context, body DeleteTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteTimelinesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetTimeline(ctx context.Context, params *GetTimelineParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetTimelineRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchTimelineWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchTimelineRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchTimeline(ctx context.Context, body PatchTimelineJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchTimelineRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateTimelinesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateTimelinesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateTimelines(ctx context.Context, body CreateTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateTimelinesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CopyTimelineWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCopyTimelineRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CopyTimeline(ctx context.Context, body CopyTimelineJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCopyTimelineRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetDraftTimelines(ctx context.Context, params *GetDraftTimelinesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetDraftTimelinesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CleanDraftTimelinesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCleanDraftTimelinesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CleanDraftTimelines(ctx context.Context, body CleanDraftTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCleanDraftTimelinesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ExportTimelinesWithBody(ctx context.Context, params *ExportTimelinesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewExportTimelinesRequestWithBody(c.Server, params, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ExportTimelines(ctx context.Context, params *ExportTimelinesParams, body ExportTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewExportTimelinesRequest(c.Server, params, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PersistFavoriteRouteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPersistFavoriteRouteRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PersistFavoriteRoute(ctx context.Context, body PersistFavoriteRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPersistFavoriteRouteRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ImportTimelinesWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewImportTimelinesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ImportTimelines(ctx context.Context, body ImportTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewImportTimelinesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) InstallPrepackedTimelinesWithBody(ctx 
context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewInstallPrepackedTimelinesRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) InstallPrepackedTimelines(ctx context.Context, body InstallPrepackedTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewInstallPrepackedTimelinesRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ResolveTimeline(ctx context.Context, params *ResolveTimelineParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewResolveTimelineRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetTimelines(ctx context.Context, params *GetTimelinesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetTimelinesRequest(c.Server, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetUpgradeStatus(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetUpgradeStatusRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetUptimeSettings(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetUptimeSettingsRequest(c.Server) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutUptimeSettingsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutUptimeSettingsRequestWithBody(c.Server, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutUptimeSettings(ctx context.Context, body PutUptimeSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutUptimeSettingsRequest(c.Server, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteActionsConnectorIdRequest(c.Server, spaceId, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetActionsConnectorId(ctx context.Context, 
spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetActionsConnectorIdRequest(c.Server, spaceId, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostActionsConnectorIdRequestWithBody(c.Server, spaceId, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostActionsConnectorIdRequest(c.Server, spaceId, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutActionsConnectorIdRequestWithBody(c.Server, spaceId, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PutActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPutActionsConnectorIdRequest(c.Server, spaceId, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetActionsConnectors(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetActionsConnectorsRequest(c.Server, spaceId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetAllDataViewsDefault(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetAllDataViewsDefaultRequest(c.Server, spaceId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateDataViewDefaultwWithBody(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateDataViewDefaultwRequestWithBody(c.Server, spaceId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateDataViewDefaultw(ctx context.Context, spaceId SpaceId, body CreateDataViewDefaultwJSONRequestBody, reqEditors 
...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateDataViewDefaultwRequest(c.Server, spaceId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteDataViewDefaultRequest(c.Server, spaceId, viewId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetDataViewDefaultRequest(c.Server, spaceId, viewId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateDataViewDefaultWithBody(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateDataViewDefaultRequestWithBody(c.Server, spaceId, viewId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateDataViewDefaultRequest(c.Server, spaceId, viewId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostMaintenanceWindowWithBody(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostMaintenanceWindowRequestWithBody(c.Server, spaceId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PostMaintenanceWindow(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPostMaintenanceWindowRequest(c.Server, spaceId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteMaintenanceWindowIdRequest(c.Server, spaceId, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := 
NewGetMaintenanceWindowIdRequest(c.Server, spaceId, id) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchMaintenanceWindowIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchMaintenanceWindowIdRequestWithBody(c.Server, spaceId, id, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) PatchMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewPatchMaintenanceWindowIdRequest(c.Server, spaceId, id, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) FindSlosOp(ctx context.Context, spaceId SLOsSpaceId, params *FindSlosOpParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewFindSlosOpRequest(c.Server, spaceId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateSloOpWithBody(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateSloOpRequestWithBody(c.Server, spaceId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) CreateSloOp(ctx context.Context, spaceId SLOsSpaceId, body CreateSloOpJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewCreateSloOpRequest(c.Server, spaceId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkDeleteOpWithBody(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkDeleteOpRequestWithBody(c.Server, spaceId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkDeleteOp(ctx context.Context, spaceId SLOsSpaceId, body BulkDeleteOpJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkDeleteOpRequest(c.Server, spaceId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) BulkDeleteStatusOp(ctx context.Context, spaceId SLOsSpaceId, taskId string, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewBulkDeleteStatusOpRequest(c.Server, spaceId, taskId) + if err != nil { + return nil, err + } + req = 
req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteRollupDataOpWithBody(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteRollupDataOpRequestWithBody(c.Server, spaceId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteRollupDataOp(ctx context.Context, spaceId SLOsSpaceId, body DeleteRollupDataOpJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteRollupDataOpRequest(c.Server, spaceId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteSloInstancesOpWithBody(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteSloInstancesOpRequestWithBody(c.Server, spaceId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteSloInstancesOp(ctx context.Context, spaceId SLOsSpaceId, body DeleteSloInstancesOpJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteSloInstancesOpRequest(c.Server, spaceId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DeleteSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDeleteSloOpRequest(c.Server, spaceId, sloId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, params *GetSloOpParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetSloOpRequest(c.Server, spaceId, sloId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateSloOpWithBody(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateSloOpRequestWithBody(c.Server, spaceId, sloId, contentType, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) UpdateSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, body UpdateSloOpJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewUpdateSloOpRequest(c.Server, spaceId, sloId, body) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); 
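
Each operation comes as a pair: a typed variant (for example CreateSloOp taking a CreateSloOpJSONRequestBody) and a *WithBody variant taking a raw io.Reader plus a content type. A small sketch, assuming the generated identifiers are in scope (imports: bytes, context, net/http), of using the raw variant to send pre-marshalled JSON:

    func createSLOFromRawJSON(ctx context.Context, c *Client, spaceID SLOsSpaceId, raw []byte) (*http.Response, error) {
        // The *WithBody variant bypasses the typed request struct and sends the
        // reader verbatim, tagged with the supplied content type.
        return c.CreateSloOpWithBody(ctx, spaceID, "application/json", bytes.NewReader(raw))
    }
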
err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) ResetSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewResetSloOpRequest(c.Server, spaceId, sloId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) DisableSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewDisableSloOpRequest(c.Server, spaceId, sloId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) EnableSloOp(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewEnableSloOpRequest(c.Server, spaceId, sloId) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +func (c *Client) GetDefinitionsOp(ctx context.Context, spaceId SLOsSpaceId, params *GetDefinitionsOpParams, reqEditors ...RequestEditorFn) (*http.Response, error) { + req, err := NewGetDefinitionsOpRequest(c.Server, spaceId, params) + if err != nil { + return nil, err + } + req = req.WithContext(ctx) + if err := c.applyEditors(ctx, req, reqEditors); err != nil { + return nil, err + } + return c.Client.Do(req) +} + +// NewPostActionsConnectorIdExecuteRequest calls the generic PostActionsConnectorIdExecute builder with application/json body +func NewPostActionsConnectorIdExecuteRequest(server string, id string, body PostActionsConnectorIdExecuteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostActionsConnectorIdExecuteRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPostActionsConnectorIdExecuteRequestWithBody generates requests for PostActionsConnectorIdExecute with any type of body +func NewPostActionsConnectorIdExecuteRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/actions/connector/%s/_execute", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetActionsConnectorTypesRequest generates requests for GetActionsConnectorTypes +func NewGetActionsConnectorTypesRequest(server string, params *GetActionsConnectorTypesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/actions/connector_types") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.FeatureId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "feature_id", runtime.ParamLocationQuery, *params.FeatureId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetAlertingHealthRequest generates requests for GetAlertingHealth +func NewGetAlertingHealthRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/_health") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteAlertingRuleIdRequest generates requests for DeleteAlertingRuleId +func NewDeleteAlertingRuleIdRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetAlertingRuleIdRequest generates requests for GetAlertingRuleId +func NewGetAlertingRuleIdRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostAlertingRuleIdRequest calls the generic PostAlertingRuleId builder with application/json body +func NewPostAlertingRuleIdRequest(server string, id string, body PostAlertingRuleIdJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostAlertingRuleIdRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPostAlertingRuleIdRequestWithBody generates requests for PostAlertingRuleId with any type of body +func NewPostAlertingRuleIdRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPutAlertingRuleIdRequest calls the generic PutAlertingRuleId builder with application/json body +func NewPutAlertingRuleIdRequest(server string, id string, body PutAlertingRuleIdJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutAlertingRuleIdRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPutAlertingRuleIdRequestWithBody generates requests for PutAlertingRuleId with any type of body +func NewPutAlertingRuleIdRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
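
The reason each builder rewrites a leading "/" in operationPath to "./" is base-path preservation: url.URL.Parse resolves the operation path relative to the server URL, and an absolute path would discard any path component in that URL (for example a Kibana space or reverse-proxy prefix). A standalone illustration using only net/url, with a hypothetical space-prefixed Kibana URL:

    package main

    import (
        "fmt"
        "net/url"
    )

    func main() {
        base, err := url.Parse("http://localhost:5601/s/my-space/")
        if err != nil {
            panic(err)
        }
        // An absolute reference discards the base path ...
        abs, _ := base.Parse("/api/alerting/_health")
        // ... while the "./"-prefixed form the builders produce keeps it.
        rel, _ := base.Parse("./api/alerting/_health")
        fmt.Println(abs) // http://localhost:5601/api/alerting/_health
        fmt.Println(rel) // http://localhost:5601/s/my-space/api/alerting/_health
    }
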
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostAlertingRuleIdDisableRequest calls the generic PostAlertingRuleIdDisable builder with application/json body +func NewPostAlertingRuleIdDisableRequest(server string, id string, body PostAlertingRuleIdDisableJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostAlertingRuleIdDisableRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPostAlertingRuleIdDisableRequestWithBody generates requests for PostAlertingRuleIdDisable with any type of body +func NewPostAlertingRuleIdDisableRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s/_disable", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostAlertingRuleIdEnableRequest generates requests for PostAlertingRuleIdEnable +func NewPostAlertingRuleIdEnableRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s/_enable", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostAlertingRuleIdMuteAllRequest generates requests for PostAlertingRuleIdMuteAll +func NewPostAlertingRuleIdMuteAllRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s/_mute_all", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostAlertingRuleIdUnmuteAllRequest generates requests for PostAlertingRuleIdUnmuteAll +func NewPostAlertingRuleIdUnmuteAllRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s/_unmute_all", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostAlertingRuleIdUpdateApiKeyRequest generates requests for PostAlertingRuleIdUpdateApiKey +func NewPostAlertingRuleIdUpdateApiKeyRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s/_update_api_key", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostAlertingRuleIdSnoozeScheduleRequest calls the generic PostAlertingRuleIdSnoozeSchedule builder with application/json body +func NewPostAlertingRuleIdSnoozeScheduleRequest(server string, id string, body PostAlertingRuleIdSnoozeScheduleJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostAlertingRuleIdSnoozeScheduleRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPostAlertingRuleIdSnoozeScheduleRequestWithBody generates requests for PostAlertingRuleIdSnoozeSchedule with any type of body +func NewPostAlertingRuleIdSnoozeScheduleRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s/snooze_schedule", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
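
The builders can also be used without the Client wrapper. A sketch (helper name and API key are illustrative; imports: context, fmt, net/http) of driving NewPostAlertingRuleIdMuteAllRequest with a plain http.Client; Kibana additionally expects the kbn-xsrf header on mutating calls:

    func muteAllAlertsForRule(ctx context.Context, server, ruleID, apiKey string) error {
        req, err := NewPostAlertingRuleIdMuteAllRequest(server, ruleID)
        if err != nil {
            return err
        }
        req = req.WithContext(ctx)
        req.Header.Set("Authorization", "ApiKey "+apiKey)
        req.Header.Set("kbn-xsrf", "true")

        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            return err
        }
        defer resp.Body.Close()
        if resp.StatusCode >= 300 {
            return fmt.Errorf("mute_all failed: %s", resp.Status)
        }
        return nil
    }
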
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteAlertingRuleRuleidSnoozeScheduleScheduleidRequest generates requests for DeleteAlertingRuleRuleidSnoozeScheduleScheduleid +func NewDeleteAlertingRuleRuleidSnoozeScheduleScheduleidRequest(server string, ruleId string, scheduleId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "ruleId", runtime.ParamLocationPath, ruleId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "scheduleId", runtime.ParamLocationPath, scheduleId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s/snooze_schedule/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostAlertingRuleRuleIdAlertAlertIdMuteRequest generates requests for PostAlertingRuleRuleIdAlertAlertIdMute +func NewPostAlertingRuleRuleIdAlertAlertIdMuteRequest(server string, ruleId string, alertId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "rule_id", runtime.ParamLocationPath, ruleId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "alert_id", runtime.ParamLocationPath, alertId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s/alert/%s/_mute", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostAlertingRuleRuleIdAlertAlertIdUnmuteRequest generates requests for PostAlertingRuleRuleIdAlertAlertIdUnmute +func NewPostAlertingRuleRuleIdAlertAlertIdUnmuteRequest(server string, ruleId string, alertId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "rule_id", runtime.ParamLocationPath, ruleId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "alert_id", runtime.ParamLocationPath, alertId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule/%s/alert/%s/_unmute", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetRuleTypesRequest generates requests for GetRuleTypes +func NewGetRuleTypesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rule_types") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetAlertingRulesFindRequest generates requests for GetAlertingRulesFind +func NewGetAlertingRulesFindRequest(server string, params *GetAlertingRulesFindParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/alerting/rules/_find") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Search != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "search", runtime.ParamLocationQuery, *params.Search); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.DefaultSearchOperator != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "default_search_operator", runtime.ParamLocationQuery, *params.DefaultSearchOperator); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SearchFields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "search_fields", runtime.ParamLocationQuery, *params.SearchFields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } 
else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HasReference != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "has_reference", runtime.ParamLocationQuery, *params.HasReference); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Fields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "fields", runtime.ParamLocationQuery, *params.Fields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.FilterConsumers != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter_consumers", runtime.ParamLocationQuery, *params.FilterConsumers); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateAgentKeyRequest calls the generic CreateAgentKey builder with application/json body +func NewCreateAgentKeyRequest(server string, params *CreateAgentKeyParams, body CreateAgentKeyJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateAgentKeyRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewCreateAgentKeyRequestWithBody generates requests for CreateAgentKey with any type of body +func NewCreateAgentKeyRequestWithBody(server string, params *CreateAgentKeyParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/agent_keys") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewSaveApmServerSchemaRequest calls the generic SaveApmServerSchema builder with application/json body +func NewSaveApmServerSchemaRequest(server string, params *SaveApmServerSchemaParams, body SaveApmServerSchemaJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewSaveApmServerSchemaRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewSaveApmServerSchemaRequestWithBody generates requests for SaveApmServerSchema with any type of body +func NewSaveApmServerSchemaRequestWithBody(server string, params *SaveApmServerSchemaParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/fleet/apm_server_schema") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewCreateAnnotationRequest calls the generic CreateAnnotation builder with application/json body +func NewCreateAnnotationRequest(server string, serviceName string, params *CreateAnnotationParams, body CreateAnnotationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateAnnotationRequestWithBody(server, serviceName, params, "application/json", bodyReader) +} + +// NewCreateAnnotationRequestWithBody generates requests for CreateAnnotation with any type of body +func NewCreateAnnotationRequestWithBody(server string, serviceName string, params *CreateAnnotationParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "serviceName", runtime.ParamLocationPath, serviceName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/services/%s/annotation", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewGetAnnotationRequest generates requests for GetAnnotation +func NewGetAnnotationRequest(server string, serviceName string, params *GetAnnotationParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "serviceName", runtime.ParamLocationPath, serviceName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/services/%s/annotation/search", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Environment != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "environment", runtime.ParamLocationQuery, *params.Environment); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Start != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "start", runtime.ParamLocationQuery, *params.Start); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.End != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "end", runtime.ParamLocationQuery, *params.End); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewDeleteAgentConfigurationRequest calls the generic DeleteAgentConfiguration builder with application/json body +func NewDeleteAgentConfigurationRequest(server string, params *DeleteAgentConfigurationParams, body DeleteAgentConfigurationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteAgentConfigurationRequestWithBody(server, params, "application/json", bodyReader) +} + +// 
NewDeleteAgentConfigurationRequestWithBody generates requests for DeleteAgentConfiguration with any type of body +func NewDeleteAgentConfigurationRequestWithBody(server string, params *DeleteAgentConfigurationParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/settings/agent-configuration") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewGetAgentConfigurationsRequest generates requests for GetAgentConfigurations +func NewGetAgentConfigurationsRequest(server string, params *GetAgentConfigurationsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/settings/agent-configuration") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewCreateUpdateAgentConfigurationRequest calls the generic CreateUpdateAgentConfiguration builder with application/json body +func NewCreateUpdateAgentConfigurationRequest(server string, params *CreateUpdateAgentConfigurationParams, body CreateUpdateAgentConfigurationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateUpdateAgentConfigurationRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewCreateUpdateAgentConfigurationRequestWithBody generates requests for CreateUpdateAgentConfiguration with any type of body +func NewCreateUpdateAgentConfigurationRequestWithBody(server string, params *CreateUpdateAgentConfigurationParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/settings/agent-configuration") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Overwrite != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "overwrite", runtime.ParamLocationQuery, *params.Overwrite); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewGetAgentNameForServiceRequest generates requests for GetAgentNameForService +func NewGetAgentNameForServiceRequest(server string, params *GetAgentNameForServiceParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/settings/agent-configuration/agent_name") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "serviceName", runtime.ParamLocationQuery, params.ServiceName); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewGetEnvironmentsForServiceRequest generates requests for GetEnvironmentsForService +func NewGetEnvironmentsForServiceRequest(server string, params *GetEnvironmentsForServiceParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/settings/agent-configuration/environments") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.ServiceName != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "serviceName", runtime.ParamLocationQuery, *params.ServiceName); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewSearchSingleConfigurationRequest calls the generic SearchSingleConfiguration builder with application/json body +func NewSearchSingleConfigurationRequest(server string, params *SearchSingleConfigurationParams, body SearchSingleConfigurationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewSearchSingleConfigurationRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewSearchSingleConfigurationRequestWithBody generates requests for SearchSingleConfiguration with any type of body +func NewSearchSingleConfigurationRequestWithBody(server string, params *SearchSingleConfigurationParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/settings/agent-configuration/search") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewGetSingleAgentConfigurationRequest generates requests for GetSingleAgentConfiguration +func NewGetSingleAgentConfigurationRequest(server string, params *GetSingleAgentConfigurationParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/settings/agent-configuration/view") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Name != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "name", runtime.ParamLocationQuery, *params.Name); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Environment != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "environment", runtime.ParamLocationQuery, *params.Environment); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewGetSourceMapsRequest generates requests for GetSourceMaps +func NewGetSourceMapsRequest(server string, params *GetSourceMapsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/sourcemaps") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewUploadSourceMapRequestWithBody generates requests for UploadSourceMap with any type of body +func NewUploadSourceMapRequestWithBody(server string, params *UploadSourceMapParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/sourcemaps") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewDeleteSourceMapRequest generates requests for DeleteSourceMap +func NewDeleteSourceMapRequest(server string, id string, params *DeleteSourceMapParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/apm/sourcemaps/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + if params != nil { + + var headerParam0 string + + headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) + if err != nil { + return nil, err + } + + req.Header.Set("elastic-api-version", headerParam0) + + } + + return req, nil +} + +// NewDeleteAssetCriticalityRecordRequest generates requests for DeleteAssetCriticalityRecord +func NewDeleteAssetCriticalityRecordRequest(server string, params *DeleteAssetCriticalityRecordParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/asset_criticality") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id_value", runtime.ParamLocationQuery, params.IdValue); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id_field", runtime.ParamLocationQuery, params.IdField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.Refresh != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "refresh", runtime.ParamLocationQuery, *params.Refresh); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetAssetCriticalityRecordRequest generates requests for GetAssetCriticalityRecord +func NewGetAssetCriticalityRecordRequest(server string, params *GetAssetCriticalityRecordParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/asset_criticality") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id_value", runtime.ParamLocationQuery, params.IdValue); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id_field", runtime.ParamLocationQuery, params.IdField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateAssetCriticalityRecordRequest calls the generic CreateAssetCriticalityRecord builder with application/json body +func NewCreateAssetCriticalityRecordRequest(server string, body CreateAssetCriticalityRecordJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateAssetCriticalityRecordRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateAssetCriticalityRecordRequestWithBody generates requests for CreateAssetCriticalityRecord with any type of body +func NewCreateAssetCriticalityRecordRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/asset_criticality") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewBulkUpsertAssetCriticalityRecordsRequest calls the generic BulkUpsertAssetCriticalityRecords builder with application/json body +func NewBulkUpsertAssetCriticalityRecordsRequest(server string, body BulkUpsertAssetCriticalityRecordsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewBulkUpsertAssetCriticalityRecordsRequestWithBody(server, "application/json", bodyReader) +} + +// NewBulkUpsertAssetCriticalityRecordsRequestWithBody generates requests for BulkUpsertAssetCriticalityRecords with any type of body +func NewBulkUpsertAssetCriticalityRecordsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/asset_criticality/bulk") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindAssetCriticalityRecordsRequest generates requests for FindAssetCriticalityRecords +func NewFindAssetCriticalityRecordsRequest(server string, params *FindAssetCriticalityRecordsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/asset_criticality/list") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortDirection != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_direction", runtime.ParamLocationQuery, *params.SortDirection); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteCaseDefaultSpaceRequest generates requests for DeleteCaseDefaultSpace +func NewDeleteCaseDefaultSpaceRequest(server string, params *DeleteCaseDefaultSpaceParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ids", runtime.ParamLocationQuery, params.Ids); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateCaseDefaultSpaceRequest calls the generic UpdateCaseDefaultSpace builder with application/json body +func NewUpdateCaseDefaultSpaceRequest(server string, body UpdateCaseDefaultSpaceJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateCaseDefaultSpaceRequestWithBody(server, "application/json", bodyReader) +} + +// NewUpdateCaseDefaultSpaceRequestWithBody generates requests for UpdateCaseDefaultSpace with any type of body +func NewUpdateCaseDefaultSpaceRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateCaseDefaultSpaceRequest calls the generic CreateCaseDefaultSpace builder with application/json body +func NewCreateCaseDefaultSpaceRequest(server string, body CreateCaseDefaultSpaceJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateCaseDefaultSpaceRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateCaseDefaultSpaceRequestWithBody generates requests for CreateCaseDefaultSpace with any type of body +func NewCreateCaseDefaultSpaceRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindCasesDefaultSpaceRequest generates requests for FindCasesDefaultSpace +func NewFindCasesDefaultSpaceRequest(server string, params *FindCasesDefaultSpaceParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/_find") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Assignees != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "assignees", runtime.ParamLocationQuery, *params.Assignees); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Category != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "category", runtime.ParamLocationQuery, *params.Category); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.DefaultSearchOperator != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "defaultSearchOperator", runtime.ParamLocationQuery, *params.DefaultSearchOperator); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.From != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "from", runtime.ParamLocationQuery, *params.From); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Owner != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "owner", runtime.ParamLocationQuery, *params.Owner); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Reporters != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "reporters", runtime.ParamLocationQuery, *params.Reporters); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Search != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "search", runtime.ParamLocationQuery, *params.Search); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := 
range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SearchFields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "searchFields", runtime.ParamLocationQuery, *params.SearchFields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Severity != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "severity", runtime.ParamLocationQuery, *params.Severity); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Status != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "status", runtime.ParamLocationQuery, *params.Status); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Tags != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "tags", runtime.ParamLocationQuery, *params.Tags); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.To != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "to", runtime.ParamLocationQuery, *params.To); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetCasesByAlertDefaultSpaceRequest generates requests for GetCasesByAlertDefaultSpace +func NewGetCasesByAlertDefaultSpaceRequest(server string, alertId CasesAlertId, params *GetCasesByAlertDefaultSpaceParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "alertId", runtime.ParamLocationPath, alertId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/alerts/%s", pathParam0) + if operationPath[0] == 
'/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Owner != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "owner", runtime.ParamLocationQuery, *params.Owner); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetCaseConfigurationDefaultSpaceRequest generates requests for GetCaseConfigurationDefaultSpace +func NewGetCaseConfigurationDefaultSpaceRequest(server string, params *GetCaseConfigurationDefaultSpaceParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/configure") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Owner != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "owner", runtime.ParamLocationQuery, *params.Owner); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewSetCaseConfigurationDefaultSpaceRequest calls the generic SetCaseConfigurationDefaultSpace builder with application/json body +func NewSetCaseConfigurationDefaultSpaceRequest(server string, body SetCaseConfigurationDefaultSpaceJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewSetCaseConfigurationDefaultSpaceRequestWithBody(server, "application/json", bodyReader) +} + +// NewSetCaseConfigurationDefaultSpaceRequestWithBody generates requests for SetCaseConfigurationDefaultSpace with any type of body +func NewSetCaseConfigurationDefaultSpaceRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/configure") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindCaseConnectorsDefaultSpaceRequest generates requests for FindCaseConnectorsDefaultSpace +func NewFindCaseConnectorsDefaultSpaceRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/configure/connectors/_find") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateCaseConfigurationDefaultSpaceRequest calls the generic UpdateCaseConfigurationDefaultSpace builder with application/json body +func NewUpdateCaseConfigurationDefaultSpaceRequest(server string, configurationId CasesConfigurationId, body UpdateCaseConfigurationDefaultSpaceJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateCaseConfigurationDefaultSpaceRequestWithBody(server, configurationId, "application/json", bodyReader) +} + +// NewUpdateCaseConfigurationDefaultSpaceRequestWithBody generates requests for UpdateCaseConfigurationDefaultSpace with any type of body +func NewUpdateCaseConfigurationDefaultSpaceRequestWithBody(server string, configurationId CasesConfigurationId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "configurationId", runtime.ParamLocationPath, configurationId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/configure/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetCaseReportersDefaultSpaceRequest generates requests for GetCaseReportersDefaultSpace +func NewGetCaseReportersDefaultSpaceRequest(server string, params *GetCaseReportersDefaultSpaceParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/reporters") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Owner != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "owner", runtime.ParamLocationQuery, *params.Owner); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetCaseTagsDefaultSpaceRequest generates requests for GetCaseTagsDefaultSpace +func NewGetCaseTagsDefaultSpaceRequest(server string, params *GetCaseTagsDefaultSpaceParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/tags") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Owner != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "owner", runtime.ParamLocationQuery, *params.Owner); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetCaseDefaultSpaceRequest generates requests for GetCaseDefaultSpace +func NewGetCaseDefaultSpaceRequest(server string, caseId CasesCaseId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetCaseAlertsDefaultSpaceRequest generates requests for GetCaseAlertsDefaultSpace +func NewGetCaseAlertsDefaultSpaceRequest(server string, caseId CasesCaseId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s/alerts", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteCaseCommentsDefaultSpaceRequest generates requests for DeleteCaseCommentsDefaultSpace +func NewDeleteCaseCommentsDefaultSpaceRequest(server string, caseId CasesCaseId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s/comments", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateCaseCommentDefaultSpaceRequest calls the generic UpdateCaseCommentDefaultSpace builder with application/json body +func NewUpdateCaseCommentDefaultSpaceRequest(server string, caseId CasesCaseId, body UpdateCaseCommentDefaultSpaceJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateCaseCommentDefaultSpaceRequestWithBody(server, caseId, "application/json", bodyReader) +} + +// NewUpdateCaseCommentDefaultSpaceRequestWithBody generates requests for UpdateCaseCommentDefaultSpace with any type of body +func NewUpdateCaseCommentDefaultSpaceRequestWithBody(server string, caseId CasesCaseId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s/comments", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewAddCaseCommentDefaultSpaceRequest calls the generic AddCaseCommentDefaultSpace builder with application/json body +func NewAddCaseCommentDefaultSpaceRequest(server string, caseId CasesCaseId, body AddCaseCommentDefaultSpaceJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewAddCaseCommentDefaultSpaceRequestWithBody(server, caseId, "application/json", bodyReader) +} + +// NewAddCaseCommentDefaultSpaceRequestWithBody generates requests for AddCaseCommentDefaultSpace with any type of body +func NewAddCaseCommentDefaultSpaceRequestWithBody(server string, caseId CasesCaseId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s/comments", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindCaseCommentsDefaultSpaceRequest generates requests for FindCaseCommentsDefaultSpace +func NewFindCaseCommentsDefaultSpaceRequest(server string, caseId CasesCaseId, params *FindCaseCommentsDefaultSpaceParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s/comments/_find", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteCaseCommentDefaultSpaceRequest generates requests for DeleteCaseCommentDefaultSpace +func NewDeleteCaseCommentDefaultSpaceRequest(server string, caseId CasesCaseId, commentId CasesCommentId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "commentId", runtime.ParamLocationPath, commentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s/comments/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetCaseCommentDefaultSpaceRequest generates requests for GetCaseCommentDefaultSpace +func NewGetCaseCommentDefaultSpaceRequest(server string, caseId CasesCaseId, commentId CasesCommentId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "commentId", runtime.ParamLocationPath, commentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s/comments/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPushCaseDefaultSpaceRequest calls the generic PushCaseDefaultSpace builder with application/json body +func NewPushCaseDefaultSpaceRequest(server string, caseId CasesCaseId, connectorId CasesConnectorId, body PushCaseDefaultSpaceJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPushCaseDefaultSpaceRequestWithBody(server, caseId, connectorId, "application/json", bodyReader) +} + +// NewPushCaseDefaultSpaceRequestWithBody generates requests for PushCaseDefaultSpace with any type of body +func NewPushCaseDefaultSpaceRequestWithBody(server string, caseId CasesCaseId, connectorId CasesConnectorId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "connectorId", runtime.ParamLocationPath, connectorId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s/connector/%s/_push", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewAddCaseFileDefaultSpaceRequestWithBody generates requests for AddCaseFileDefaultSpace with any type of body +func NewAddCaseFileDefaultSpaceRequestWithBody(server string, caseId CasesCaseId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s/files", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindCaseActivityDefaultSpaceRequest generates requests for FindCaseActivityDefaultSpace +func NewFindCaseActivityDefaultSpaceRequest(server string, caseId CasesCaseId, params *FindCaseActivityDefaultSpaceParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "caseId", runtime.ParamLocationPath, caseId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/cases/%s/user_actions/_find", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Types != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "types", runtime.ParamLocationQuery, *params.Types); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateFieldsMetadataDefaultRequest calls the generic UpdateFieldsMetadataDefault builder with application/json body +func NewUpdateFieldsMetadataDefaultRequest(server string, viewId DataViewsViewId, body UpdateFieldsMetadataDefaultJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateFieldsMetadataDefaultRequestWithBody(server, viewId, "application/json", bodyReader) +} + +// NewUpdateFieldsMetadataDefaultRequestWithBody generates requests for UpdateFieldsMetadataDefault with any type of body +func NewUpdateFieldsMetadataDefaultRequestWithBody(server string, viewId DataViewsViewId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/data_views/data_view/%s/fields", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateRuntimeFieldDefaultRequest calls the generic CreateRuntimeFieldDefault builder with application/json body +func NewCreateRuntimeFieldDefaultRequest(server string, viewId DataViewsViewId, body CreateRuntimeFieldDefaultJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateRuntimeFieldDefaultRequestWithBody(server, viewId, "application/json", bodyReader) +} + +// NewCreateRuntimeFieldDefaultRequestWithBody generates requests for CreateRuntimeFieldDefault with any type of body +func NewCreateRuntimeFieldDefaultRequestWithBody(server string, viewId DataViewsViewId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/data_views/data_view/%s/runtime_field", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateUpdateRuntimeFieldDefaultRequest calls the generic CreateUpdateRuntimeFieldDefault builder with application/json body +func NewCreateUpdateRuntimeFieldDefaultRequest(server string, viewId string, body CreateUpdateRuntimeFieldDefaultJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateUpdateRuntimeFieldDefaultRequestWithBody(server, viewId, "application/json", bodyReader) +} + +// NewCreateUpdateRuntimeFieldDefaultRequestWithBody generates requests for CreateUpdateRuntimeFieldDefault with any type of body +func NewCreateUpdateRuntimeFieldDefaultRequestWithBody(server string, viewId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/data_views/data_view/%s/runtime_field", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteRuntimeFieldDefaultRequest generates requests for DeleteRuntimeFieldDefault +func NewDeleteRuntimeFieldDefaultRequest(server string, viewId DataViewsViewId, fieldName DataViewsFieldName) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "fieldName", runtime.ParamLocationPath, fieldName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/data_views/data_view/%s/runtime_field/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetRuntimeFieldDefaultRequest generates requests for GetRuntimeFieldDefault +func NewGetRuntimeFieldDefaultRequest(server string, viewId DataViewsViewId, fieldName DataViewsFieldName) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "fieldName", runtime.ParamLocationPath, fieldName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/data_views/data_view/%s/runtime_field/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateRuntimeFieldDefaultRequest calls the generic UpdateRuntimeFieldDefault builder with application/json body +func NewUpdateRuntimeFieldDefaultRequest(server string, viewId DataViewsViewId, fieldName DataViewsFieldName, body UpdateRuntimeFieldDefaultJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateRuntimeFieldDefaultRequestWithBody(server, viewId, fieldName, "application/json", bodyReader) +} + +// NewUpdateRuntimeFieldDefaultRequestWithBody generates requests for UpdateRuntimeFieldDefault with any type of body +func NewUpdateRuntimeFieldDefaultRequestWithBody(server string, viewId DataViewsViewId, fieldName DataViewsFieldName, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "fieldName", runtime.ParamLocationPath, fieldName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/data_views/data_view/%s/runtime_field/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetDefaultDataViewDefaultRequest generates requests for GetDefaultDataViewDefault +func NewGetDefaultDataViewDefaultRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/data_views/default") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewSetDefaultDatailViewDefaultRequest calls the generic SetDefaultDatailViewDefault builder with application/json body +func NewSetDefaultDatailViewDefaultRequest(server string, body SetDefaultDatailViewDefaultJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewSetDefaultDatailViewDefaultRequestWithBody(server, "application/json", bodyReader) +} + +// NewSetDefaultDatailViewDefaultRequestWithBody generates requests for SetDefaultDatailViewDefault with any type of body +func NewSetDefaultDatailViewDefaultRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/data_views/default") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewSwapDataViewsDefaultRequest calls the generic SwapDataViewsDefault builder with application/json body +func NewSwapDataViewsDefaultRequest(server string, body SwapDataViewsDefaultJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewSwapDataViewsDefaultRequestWithBody(server, "application/json", bodyReader) +} + +// NewSwapDataViewsDefaultRequestWithBody generates requests for SwapDataViewsDefault with any type of body +func NewSwapDataViewsDefaultRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/data_views/swap_references") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPreviewSwapDataViewsDefaultRequest calls the generic PreviewSwapDataViewsDefault builder with application/json body +func NewPreviewSwapDataViewsDefaultRequest(server string, body PreviewSwapDataViewsDefaultJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPreviewSwapDataViewsDefaultRequestWithBody(server, "application/json", bodyReader) +} + +// NewPreviewSwapDataViewsDefaultRequestWithBody generates requests for PreviewSwapDataViewsDefault with any type of body +func NewPreviewSwapDataViewsDefaultRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/data_views/swap_references/_preview") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteAlertsIndexRequest generates requests for DeleteAlertsIndex +func NewDeleteAlertsIndexRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/index") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadAlertsIndexRequest generates requests for ReadAlertsIndex +func NewReadAlertsIndexRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/index") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateAlertsIndexRequest generates requests for CreateAlertsIndex +func NewCreateAlertsIndexRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/index") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadPrivilegesRequest generates requests for ReadPrivileges +func NewReadPrivilegesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/privileges") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteRuleRequest generates requests for DeleteRule +func NewDeleteRuleRequest(server string, params *DeleteRuleParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.RuleId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "rule_id", runtime.ParamLocationQuery, *params.RuleId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadRuleRequest generates requests for ReadRule +func NewReadRuleRequest(server string, params *ReadRuleParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.RuleId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "rule_id", runtime.ParamLocationQuery, *params.RuleId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPatchRuleRequest calls the generic PatchRule builder with application/json body +func NewPatchRuleRequest(server string, body PatchRuleJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchRuleRequestWithBody(server, "application/json", bodyReader) +} + +// NewPatchRuleRequestWithBody generates requests for PatchRule with any type of body +func NewPatchRuleRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateRuleRequest calls the generic CreateRule builder with application/json body +func NewCreateRuleRequest(server string, body CreateRuleJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateRuleRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateRuleRequestWithBody generates requests for CreateRule with any type of body +func NewCreateRuleRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewUpdateRuleRequest calls the generic UpdateRule builder with application/json body +func NewUpdateRuleRequest(server string, body UpdateRuleJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateRuleRequestWithBody(server, "application/json", bodyReader) +} + +// NewUpdateRuleRequestWithBody generates requests for UpdateRule with any type of body +func NewUpdateRuleRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPerformRulesBulkActionRequest calls the generic PerformRulesBulkAction builder with application/json body +func NewPerformRulesBulkActionRequest(server string, params *PerformRulesBulkActionParams, body PerformRulesBulkActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPerformRulesBulkActionRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPerformRulesBulkActionRequestWithBody generates requests for PerformRulesBulkAction with any type of body +func NewPerformRulesBulkActionRequestWithBody(server string, params *PerformRulesBulkActionParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules/_bulk_action") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.DryRun != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "dry_run", runtime.ParamLocationQuery, *params.DryRun); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewExportRulesRequest calls the generic ExportRules builder with application/json body +func NewExportRulesRequest(server string, params *ExportRulesParams, body ExportRulesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewExportRulesRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewExportRulesRequestWithBody generates requests for ExportRules with any type of body +func NewExportRulesRequestWithBody(server string, params *ExportRulesParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules/_export") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.ExcludeExportDetails != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "exclude_export_details", runtime.ParamLocationQuery, *params.ExcludeExportDetails); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.FileName != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "file_name", runtime.ParamLocationQuery, *params.FileName); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindRulesRequest generates requests for FindRules +func NewFindRulesRequest(server string, params *FindRulesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules/_find") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Fields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "fields", runtime.ParamLocationQuery, *params.Fields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.GapsRangeStart != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "gaps_range_start", runtime.ParamLocationQuery, *params.GapsRangeStart); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.GapsRangeEnd != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "gaps_range_end", runtime.ParamLocationQuery, *params.GapsRangeEnd); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewImportRulesRequestWithBody generates requests for ImportRules with any type of body +func 
NewImportRulesRequestWithBody(server string, params *ImportRulesParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules/_import") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Overwrite != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "overwrite", runtime.ParamLocationQuery, *params.Overwrite); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.OverwriteExceptions != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "overwrite_exceptions", runtime.ParamLocationQuery, *params.OverwriteExceptions); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.OverwriteActionConnectors != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "overwrite_action_connectors", runtime.ParamLocationQuery, *params.OverwriteActionConnectors); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.AsNewList != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "as_new_list", runtime.ParamLocationQuery, *params.AsNewList); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewInstallPrebuiltRulesAndTimelinesRequest generates requests for InstallPrebuiltRulesAndTimelines +func NewInstallPrebuiltRulesAndTimelinesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules/prepackaged") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadPrebuiltRulesAndTimelinesStatusRequest generates requests for ReadPrebuiltRulesAndTimelinesStatus +func NewReadPrebuiltRulesAndTimelinesStatusRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules/prepackaged/_status") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewRulePreviewRequest calls the generic RulePreview builder with application/json body +func NewRulePreviewRequest(server string, params *RulePreviewParams, body RulePreviewJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewRulePreviewRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewRulePreviewRequestWithBody generates requests for RulePreview with any type of body +func NewRulePreviewRequestWithBody(server string, params *RulePreviewParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules/preview") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.EnableLoggedRequests != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enable_logged_requests", runtime.ParamLocationQuery, *params.EnableLoggedRequests); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateRuleExceptionListItemsRequest calls the generic CreateRuleExceptionListItems builder with application/json body +func NewCreateRuleExceptionListItemsRequest(server string, id SecurityExceptionsAPIRuleId, body CreateRuleExceptionListItemsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateRuleExceptionListItemsRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewCreateRuleExceptionListItemsRequestWithBody generates requests for CreateRuleExceptionListItems with any type of body +func NewCreateRuleExceptionListItemsRequestWithBody(server string, id SecurityExceptionsAPIRuleId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/rules/%s/exceptions", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewSetAlertAssigneesRequest calls the generic SetAlertAssignees builder with application/json body +func NewSetAlertAssigneesRequest(server string, body SetAlertAssigneesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewSetAlertAssigneesRequestWithBody(server, "application/json", bodyReader) +} + +// NewSetAlertAssigneesRequestWithBody generates requests for SetAlertAssignees with any type of body +func NewSetAlertAssigneesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/signals/assignees") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFinalizeAlertsMigrationRequest calls the generic FinalizeAlertsMigration builder with application/json body +func NewFinalizeAlertsMigrationRequest(server string, body FinalizeAlertsMigrationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewFinalizeAlertsMigrationRequestWithBody(server, "application/json", bodyReader) +} + +// NewFinalizeAlertsMigrationRequestWithBody generates requests for FinalizeAlertsMigration with any type of body +func NewFinalizeAlertsMigrationRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/signals/finalize_migration") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewAlertsMigrationCleanupRequest calls the generic AlertsMigrationCleanup builder with application/json body +func NewAlertsMigrationCleanupRequest(server string, body AlertsMigrationCleanupJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewAlertsMigrationCleanupRequestWithBody(server, "application/json", bodyReader) +} + +// NewAlertsMigrationCleanupRequestWithBody generates requests for AlertsMigrationCleanup with any type of body +func NewAlertsMigrationCleanupRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/signals/migration") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateAlertsMigrationRequest calls the generic CreateAlertsMigration builder with application/json body +func NewCreateAlertsMigrationRequest(server string, body CreateAlertsMigrationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateAlertsMigrationRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateAlertsMigrationRequestWithBody generates requests for CreateAlertsMigration with any type of body +func NewCreateAlertsMigrationRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/signals/migration") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewReadAlertsMigrationStatusRequest generates requests for ReadAlertsMigrationStatus +func NewReadAlertsMigrationStatusRequest(server string, params *ReadAlertsMigrationStatusParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/signals/migration_status") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "from", runtime.ParamLocationQuery, params.From); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewSearchAlertsRequest calls the generic SearchAlerts builder with application/json body +func NewSearchAlertsRequest(server string, body SearchAlertsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewSearchAlertsRequestWithBody(server, "application/json", bodyReader) +} + +// NewSearchAlertsRequestWithBody generates requests for SearchAlerts with any type of body +func NewSearchAlertsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/signals/search") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewSetAlertsStatusRequest calls the generic SetAlertsStatus builder with application/json body +func NewSetAlertsStatusRequest(server string, body SetAlertsStatusJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewSetAlertsStatusRequestWithBody(server, "application/json", bodyReader) +} + +// NewSetAlertsStatusRequestWithBody generates requests for SetAlertsStatus with any type of body +func NewSetAlertsStatusRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/signals/status") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewSetAlertTagsRequest calls the generic SetAlertTags builder with application/json body +func NewSetAlertTagsRequest(server string, body SetAlertTagsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewSetAlertTagsRequestWithBody(server, "application/json", bodyReader) +} + +// NewSetAlertTagsRequestWithBody generates requests for SetAlertTags with any type of body +func NewSetAlertTagsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/signals/tags") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewReadTagsRequest generates requests for ReadTags +func NewReadTagsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/detection_engine/tags") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewRotateEncryptionKeyRequest generates requests for RotateEncryptionKey +func NewRotateEncryptionKeyRequest(server string, params *RotateEncryptionKeyParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/encrypted_saved_objects/_rotate_key") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.BatchSize != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "batch_size", runtime.ParamLocationQuery, *params.BatchSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Type != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "type", runtime.ParamLocationQuery, *params.Type); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewEndpointGetActionsListRequest generates requests for EndpointGetActionsList +func NewEndpointGetActionsListRequest(server string, params *EndpointGetActionsListParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PageSize != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "pageSize", runtime.ParamLocationQuery, *params.PageSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Commands != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "commands", runtime.ParamLocationQuery, *params.Commands); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.AgentIds != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "agentIds", runtime.ParamLocationQuery, *params.AgentIds); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.UserIds != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "userIds", runtime.ParamLocationQuery, *params.UserIds); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + 
queryValues.Add(k, v2) + } + } + } + + } + + if params.StartDate != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "startDate", runtime.ParamLocationQuery, *params.StartDate); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.EndDate != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "endDate", runtime.ParamLocationQuery, *params.EndDate); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.AgentTypes != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "agentTypes", runtime.ParamLocationQuery, *params.AgentTypes); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.WithOutputs != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withOutputs", runtime.ParamLocationQuery, *params.WithOutputs); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Types != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "types", runtime.ParamLocationQuery, *params.Types); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewEndpointExecuteActionRequest calls the generic EndpointExecuteAction builder with application/json body +func NewEndpointExecuteActionRequest(server string, body EndpointExecuteActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewEndpointExecuteActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewEndpointExecuteActionRequestWithBody generates requests for EndpointExecuteAction with any type of body +func NewEndpointExecuteActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/execute") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewEndpointGetFileActionRequest calls the generic EndpointGetFileAction builder with application/json body +func NewEndpointGetFileActionRequest(server string, body EndpointGetFileActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewEndpointGetFileActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewEndpointGetFileActionRequestWithBody generates requests for EndpointGetFileAction with any type of body +func NewEndpointGetFileActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/get_file") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewEndpointIsolateActionRequest calls the generic EndpointIsolateAction builder with application/json body +func NewEndpointIsolateActionRequest(server string, body EndpointIsolateActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewEndpointIsolateActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewEndpointIsolateActionRequestWithBody generates requests for EndpointIsolateAction with any type of body +func NewEndpointIsolateActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/isolate") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewEndpointKillProcessActionRequest calls the generic EndpointKillProcessAction builder with application/json body +func NewEndpointKillProcessActionRequest(server string, body EndpointKillProcessActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewEndpointKillProcessActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewEndpointKillProcessActionRequestWithBody generates requests for EndpointKillProcessAction with any type of body +func NewEndpointKillProcessActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/kill_process") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewEndpointGetProcessesActionRequest calls the generic EndpointGetProcessesAction builder with application/json body +func NewEndpointGetProcessesActionRequest(server string, body EndpointGetProcessesActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewEndpointGetProcessesActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewEndpointGetProcessesActionRequestWithBody generates requests for EndpointGetProcessesAction with any type of body +func NewEndpointGetProcessesActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/running_procs") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewRunScriptActionRequest calls the generic RunScriptAction builder with application/json body +func NewRunScriptActionRequest(server string, body RunScriptActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewRunScriptActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewRunScriptActionRequestWithBody generates requests for RunScriptAction with any type of body +func NewRunScriptActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/runscript") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewEndpointScanActionRequest calls the generic EndpointScanAction builder with application/json body +func NewEndpointScanActionRequest(server string, body EndpointScanActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewEndpointScanActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewEndpointScanActionRequestWithBody generates requests for EndpointScanAction with any type of body +func NewEndpointScanActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/scan") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewEndpointGetActionsStateRequest generates requests for EndpointGetActionsState +func NewEndpointGetActionsStateRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/state") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewEndpointSuspendProcessActionRequest calls the generic EndpointSuspendProcessAction builder with application/json body +func NewEndpointSuspendProcessActionRequest(server string, body EndpointSuspendProcessActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewEndpointSuspendProcessActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewEndpointSuspendProcessActionRequestWithBody generates requests for EndpointSuspendProcessAction with any type of body +func NewEndpointSuspendProcessActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/suspend_process") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewEndpointUnisolateActionRequest calls the generic EndpointUnisolateAction builder with application/json body +func NewEndpointUnisolateActionRequest(server string, body EndpointUnisolateActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewEndpointUnisolateActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewEndpointUnisolateActionRequestWithBody generates requests for EndpointUnisolateAction with any type of body +func NewEndpointUnisolateActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/unisolate") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewEndpointUploadActionRequestWithBody generates requests for EndpointUploadAction with any type of body +func NewEndpointUploadActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/upload") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewEndpointGetActionsDetailsRequest generates requests for EndpointGetActionsDetails +func NewEndpointGetActionsDetailsRequest(server string, actionId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "action_id", runtime.ParamLocationPath, actionId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewEndpointFileInfoRequest generates requests for EndpointFileInfo +func NewEndpointFileInfoRequest(server string, actionId string, fileId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "action_id", runtime.ParamLocationPath, actionId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "file_id", runtime.ParamLocationPath, fileId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/%s/file/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewEndpointFileDownloadRequest generates requests for EndpointFileDownload +func NewEndpointFileDownloadRequest(server string, actionId string, fileId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "action_id", runtime.ParamLocationPath, actionId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "file_id", runtime.ParamLocationPath, fileId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action/%s/file/%s/download", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewEndpointGetActionsStatusRequest generates requests for EndpointGetActionsStatus +func NewEndpointGetActionsStatusRequest(server string, params *EndpointGetActionsStatusParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/action_status") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "query", runtime.ParamLocationQuery, params.Query); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetEndpointMetadataListRequest generates requests for GetEndpointMetadataList +func NewGetEndpointMetadataListRequest(server string, params *GetEndpointMetadataListParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/metadata") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PageSize != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "pageSize", runtime.ParamLocationQuery, *params.PageSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "hostStatuses", runtime.ParamLocationQuery, params.HostStatuses); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", 
runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortDirection != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortDirection", runtime.ParamLocationQuery, *params.SortDirection); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetEndpointMetadataRequest generates requests for GetEndpointMetadata +func NewGetEndpointMetadataRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/metadata/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetPolicyResponseRequest generates requests for GetPolicyResponse +func NewGetPolicyResponseRequest(server string, params *GetPolicyResponseParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/policy_response") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "query", runtime.ParamLocationQuery, params.Query); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetProtectionUpdatesNoteRequest generates requests for GetProtectionUpdatesNote +func NewGetProtectionUpdatesNoteRequest(server string, packagePolicyId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "package_policy_id", runtime.ParamLocationPath, packagePolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/protection_updates_note/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateUpdateProtectionUpdatesNoteRequest calls the generic CreateUpdateProtectionUpdatesNote builder with application/json body +func NewCreateUpdateProtectionUpdatesNoteRequest(server string, packagePolicyId string, body CreateUpdateProtectionUpdatesNoteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateUpdateProtectionUpdatesNoteRequestWithBody(server, packagePolicyId, "application/json", bodyReader) +} + +// NewCreateUpdateProtectionUpdatesNoteRequestWithBody generates requests for CreateUpdateProtectionUpdatesNote with any type of body +func NewCreateUpdateProtectionUpdatesNoteRequestWithBody(server string, packagePolicyId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "package_policy_id", runtime.ParamLocationPath, packagePolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint/protection_updates_note/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateEndpointListRequest generates requests for CreateEndpointList +func NewCreateEndpointListRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint_list") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteEndpointListItemRequest generates requests for DeleteEndpointListItem +func NewDeleteEndpointListItemRequest(server string, params *DeleteEndpointListItemParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint_list/items") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ItemId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "item_id", runtime.ParamLocationQuery, *params.ItemId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadEndpointListItemRequest generates requests for ReadEndpointListItem +func NewReadEndpointListItemRequest(server string, params *ReadEndpointListItemParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint_list/items") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ItemId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "item_id", runtime.ParamLocationQuery, *params.ItemId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateEndpointListItemRequest calls the generic CreateEndpointListItem builder with application/json body +func NewCreateEndpointListItemRequest(server string, body CreateEndpointListItemJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateEndpointListItemRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateEndpointListItemRequestWithBody generates requests for CreateEndpointListItem with any type of body +func NewCreateEndpointListItemRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint_list/items") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewUpdateEndpointListItemRequest calls the generic UpdateEndpointListItem builder with application/json body +func NewUpdateEndpointListItemRequest(server string, body UpdateEndpointListItemJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateEndpointListItemRequestWithBody(server, "application/json", bodyReader) +} + +// NewUpdateEndpointListItemRequestWithBody generates requests for UpdateEndpointListItem with any type of body +func NewUpdateEndpointListItemRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint_list/items") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindEndpointListItemsRequest generates requests for FindEndpointListItems +func NewFindEndpointListItemsRequest(server string, params *FindEndpointListItemsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/endpoint_list/items/_find") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteMonitoringEngineRequest generates requests for DeleteMonitoringEngine +func NewDeleteMonitoringEngineRequest(server string, params *DeleteMonitoringEngineParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/engine/delete") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Data != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "data", runtime.ParamLocationQuery, *params.Data); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDisableMonitoringEngineRequest generates requests for DisableMonitoringEngine +func NewDisableMonitoringEngineRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/engine/disable") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewInitMonitoringEngineRequest generates requests for InitMonitoringEngine +func NewInitMonitoringEngineRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/engine/init") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewScheduleMonitoringEngineRequest generates requests for ScheduleMonitoringEngine +func NewScheduleMonitoringEngineRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/engine/schedule_now") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPrivMonHealthRequest generates requests for PrivMonHealth +func NewPrivMonHealthRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/privileges/health") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPrivMonPrivilegesRequest generates requests for PrivMonPrivileges +func NewPrivMonPrivilegesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/privileges/privileges") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreatePrivMonUserRequest calls the generic CreatePrivMonUser builder with application/json body +func NewCreatePrivMonUserRequest(server string, body CreatePrivMonUserJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreatePrivMonUserRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreatePrivMonUserRequestWithBody generates requests for CreatePrivMonUser with any type of body +func NewCreatePrivMonUserRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/users") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPrivmonBulkUploadUsersCSVRequestWithBody generates requests for PrivmonBulkUploadUsersCSV with any type of body +func NewPrivmonBulkUploadUsersCSVRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/users/_csv") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewListPrivMonUsersRequest generates requests for ListPrivMonUsers +func NewListPrivMonUsersRequest(server string, params *ListPrivMonUsersParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/users/list") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Kql != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kql", runtime.ParamLocationQuery, *params.Kql); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeletePrivMonUserRequest generates requests for DeletePrivMonUser +func NewDeletePrivMonUserRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/users/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdatePrivMonUserRequest calls the generic UpdatePrivMonUser builder with application/json body +func NewUpdatePrivMonUserRequest(server string, id string, body UpdatePrivMonUserJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdatePrivMonUserRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewUpdatePrivMonUserRequestWithBody generates requests for UpdatePrivMonUser with any type of body +func NewUpdatePrivMonUserRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/monitoring/users/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewInstallPrivilegedAccessDetectionPackageRequest generates requests for InstallPrivilegedAccessDetectionPackage +func NewInstallPrivilegedAccessDetectionPackageRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/privileged_user_monitoring/pad/install") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetPrivilegedAccessDetectionPackageStatusRequest generates requests for GetPrivilegedAccessDetectionPackageStatus +func NewGetPrivilegedAccessDetectionPackageStatusRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_analytics/privileged_user_monitoring/pad/status") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewInitEntityStoreRequest calls the generic InitEntityStore builder with application/json body +func NewInitEntityStoreRequest(server string, body InitEntityStoreJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewInitEntityStoreRequestWithBody(server, "application/json", bodyReader) +} + +// NewInitEntityStoreRequestWithBody generates requests for InitEntityStore with any type of body +func NewInitEntityStoreRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_store/enable") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewListEntityEnginesRequest generates requests for ListEntityEngines +func NewListEntityEnginesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_store/engines") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewApplyEntityEngineDataviewIndicesRequest generates requests for ApplyEntityEngineDataviewIndices +func NewApplyEntityEngineDataviewIndicesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_store/engines/apply_dataview_indices") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteEntityEngineRequest generates requests for DeleteEntityEngine +func NewDeleteEntityEngineRequest(server string, entityType SecurityEntityAnalyticsAPIEntityType, params *DeleteEntityEngineParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "entityType", runtime.ParamLocationPath, entityType) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_store/engines/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Data != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "data", runtime.ParamLocationQuery, *params.Data); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetEntityEngineRequest generates requests for GetEntityEngine +func NewGetEntityEngineRequest(server string, entityType SecurityEntityAnalyticsAPIEntityType) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "entityType", runtime.ParamLocationPath, entityType) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_store/engines/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewInitEntityEngineRequest calls the generic InitEntityEngine builder with application/json body +func NewInitEntityEngineRequest(server string, entityType SecurityEntityAnalyticsAPIEntityType, body InitEntityEngineJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewInitEntityEngineRequestWithBody(server, entityType, "application/json", bodyReader) +} + +// NewInitEntityEngineRequestWithBody generates requests for InitEntityEngine with any type of body +func NewInitEntityEngineRequestWithBody(server string, entityType SecurityEntityAnalyticsAPIEntityType, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "entityType", runtime.ParamLocationPath, entityType) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_store/engines/%s/init", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewStartEntityEngineRequest generates requests for StartEntityEngine +func NewStartEntityEngineRequest(server string, entityType SecurityEntityAnalyticsAPIEntityType) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "entityType", runtime.ParamLocationPath, entityType) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_store/engines/%s/start", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewStopEntityEngineRequest generates requests for StopEntityEngine +func NewStopEntityEngineRequest(server string, entityType SecurityEntityAnalyticsAPIEntityType) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "entityType", runtime.ParamLocationPath, entityType) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_store/engines/%s/stop", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewListEntitiesRequest generates requests for ListEntities +func NewListEntitiesRequest(server string, params *ListEntitiesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/entity_store/entities/list") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.FilterQuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filterQuery", runtime.ParamLocationQuery, *params.FilterQuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "entity_types", runtime.ParamLocationQuery, params.EntityTypes); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetEntityStoreStatusRequest generates requests for GetEntityStoreStatus +func NewGetEntityStoreStatusRequest(server string, params *GetEntityStoreStatusParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := 
fmt.Sprintf("/api/entity_store/status") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.IncludeComponents != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "include_components", runtime.ParamLocationQuery, *params.IncludeComponents); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteExceptionListRequest generates requests for DeleteExceptionList +func NewDeleteExceptionListRequest(server string, params *DeleteExceptionListParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ListId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, *params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.NamespaceType != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace_type", runtime.ParamLocationQuery, *params.NamespaceType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadExceptionListRequest generates requests for ReadExceptionList +func NewReadExceptionListRequest(server string, params *ReadExceptionListParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ListId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, *params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.NamespaceType != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace_type", runtime.ParamLocationQuery, *params.NamespaceType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateExceptionListRequest calls the generic CreateExceptionList builder with application/json body +func NewCreateExceptionListRequest(server string, body CreateExceptionListJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateExceptionListRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateExceptionListRequestWithBody generates requests for CreateExceptionList with any type of body +func NewCreateExceptionListRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewUpdateExceptionListRequest calls the generic UpdateExceptionList builder with application/json body +func NewUpdateExceptionListRequest(server string, body UpdateExceptionListJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateExceptionListRequestWithBody(server, "application/json", bodyReader) +} + +// NewUpdateExceptionListRequestWithBody generates requests for UpdateExceptionList with any type of body +func NewUpdateExceptionListRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDuplicateExceptionListRequest generates requests for DuplicateExceptionList +func NewDuplicateExceptionListRequest(server string, params *DuplicateExceptionListParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists/_duplicate") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace_type", runtime.ParamLocationQuery, params.NamespaceType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "include_expired_exceptions", runtime.ParamLocationQuery, params.IncludeExpiredExceptions); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewExportExceptionListRequest generates requests for ExportExceptionList +func NewExportExceptionListRequest(server string, params *ExportExceptionListParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists/_export") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace_type", runtime.ParamLocationQuery, params.NamespaceType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "include_expired_exceptions", runtime.ParamLocationQuery, params.IncludeExpiredExceptions); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewFindExceptionListsRequest generates requests for FindExceptionLists +func NewFindExceptionListsRequest(server string, params *FindExceptionListsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists/_find") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.NamespaceType != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace_type", runtime.ParamLocationQuery, *params.NamespaceType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewImportExceptionListRequestWithBody generates requests for ImportExceptionList with any type of body +func NewImportExceptionListRequestWithBody(server string, params *ImportExceptionListParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists/_import") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Overwrite != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "overwrite", runtime.ParamLocationQuery, *params.Overwrite); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.AsNewList != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "as_new_list", runtime.ParamLocationQuery, *params.AsNewList); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteExceptionListItemRequest generates requests for DeleteExceptionListItem +func NewDeleteExceptionListItemRequest(server string, params *DeleteExceptionListItemParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists/items") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ItemId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "item_id", runtime.ParamLocationQuery, *params.ItemId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.NamespaceType != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace_type", runtime.ParamLocationQuery, *params.NamespaceType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadExceptionListItemRequest generates requests for ReadExceptionListItem +func NewReadExceptionListItemRequest(server string, params *ReadExceptionListItemParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists/items") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ItemId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "item_id", runtime.ParamLocationQuery, *params.ItemId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.NamespaceType != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace_type", runtime.ParamLocationQuery, *params.NamespaceType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateExceptionListItemRequest calls the generic CreateExceptionListItem builder with application/json body +func NewCreateExceptionListItemRequest(server string, body CreateExceptionListItemJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateExceptionListItemRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateExceptionListItemRequestWithBody generates requests for CreateExceptionListItem with any type of body +func NewCreateExceptionListItemRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists/items") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath
+	}
+
+	queryURL, err := serverURL.Parse(operationPath)
+	if err != nil {
+		return nil, err
+	}
+
+	req, err := http.NewRequest("POST", queryURL.String(), body)
+	if err != nil {
+		return nil, err
+	}
+
+	req.Header.Add("Content-Type", contentType)
+
+	return req, nil
+}
+
+// NewUpdateExceptionListItemRequest calls the generic UpdateExceptionListItem builder with application/json body
+func NewUpdateExceptionListItemRequest(server string, body UpdateExceptionListItemJSONRequestBody) (*http.Request, error) {
+	var bodyReader io.Reader
+	buf, err := json.Marshal(body)
+	if err != nil {
+		return nil, err
+	}
+	bodyReader = bytes.NewReader(buf)
+	return NewUpdateExceptionListItemRequestWithBody(server, "application/json", bodyReader)
+}
+
+// NewUpdateExceptionListItemRequestWithBody generates requests for UpdateExceptionListItem with any type of body
+func NewUpdateExceptionListItemRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) {
+	var err error
+
+	serverURL, err := url.Parse(server)
+	if err != nil {
+		return nil, err
+	}
+
+	operationPath := fmt.Sprintf("/api/exception_lists/items")
+	if operationPath[0] == '/' {
+		operationPath = "." + operationPath
+	}
+
+	queryURL, err := serverURL.Parse(operationPath)
+	if err != nil {
+		return nil, err
+	}
+
+	req, err := http.NewRequest("PUT", queryURL.String(), body)
+	if err != nil {
+		return nil, err
+	}
+
+	req.Header.Add("Content-Type", contentType)
+
+	return req, nil
+}
+
+// NewFindExceptionListItemsRequest generates requests for FindExceptionListItems
+func NewFindExceptionListItemsRequest(server string, params *FindExceptionListItemsParams) (*http.Request, error) {
+	var err error
+
+	serverURL, err := url.Parse(server)
+	if err != nil {
+		return nil, err
+	}
+
+	operationPath := fmt.Sprintf("/api/exception_lists/items/_find")
+	if operationPath[0] == '/' {
+		operationPath = "."
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.NamespaceType != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace_type", runtime.ParamLocationQuery, *params.NamespaceType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Search != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "search", runtime.ParamLocationQuery, *params.Search); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadExceptionListSummaryRequest generates requests for ReadExceptionListSummary +func NewReadExceptionListSummaryRequest(server string, params 
*ReadExceptionListSummaryParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exception_lists/summary") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ListId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, *params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.NamespaceType != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "namespace_type", runtime.ParamLocationQuery, *params.NamespaceType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateSharedExceptionListRequest calls the generic CreateSharedExceptionList builder with application/json body +func NewCreateSharedExceptionListRequest(server string, body CreateSharedExceptionListJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateSharedExceptionListRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateSharedExceptionListRequestWithBody generates requests for CreateSharedExceptionList with any type of body +func NewCreateSharedExceptionListRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/exceptions/shared") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFeaturesRequest generates requests for GetFeatures +func NewGetFeaturesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/features") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentDownloadSourcesRequest generates requests for GetFleetAgentDownloadSources +func NewGetFleetAgentDownloadSourcesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_download_sources") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetAgentDownloadSourcesRequest calls the generic PostFleetAgentDownloadSources builder with application/json body +func NewPostFleetAgentDownloadSourcesRequest(server string, body PostFleetAgentDownloadSourcesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentDownloadSourcesRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetAgentDownloadSourcesRequestWithBody generates requests for PostFleetAgentDownloadSources with any type of body +func NewPostFleetAgentDownloadSourcesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_download_sources") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteFleetAgentDownloadSourcesSourceidRequest generates requests for DeleteFleetAgentDownloadSourcesSourceid +func NewDeleteFleetAgentDownloadSourcesSourceidRequest(server string, sourceId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "sourceId", runtime.ParamLocationPath, sourceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_download_sources/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentDownloadSourcesSourceidRequest generates requests for GetFleetAgentDownloadSourcesSourceid +func NewGetFleetAgentDownloadSourcesSourceidRequest(server string, sourceId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "sourceId", runtime.ParamLocationPath, sourceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_download_sources/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutFleetAgentDownloadSourcesSourceidRequest calls the generic PutFleetAgentDownloadSourcesSourceid builder with application/json body +func NewPutFleetAgentDownloadSourcesSourceidRequest(server string, sourceId string, body PutFleetAgentDownloadSourcesSourceidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetAgentDownloadSourcesSourceidRequestWithBody(server, sourceId, "application/json", bodyReader) +} + +// NewPutFleetAgentDownloadSourcesSourceidRequestWithBody generates requests for PutFleetAgentDownloadSourcesSourceid with any type of body +func NewPutFleetAgentDownloadSourcesSourceidRequestWithBody(server string, sourceId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "sourceId", runtime.ParamLocationPath, sourceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_download_sources/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetAgentPoliciesRequest generates requests for GetFleetAgentPolicies +func NewGetFleetAgentPoliciesRequest(server string, params *GetFleetAgentPoliciesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ShowUpgradeable != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showUpgradeable", runtime.ParamLocationQuery, *params.ShowUpgradeable); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.NoAgentCount != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "noAgentCount", runtime.ParamLocationQuery, *params.NoAgentCount); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.WithAgentCount != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withAgentCount", runtime.ParamLocationQuery, *params.WithAgentCount); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Full != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "full", runtime.ParamLocationQuery, *params.Full); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } 
else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetAgentPoliciesRequest calls the generic PostFleetAgentPolicies builder with application/json body +func NewPostFleetAgentPoliciesRequest(server string, params *PostFleetAgentPoliciesParams, body PostFleetAgentPoliciesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentPoliciesRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostFleetAgentPoliciesRequestWithBody generates requests for PostFleetAgentPolicies with any type of body +func NewPostFleetAgentPoliciesRequestWithBody(server string, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.SysMonitoring != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sys_monitoring", runtime.ParamLocationQuery, *params.SysMonitoring); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentPoliciesBulkGetRequest calls the generic PostFleetAgentPoliciesBulkGet builder with application/json body +func NewPostFleetAgentPoliciesBulkGetRequest(server string, params *PostFleetAgentPoliciesBulkGetParams, body PostFleetAgentPoliciesBulkGetJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentPoliciesBulkGetRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostFleetAgentPoliciesBulkGetRequestWithBody generates requests for PostFleetAgentPoliciesBulkGet with any type of body +func NewPostFleetAgentPoliciesBulkGetRequestWithBody(server string, params *PostFleetAgentPoliciesBulkGetParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies/_bulk_get") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentPoliciesDeleteRequest calls the generic PostFleetAgentPoliciesDelete builder with application/json body +func NewPostFleetAgentPoliciesDeleteRequest(server string, body PostFleetAgentPoliciesDeleteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentPoliciesDeleteRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetAgentPoliciesDeleteRequestWithBody generates requests for PostFleetAgentPoliciesDelete with any type of body +func NewPostFleetAgentPoliciesDeleteRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies/delete") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentPoliciesOutputsRequest calls the generic PostFleetAgentPoliciesOutputs builder with application/json body +func NewPostFleetAgentPoliciesOutputsRequest(server string, body PostFleetAgentPoliciesOutputsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentPoliciesOutputsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetAgentPoliciesOutputsRequestWithBody generates requests for PostFleetAgentPoliciesOutputs with any type of body +func NewPostFleetAgentPoliciesOutputsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies/outputs") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetAgentPoliciesAgentpolicyidRequest generates requests for GetFleetAgentPoliciesAgentpolicyid +func NewGetFleetAgentPoliciesAgentpolicyidRequest(server string, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentPolicyId", runtime.ParamLocationPath, agentPolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutFleetAgentPoliciesAgentpolicyidRequest calls the generic PutFleetAgentPoliciesAgentpolicyid builder with application/json body +func NewPutFleetAgentPoliciesAgentpolicyidRequest(server string, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, body PutFleetAgentPoliciesAgentpolicyidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetAgentPoliciesAgentpolicyidRequestWithBody(server, agentPolicyId, params, "application/json", bodyReader) +} + +// NewPutFleetAgentPoliciesAgentpolicyidRequestWithBody generates requests for PutFleetAgentPoliciesAgentpolicyid with any type of body +func NewPutFleetAgentPoliciesAgentpolicyidRequestWithBody(server string, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentPolicyId", runtime.ParamLocationPath, agentPolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusRequest generates requests for GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatus +func NewGetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusRequest(server string, agentPolicyId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentPolicyId", runtime.ParamLocationPath, agentPolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s/auto_upgrade_agents_status", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetAgentPoliciesAgentpolicyidCopyRequest calls the generic PostFleetAgentPoliciesAgentpolicyidCopy builder with application/json body +func NewPostFleetAgentPoliciesAgentpolicyidCopyRequest(server string, agentPolicyId string, params *PostFleetAgentPoliciesAgentpolicyidCopyParams, body PostFleetAgentPoliciesAgentpolicyidCopyJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentPoliciesAgentpolicyidCopyRequestWithBody(server, agentPolicyId, params, "application/json", bodyReader) +} + +// NewPostFleetAgentPoliciesAgentpolicyidCopyRequestWithBody generates requests for PostFleetAgentPoliciesAgentpolicyidCopy with any type of body +func NewPostFleetAgentPoliciesAgentpolicyidCopyRequestWithBody(server string, agentPolicyId string, params *PostFleetAgentPoliciesAgentpolicyidCopyParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentPolicyId", runtime.ParamLocationPath, agentPolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s/copy", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetAgentPoliciesAgentpolicyidDownloadRequest generates requests for GetFleetAgentPoliciesAgentpolicyidDownload +func NewGetFleetAgentPoliciesAgentpolicyidDownloadRequest(server string, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidDownloadParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentPolicyId", runtime.ParamLocationPath, agentPolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s/download", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Download != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "download", runtime.ParamLocationQuery, *params.Download); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Standalone != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "standalone", runtime.ParamLocationQuery, *params.Standalone); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kubernetes != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kubernetes", runtime.ParamLocationQuery, *params.Kubernetes); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentPoliciesAgentpolicyidFullRequest generates requests for GetFleetAgentPoliciesAgentpolicyidFull +func NewGetFleetAgentPoliciesAgentpolicyidFullRequest(server string, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidFullParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentPolicyId", runtime.ParamLocationPath, agentPolicyId) + if err != nil { + return nil, err + } + + serverURL, err := 
url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s/full", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Download != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "download", runtime.ParamLocationQuery, *params.Download); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Standalone != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "standalone", runtime.ParamLocationQuery, *params.Standalone); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kubernetes != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kubernetes", runtime.ParamLocationQuery, *params.Kubernetes); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentPoliciesAgentpolicyidOutputsRequest generates requests for GetFleetAgentPoliciesAgentpolicyidOutputs +func NewGetFleetAgentPoliciesAgentpolicyidOutputsRequest(server string, agentPolicyId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentPolicyId", runtime.ParamLocationPath, agentPolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s/outputs", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentStatusRequest generates requests for GetFleetAgentStatus +func NewGetFleetAgentStatusRequest(server string, params *GetFleetAgentStatusParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_status") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.PolicyId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "policyId", runtime.ParamLocationQuery, *params.PolicyId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PolicyIds != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "policyIds", runtime.ParamLocationQuery, *params.PolicyIds); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentStatusDataRequest generates requests for GetFleetAgentStatusData +func NewGetFleetAgentStatusDataRequest(server string, params *GetFleetAgentStatusDataParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agent_status/data") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "agentsIds", runtime.ParamLocationQuery, params.AgentsIds); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.PkgName != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "pkgName", runtime.ParamLocationQuery, *params.PkgName); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PkgVersion != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "pkgVersion", runtime.ParamLocationQuery, *params.PkgVersion); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PreviewData != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "previewData", runtime.ParamLocationQuery, *params.PreviewData); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentsRequest generates requests for GetFleetAgents +func NewGetFleetAgentsRequest(server string, params *GetFleetAgentsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ShowAgentless != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showAgentless", runtime.ParamLocationQuery, *params.ShowAgentless); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ShowInactive != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showInactive", runtime.ParamLocationQuery, *params.ShowInactive); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.WithMetrics != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withMetrics", runtime.ParamLocationQuery, *params.WithMetrics); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ShowUpgradeable != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showUpgradeable", runtime.ParamLocationQuery, *params.ShowUpgradeable); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.GetStatusSummary != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "getStatusSummary", runtime.ParamLocationQuery, *params.GetStatusSummary); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); 
err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SearchAfter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "searchAfter", runtime.ParamLocationQuery, *params.SearchAfter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.OpenPit != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "openPit", runtime.ParamLocationQuery, *params.OpenPit); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PitId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "pitId", runtime.ParamLocationQuery, *params.PitId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PitKeepAlive != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "pitKeepAlive", runtime.ParamLocationQuery, *params.PitKeepAlive); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetAgentsRequest calls the generic PostFleetAgents builder with application/json body +func NewPostFleetAgentsRequest(server string, body PostFleetAgentsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetAgentsRequestWithBody generates requests for PostFleetAgents with any type of body +func NewPostFleetAgentsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetAgentsActionStatusRequest generates requests for GetFleetAgentsActionStatus +func NewGetFleetAgentsActionStatusRequest(server string, params *GetFleetAgentsActionStatusParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/action_status") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Date != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "date", runtime.ParamLocationQuery, *params.Date); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Latest != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "latest", runtime.ParamLocationQuery, *params.Latest); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ErrorSize != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "errorSize", runtime.ParamLocationQuery, *params.ErrorSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetAgentsActionsActionidCancelRequest generates requests for PostFleetAgentsActionsActionidCancel +func NewPostFleetAgentsActionsActionidCancelRequest(server string, actionId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "actionId", runtime.ParamLocationPath, actionId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/actions/%s/cancel", pathParam0) + if 
operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentsAvailableVersionsRequest generates requests for GetFleetAgentsAvailableVersions +func NewGetFleetAgentsAvailableVersionsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/available_versions") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetAgentsBulkReassignRequest calls the generic PostFleetAgentsBulkReassign builder with application/json body +func NewPostFleetAgentsBulkReassignRequest(server string, body PostFleetAgentsBulkReassignJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsBulkReassignRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetAgentsBulkReassignRequestWithBody generates requests for PostFleetAgentsBulkReassign with any type of body +func NewPostFleetAgentsBulkReassignRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/bulk_reassign") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentsBulkRequestDiagnosticsRequest calls the generic PostFleetAgentsBulkRequestDiagnostics builder with application/json body +func NewPostFleetAgentsBulkRequestDiagnosticsRequest(server string, body PostFleetAgentsBulkRequestDiagnosticsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsBulkRequestDiagnosticsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetAgentsBulkRequestDiagnosticsRequestWithBody generates requests for PostFleetAgentsBulkRequestDiagnostics with any type of body +func NewPostFleetAgentsBulkRequestDiagnosticsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/bulk_request_diagnostics") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentsBulkUnenrollRequest calls the generic PostFleetAgentsBulkUnenroll builder with application/json body +func NewPostFleetAgentsBulkUnenrollRequest(server string, body PostFleetAgentsBulkUnenrollJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsBulkUnenrollRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetAgentsBulkUnenrollRequestWithBody generates requests for PostFleetAgentsBulkUnenroll with any type of body +func NewPostFleetAgentsBulkUnenrollRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/bulk_unenroll") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentsBulkUpdateAgentTagsRequest calls the generic PostFleetAgentsBulkUpdateAgentTags builder with application/json body +func NewPostFleetAgentsBulkUpdateAgentTagsRequest(server string, body PostFleetAgentsBulkUpdateAgentTagsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsBulkUpdateAgentTagsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetAgentsBulkUpdateAgentTagsRequestWithBody generates requests for PostFleetAgentsBulkUpdateAgentTags with any type of body +func NewPostFleetAgentsBulkUpdateAgentTagsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/bulk_update_agent_tags") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentsBulkUpgradeRequest calls the generic PostFleetAgentsBulkUpgrade builder with application/json body +func NewPostFleetAgentsBulkUpgradeRequest(server string, body PostFleetAgentsBulkUpgradeJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsBulkUpgradeRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetAgentsBulkUpgradeRequestWithBody generates requests for PostFleetAgentsBulkUpgrade with any type of body +func NewPostFleetAgentsBulkUpgradeRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/bulk_upgrade") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteFleetAgentsFilesFileidRequest generates requests for DeleteFleetAgentsFilesFileid +func NewDeleteFleetAgentsFilesFileidRequest(server string, fileId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "fileId", runtime.ParamLocationPath, fileId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/files/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentsFilesFileidFilenameRequest generates requests for GetFleetAgentsFilesFileidFilename +func NewGetFleetAgentsFilesFileidFilenameRequest(server string, fileId string, fileName string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "fileId", runtime.ParamLocationPath, fileId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "fileName", runtime.ParamLocationPath, fileName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/files/%s/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentsSetupRequest generates requests for GetFleetAgentsSetup +func NewGetFleetAgentsSetupRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/setup") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetAgentsSetupRequest generates requests for PostFleetAgentsSetup +func NewPostFleetAgentsSetupRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/setup") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentsTagsRequest generates requests for GetFleetAgentsTags +func NewGetFleetAgentsTagsRequest(server string, params *GetFleetAgentsTagsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/tags") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ShowInactive != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showInactive", runtime.ParamLocationQuery, *params.ShowInactive); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteFleetAgentsAgentidRequest generates requests for DeleteFleetAgentsAgentid +func NewDeleteFleetAgentsAgentidRequest(server string, agentId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentId", runtime.ParamLocationPath, agentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetAgentsAgentidRequest generates requests for GetFleetAgentsAgentid +func NewGetFleetAgentsAgentidRequest(server string, agentId string, params *GetFleetAgentsAgentidParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentId", runtime.ParamLocationPath, agentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.WithMetrics != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withMetrics", runtime.ParamLocationQuery, *params.WithMetrics); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutFleetAgentsAgentidRequest calls the generic PutFleetAgentsAgentid builder with application/json body +func NewPutFleetAgentsAgentidRequest(server string, agentId string, body PutFleetAgentsAgentidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetAgentsAgentidRequestWithBody(server, agentId, "application/json", bodyReader) +} + +// NewPutFleetAgentsAgentidRequestWithBody generates requests for PutFleetAgentsAgentid with any type of body +func NewPutFleetAgentsAgentidRequestWithBody(server string, agentId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentId", runtime.ParamLocationPath, agentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentsAgentidActionsRequest calls the generic PostFleetAgentsAgentidActions builder with application/json body +func NewPostFleetAgentsAgentidActionsRequest(server string, agentId string, body PostFleetAgentsAgentidActionsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsAgentidActionsRequestWithBody(server, agentId, "application/json", bodyReader) +} + +// NewPostFleetAgentsAgentidActionsRequestWithBody generates requests for PostFleetAgentsAgentidActions with any type of body +func NewPostFleetAgentsAgentidActionsRequestWithBody(server string, agentId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentId", runtime.ParamLocationPath, agentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/%s/actions", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentsAgentidReassignRequest calls the generic PostFleetAgentsAgentidReassign builder with application/json body +func NewPostFleetAgentsAgentidReassignRequest(server string, agentId string, body PostFleetAgentsAgentidReassignJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsAgentidReassignRequestWithBody(server, agentId, "application/json", bodyReader) +} + +// NewPostFleetAgentsAgentidReassignRequestWithBody generates requests for PostFleetAgentsAgentidReassign with any type of body +func NewPostFleetAgentsAgentidReassignRequestWithBody(server string, agentId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentId", runtime.ParamLocationPath, agentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/%s/reassign", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentsAgentidRequestDiagnosticsRequest calls the generic PostFleetAgentsAgentidRequestDiagnostics builder with application/json body +func NewPostFleetAgentsAgentidRequestDiagnosticsRequest(server string, agentId string, body PostFleetAgentsAgentidRequestDiagnosticsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsAgentidRequestDiagnosticsRequestWithBody(server, agentId, "application/json", bodyReader) +} + +// NewPostFleetAgentsAgentidRequestDiagnosticsRequestWithBody generates requests for PostFleetAgentsAgentidRequestDiagnostics with any type of body +func NewPostFleetAgentsAgentidRequestDiagnosticsRequestWithBody(server string, agentId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentId", runtime.ParamLocationPath, agentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/%s/request_diagnostics", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentsAgentidUnenrollRequest calls the generic PostFleetAgentsAgentidUnenroll builder with application/json body +func NewPostFleetAgentsAgentidUnenrollRequest(server string, agentId string, body PostFleetAgentsAgentidUnenrollJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsAgentidUnenrollRequestWithBody(server, agentId, "application/json", bodyReader) +} + +// NewPostFleetAgentsAgentidUnenrollRequestWithBody generates requests for PostFleetAgentsAgentidUnenroll with any type of body +func NewPostFleetAgentsAgentidUnenrollRequestWithBody(server string, agentId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentId", runtime.ParamLocationPath, agentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/%s/unenroll", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetAgentsAgentidUpgradeRequest calls the generic PostFleetAgentsAgentidUpgrade builder with application/json body +func NewPostFleetAgentsAgentidUpgradeRequest(server string, agentId string, body PostFleetAgentsAgentidUpgradeJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetAgentsAgentidUpgradeRequestWithBody(server, agentId, "application/json", bodyReader) +} + +// NewPostFleetAgentsAgentidUpgradeRequestWithBody generates requests for PostFleetAgentsAgentidUpgrade with any type of body +func NewPostFleetAgentsAgentidUpgradeRequestWithBody(server string, agentId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentId", runtime.ParamLocationPath, agentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/%s/upgrade", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetAgentsAgentidUploadsRequest generates requests for GetFleetAgentsAgentidUploads +func NewGetFleetAgentsAgentidUploadsRequest(server string, agentId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentId", runtime.ParamLocationPath, agentId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/agents/%s/uploads", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetCheckPermissionsRequest generates requests for GetFleetCheckPermissions +func NewGetFleetCheckPermissionsRequest(server string, params *GetFleetCheckPermissionsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/check-permissions") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.FleetServerSetup != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "fleetServerSetup", runtime.ParamLocationQuery, *params.FleetServerSetup); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetDataStreamsRequest generates requests for GetFleetDataStreams +func NewGetFleetDataStreamsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/data_streams") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetEnrollmentApiKeysRequest generates requests for GetFleetEnrollmentApiKeys +func NewGetFleetEnrollmentApiKeysRequest(server string, params *GetFleetEnrollmentApiKeysParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/enrollment_api_keys") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetEnrollmentApiKeysRequest calls the generic PostFleetEnrollmentApiKeys builder with application/json body +func NewPostFleetEnrollmentApiKeysRequest(server string, body PostFleetEnrollmentApiKeysJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetEnrollmentApiKeysRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetEnrollmentApiKeysRequestWithBody generates requests for PostFleetEnrollmentApiKeys with any type of body +func NewPostFleetEnrollmentApiKeysRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/enrollment_api_keys") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteFleetEnrollmentApiKeysKeyidRequest generates requests for DeleteFleetEnrollmentApiKeysKeyid +func NewDeleteFleetEnrollmentApiKeysKeyidRequest(server string, keyId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "keyId", runtime.ParamLocationPath, keyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/enrollment_api_keys/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetEnrollmentApiKeysKeyidRequest generates requests for GetFleetEnrollmentApiKeysKeyid +func NewGetFleetEnrollmentApiKeysKeyidRequest(server string, keyId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "keyId", runtime.ParamLocationPath, keyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/enrollment_api_keys/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetEpmBulkAssetsRequest calls the generic PostFleetEpmBulkAssets builder with application/json body +func NewPostFleetEpmBulkAssetsRequest(server string, body PostFleetEpmBulkAssetsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetEpmBulkAssetsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetEpmBulkAssetsRequestWithBody generates requests for PostFleetEpmBulkAssets with any type of body +func NewPostFleetEpmBulkAssetsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/bulk_assets") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetEpmCategoriesRequest generates requests for GetFleetEpmCategories +func NewGetFleetEpmCategoriesRequest(server string, params *GetFleetEpmCategoriesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/categories") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Prerelease != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.IncludePolicyTemplates != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "include_policy_templates", runtime.ParamLocationQuery, *params.IncludePolicyTemplates); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetEpmCustomIntegrationsRequest calls the generic PostFleetEpmCustomIntegrations builder with application/json body +func NewPostFleetEpmCustomIntegrationsRequest(server string, body PostFleetEpmCustomIntegrationsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetEpmCustomIntegrationsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetEpmCustomIntegrationsRequestWithBody generates requests for PostFleetEpmCustomIntegrations with any type of body +func NewPostFleetEpmCustomIntegrationsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/custom_integrations") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPutFleetEpmCustomIntegrationsPkgnameRequest calls the generic PutFleetEpmCustomIntegrationsPkgname builder with application/json body +func NewPutFleetEpmCustomIntegrationsPkgnameRequest(server string, pkgName string, body PutFleetEpmCustomIntegrationsPkgnameJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetEpmCustomIntegrationsPkgnameRequestWithBody(server, pkgName, "application/json", bodyReader) +} + +// NewPutFleetEpmCustomIntegrationsPkgnameRequestWithBody generates requests for PutFleetEpmCustomIntegrationsPkgname with any type of body +func NewPutFleetEpmCustomIntegrationsPkgnameRequestWithBody(server string, pkgName string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/custom_integrations/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetEpmDataStreamsRequest generates requests for GetFleetEpmDataStreams +func NewGetFleetEpmDataStreamsRequest(server string, params *GetFleetEpmDataStreamsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/data_streams") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Type != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "type", runtime.ParamLocationQuery, *params.Type); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.DatasetQuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "datasetQuery", runtime.ParamLocationQuery, *params.DatasetQuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.UncategorisedOnly != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "uncategorisedOnly", runtime.ParamLocationQuery, *params.UncategorisedOnly); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetEpmPackagesRequest generates requests for GetFleetEpmPackages +func NewGetFleetEpmPackagesRequest(server string, params *GetFleetEpmPackagesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Category != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "category", runtime.ParamLocationQuery, *params.Category); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Prerelease != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ExcludeInstallStatus != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "excludeInstallStatus", runtime.ParamLocationQuery, *params.ExcludeInstallStatus); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.WithPackagePoliciesCount != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withPackagePoliciesCount", runtime.ParamLocationQuery, *params.WithPackagePoliciesCount); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetEpmPackagesRequestWithBody generates requests for PostFleetEpmPackages with any type of body +func NewPostFleetEpmPackagesRequestWithBody(server string, params *PostFleetEpmPackagesParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.IgnoreMappingUpdateErrors != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreMappingUpdateErrors", runtime.ParamLocationQuery, *params.IgnoreMappingUpdateErrors); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SkipDataStreamRollover != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "skipDataStreamRollover", runtime.ParamLocationQuery, *params.SkipDataStreamRollover); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetEpmPackagesBulkRequest calls the generic PostFleetEpmPackagesBulk builder with application/json body +func NewPostFleetEpmPackagesBulkRequest(server string, params *PostFleetEpmPackagesBulkParams, body PostFleetEpmPackagesBulkJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetEpmPackagesBulkRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostFleetEpmPackagesBulkRequestWithBody generates requests for PostFleetEpmPackagesBulk with any type of body +func NewPostFleetEpmPackagesBulkRequestWithBody(server string, params *PostFleetEpmPackagesBulkParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/_bulk") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Prerelease != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetEpmPackagesBulkUninstallRequest calls the generic PostFleetEpmPackagesBulkUninstall builder with application/json body +func NewPostFleetEpmPackagesBulkUninstallRequest(server string, body PostFleetEpmPackagesBulkUninstallJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetEpmPackagesBulkUninstallRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetEpmPackagesBulkUninstallRequestWithBody generates requests for PostFleetEpmPackagesBulkUninstall with any type of body +func NewPostFleetEpmPackagesBulkUninstallRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/_bulk_uninstall") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetEpmPackagesBulkUninstallTaskidRequest generates requests for GetFleetEpmPackagesBulkUninstallTaskid +func NewGetFleetEpmPackagesBulkUninstallTaskidRequest(server string, taskId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskId", runtime.ParamLocationPath, taskId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/_bulk_uninstall/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetEpmPackagesBulkUpgradeRequest calls the generic PostFleetEpmPackagesBulkUpgrade builder with application/json body +func NewPostFleetEpmPackagesBulkUpgradeRequest(server string, body PostFleetEpmPackagesBulkUpgradeJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetEpmPackagesBulkUpgradeRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetEpmPackagesBulkUpgradeRequestWithBody generates requests for PostFleetEpmPackagesBulkUpgrade with any type of body +func NewPostFleetEpmPackagesBulkUpgradeRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/_bulk_upgrade") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetEpmPackagesBulkUpgradeTaskidRequest generates requests for GetFleetEpmPackagesBulkUpgradeTaskid +func NewGetFleetEpmPackagesBulkUpgradeTaskidRequest(server string, taskId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "taskId", runtime.ParamLocationPath, taskId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/_bulk_upgrade/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetEpmPackagesInstalledRequest generates requests for GetFleetEpmPackagesInstalled +func NewGetFleetEpmPackagesInstalledRequest(server string, params *GetFleetEpmPackagesInstalledParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/installed") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.DataStreamType != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "dataStreamType", runtime.ParamLocationQuery, *params.DataStreamType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ShowOnlyActiveDataStreams != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showOnlyActiveDataStreams", runtime.ParamLocationQuery, *params.ShowOnlyActiveDataStreams); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.NameQuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "nameQuery", runtime.ParamLocationQuery, *params.NameQuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SearchAfter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "searchAfter", runtime.ParamLocationQuery, *params.SearchAfter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetEpmPackagesLimitedRequest generates requests for GetFleetEpmPackagesLimited +func NewGetFleetEpmPackagesLimitedRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/limited") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetEpmPackagesPkgnameStatsRequest generates requests for GetFleetEpmPackagesPkgnameStats +func NewGetFleetEpmPackagesPkgnameStatsRequest(server string, pkgName string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/stats", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteFleetEpmPackagesPkgnamePkgversionRequest generates requests for DeleteFleetEpmPackagesPkgnamePkgversion +func NewDeleteFleetEpmPackagesPkgnamePkgversionRequest(server string, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Force != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "force", runtime.ParamLocationQuery, *params.Force); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetEpmPackagesPkgnamePkgversionRequest generates requests for GetFleetEpmPackagesPkgnamePkgversion +func NewGetFleetEpmPackagesPkgnamePkgversionRequest(server string, pkgName string, pkgVersion string, params *GetFleetEpmPackagesPkgnamePkgversionParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.IgnoreUnverified != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreUnverified", runtime.ParamLocationQuery, *params.IgnoreUnverified); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Prerelease != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Full != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "full", runtime.ParamLocationQuery, *params.Full); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.WithMetadata != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withMetadata", runtime.ParamLocationQuery, *params.WithMetadata); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// 
NewPostFleetEpmPackagesPkgnamePkgversionRequest calls the generic PostFleetEpmPackagesPkgnamePkgversion builder with application/json body +func NewPostFleetEpmPackagesPkgnamePkgversionRequest(server string, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, body PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetEpmPackagesPkgnamePkgversionRequestWithBody(server, pkgName, pkgVersion, params, "application/json", bodyReader) +} + +// NewPostFleetEpmPackagesPkgnamePkgversionRequestWithBody generates requests for PostFleetEpmPackagesPkgnamePkgversion with any type of body +func NewPostFleetEpmPackagesPkgnamePkgversionRequestWithBody(server string, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Prerelease != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.IgnoreMappingUpdateErrors != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreMappingUpdateErrors", runtime.ParamLocationQuery, *params.IgnoreMappingUpdateErrors); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SkipDataStreamRollover != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "skipDataStreamRollover", runtime.ParamLocationQuery, *params.SkipDataStreamRollover); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPutFleetEpmPackagesPkgnamePkgversionRequest calls the generic PutFleetEpmPackagesPkgnamePkgversion builder with application/json body +func NewPutFleetEpmPackagesPkgnamePkgversionRequest(server string, pkgName string, pkgVersion string, body 
PutFleetEpmPackagesPkgnamePkgversionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetEpmPackagesPkgnamePkgversionRequestWithBody(server, pkgName, pkgVersion, "application/json", bodyReader) +} + +// NewPutFleetEpmPackagesPkgnamePkgversionRequestWithBody generates requests for PutFleetEpmPackagesPkgnamePkgversion with any type of body +func NewPutFleetEpmPackagesPkgnamePkgversionRequestWithBody(server string, pkgName string, pkgVersion string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsRequest generates requests for DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssets +func NewDeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsRequest(server string, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s/datastream_assets", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "packagePolicyId", runtime.ParamLocationQuery, params.PackagePolicyId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsRequest generates requests for DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssets +func NewDeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsRequest(server string, pkgName string, pkgVersion string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s/kibana_assets", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetEpmPackagesPkgnamePkgversionKibanaAssetsRequest calls the generic PostFleetEpmPackagesPkgnamePkgversionKibanaAssets builder with application/json body +func NewPostFleetEpmPackagesPkgnamePkgversionKibanaAssetsRequest(server string, pkgName string, pkgVersion string, body PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetEpmPackagesPkgnamePkgversionKibanaAssetsRequestWithBody(server, pkgName, pkgVersion, "application/json", bodyReader) +} + +// NewPostFleetEpmPackagesPkgnamePkgversionKibanaAssetsRequestWithBody generates requests for PostFleetEpmPackagesPkgnamePkgversionKibanaAssets with any type of body +func NewPostFleetEpmPackagesPkgnamePkgversionKibanaAssetsRequestWithBody(server string, pkgName string, pkgVersion string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s/kibana_assets", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeRequest calls the generic PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorize builder with application/json body +func NewPostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeRequest(server string, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams, body PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeRequestWithBody(server, pkgName, pkgVersion, params, "application/json", bodyReader) +} + +// NewPostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeRequestWithBody generates requests for PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorize with any type of body +func NewPostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeRequestWithBody(server string, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s/transforms/authorize", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Prerelease != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetEpmPackagesPkgnamePkgversionFilepathRequest generates requests for GetFleetEpmPackagesPkgnamePkgversionFilepath +func NewGetFleetEpmPackagesPkgnamePkgversionFilepathRequest(server string, pkgName string, pkgVersion string, filePath string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) + if err != nil { + return nil, err + } + + var pathParam2 string + + pathParam2, err = runtime.StyleParamWithLocation("simple", false, "filePath", runtime.ParamLocationPath, filePath) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s/%s", pathParam0, pathParam1, pathParam2) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetEpmTemplatesPkgnamePkgversionInputsRequest generates requests for GetFleetEpmTemplatesPkgnamePkgversionInputs +func NewGetFleetEpmTemplatesPkgnamePkgversionInputsRequest(server string, pkgName string, pkgVersion string, params *GetFleetEpmTemplatesPkgnamePkgversionInputsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/templates/%s/%s/inputs", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Prerelease != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.IgnoreUnverified != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreUnverified", runtime.ParamLocationQuery, *params.IgnoreUnverified); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetEpmVerificationKeyIdRequest generates requests for GetFleetEpmVerificationKeyId +func NewGetFleetEpmVerificationKeyIdRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/epm/verification_key_id") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetFleetServerHostsRequest generates requests for GetFleetFleetServerHosts +func NewGetFleetFleetServerHostsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetFleetServerHostsRequest calls the generic PostFleetFleetServerHosts builder with application/json body +func NewPostFleetFleetServerHostsRequest(server string, body PostFleetFleetServerHostsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetFleetServerHostsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetFleetServerHostsRequestWithBody generates requests for PostFleetFleetServerHosts with any type of body +func NewPostFleetFleetServerHostsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteFleetFleetServerHostsItemidRequest generates requests for DeleteFleetFleetServerHostsItemid +func NewDeleteFleetFleetServerHostsItemidRequest(server string, itemId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetFleetServerHostsItemidRequest generates requests for GetFleetFleetServerHostsItemid +func NewGetFleetFleetServerHostsItemidRequest(server string, itemId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutFleetFleetServerHostsItemidRequest calls the generic PutFleetFleetServerHostsItemid builder with application/json body +func NewPutFleetFleetServerHostsItemidRequest(server string, itemId string, body PutFleetFleetServerHostsItemidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetFleetServerHostsItemidRequestWithBody(server, itemId, "application/json", bodyReader) +} + +// NewPutFleetFleetServerHostsItemidRequestWithBody generates requests for PutFleetFleetServerHostsItemid with any type of body +func NewPutFleetFleetServerHostsItemidRequestWithBody(server string, itemId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetHealthCheckRequest calls the generic PostFleetHealthCheck builder with application/json body +func NewPostFleetHealthCheckRequest(server string, body PostFleetHealthCheckJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetHealthCheckRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetHealthCheckRequestWithBody generates requests for PostFleetHealthCheck with any type of body +func NewPostFleetHealthCheckRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/health_check") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetKubernetesRequest generates requests for GetFleetKubernetes +func NewGetFleetKubernetesRequest(server string, params *GetFleetKubernetesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/kubernetes") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Download != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "download", runtime.ParamLocationQuery, *params.Download); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.FleetServer != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "fleetServer", runtime.ParamLocationQuery, *params.FleetServer); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.EnrolToken != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enrolToken", runtime.ParamLocationQuery, *params.EnrolToken); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetKubernetesDownloadRequest generates requests for GetFleetKubernetesDownload +func NewGetFleetKubernetesDownloadRequest(server string, params *GetFleetKubernetesDownloadParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/kubernetes/download") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Download != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "download", runtime.ParamLocationQuery, *params.Download); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.FleetServer != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "fleetServer", runtime.ParamLocationQuery, *params.FleetServer); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.EnrolToken != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "enrolToken", runtime.ParamLocationQuery, *params.EnrolToken); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetLogstashApiKeysRequest generates requests for PostFleetLogstashApiKeys +func NewPostFleetLogstashApiKeysRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/logstash_api_keys") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetMessageSigningServiceRotateKeyPairRequest generates requests for PostFleetMessageSigningServiceRotateKeyPair +func NewPostFleetMessageSigningServiceRotateKeyPairRequest(server string, params *PostFleetMessageSigningServiceRotateKeyPairParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/message_signing_service/rotate_key_pair") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Acknowledge != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "acknowledge", runtime.ParamLocationQuery, *params.Acknowledge); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetOutputsRequest generates requests for GetFleetOutputs +func NewGetFleetOutputsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/outputs") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetOutputsRequest calls the generic PostFleetOutputs builder with application/json body +func NewPostFleetOutputsRequest(server string, body PostFleetOutputsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetOutputsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetOutputsRequestWithBody generates requests for PostFleetOutputs with any type of body +func NewPostFleetOutputsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/outputs") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteFleetOutputsOutputidRequest generates requests for DeleteFleetOutputsOutputid +func NewDeleteFleetOutputsOutputidRequest(server string, outputId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "outputId", runtime.ParamLocationPath, outputId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/outputs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetOutputsOutputidRequest generates requests for GetFleetOutputsOutputid +func NewGetFleetOutputsOutputidRequest(server string, outputId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "outputId", runtime.ParamLocationPath, outputId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/outputs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutFleetOutputsOutputidRequest calls the generic PutFleetOutputsOutputid builder with application/json body +func NewPutFleetOutputsOutputidRequest(server string, outputId string, body PutFleetOutputsOutputidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetOutputsOutputidRequestWithBody(server, outputId, "application/json", bodyReader) +} + +// NewPutFleetOutputsOutputidRequestWithBody generates requests for PutFleetOutputsOutputid with any type of body +func NewPutFleetOutputsOutputidRequestWithBody(server string, outputId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "outputId", runtime.ParamLocationPath, outputId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/outputs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetOutputsOutputidHealthRequest generates requests for GetFleetOutputsOutputidHealth +func NewGetFleetOutputsOutputidHealthRequest(server string, outputId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "outputId", runtime.ParamLocationPath, outputId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/outputs/%s/health", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetPackagePoliciesRequest generates requests for GetFleetPackagePolicies +func NewGetFleetPackagePoliciesRequest(server string, params *GetFleetPackagePoliciesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/package_policies") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ShowUpgradeable != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showUpgradeable", runtime.ParamLocationQuery, *params.ShowUpgradeable); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.WithAgentCount != nil 
{ + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withAgentCount", runtime.ParamLocationQuery, *params.WithAgentCount); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetPackagePoliciesRequest calls the generic PostFleetPackagePolicies builder with application/json body +func NewPostFleetPackagePoliciesRequest(server string, params *PostFleetPackagePoliciesParams, body PostFleetPackagePoliciesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetPackagePoliciesRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostFleetPackagePoliciesRequestWithBody generates requests for PostFleetPackagePolicies with any type of body +func NewPostFleetPackagePoliciesRequestWithBody(server string, params *PostFleetPackagePoliciesParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/package_policies") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetPackagePoliciesBulkGetRequest calls the generic PostFleetPackagePoliciesBulkGet builder with application/json body +func NewPostFleetPackagePoliciesBulkGetRequest(server string, params *PostFleetPackagePoliciesBulkGetParams, body PostFleetPackagePoliciesBulkGetJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetPackagePoliciesBulkGetRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewPostFleetPackagePoliciesBulkGetRequestWithBody generates requests for PostFleetPackagePoliciesBulkGet with any type of body +func NewPostFleetPackagePoliciesBulkGetRequestWithBody(server string, params *PostFleetPackagePoliciesBulkGetParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/package_policies/_bulk_get") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetPackagePoliciesDeleteRequest calls the generic PostFleetPackagePoliciesDelete builder with application/json body +func NewPostFleetPackagePoliciesDeleteRequest(server string, body PostFleetPackagePoliciesDeleteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetPackagePoliciesDeleteRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetPackagePoliciesDeleteRequestWithBody generates requests for PostFleetPackagePoliciesDelete with any type of body +func NewPostFleetPackagePoliciesDeleteRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/package_policies/delete") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetPackagePoliciesUpgradeRequest calls the generic PostFleetPackagePoliciesUpgrade builder with application/json body +func NewPostFleetPackagePoliciesUpgradeRequest(server string, body PostFleetPackagePoliciesUpgradeJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetPackagePoliciesUpgradeRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetPackagePoliciesUpgradeRequestWithBody generates requests for PostFleetPackagePoliciesUpgrade with any type of body +func NewPostFleetPackagePoliciesUpgradeRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/package_policies/upgrade") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetPackagePoliciesUpgradeDryrunRequest calls the generic PostFleetPackagePoliciesUpgradeDryrun builder with application/json body +func NewPostFleetPackagePoliciesUpgradeDryrunRequest(server string, body PostFleetPackagePoliciesUpgradeDryrunJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetPackagePoliciesUpgradeDryrunRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetPackagePoliciesUpgradeDryrunRequestWithBody generates requests for PostFleetPackagePoliciesUpgradeDryrun with any type of body +func NewPostFleetPackagePoliciesUpgradeDryrunRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/package_policies/upgrade/dryrun") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteFleetPackagePoliciesPackagepolicyidRequest generates requests for DeleteFleetPackagePoliciesPackagepolicyid +func NewDeleteFleetPackagePoliciesPackagepolicyidRequest(server string, packagePolicyId string, params *DeleteFleetPackagePoliciesPackagepolicyidParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "packagePolicyId", runtime.ParamLocationPath, packagePolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/package_policies/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Force != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "force", runtime.ParamLocationQuery, *params.Force); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetPackagePoliciesPackagepolicyidRequest generates requests for GetFleetPackagePoliciesPackagepolicyid +func NewGetFleetPackagePoliciesPackagepolicyidRequest(server string, packagePolicyId string, params *GetFleetPackagePoliciesPackagepolicyidParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "packagePolicyId", runtime.ParamLocationPath, packagePolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/package_policies/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutFleetPackagePoliciesPackagepolicyidRequest calls the generic PutFleetPackagePoliciesPackagepolicyid builder with application/json body +func NewPutFleetPackagePoliciesPackagepolicyidRequest(server string, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, body PutFleetPackagePoliciesPackagepolicyidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetPackagePoliciesPackagepolicyidRequestWithBody(server, packagePolicyId, params, "application/json", bodyReader) +} + +// NewPutFleetPackagePoliciesPackagepolicyidRequestWithBody generates requests for PutFleetPackagePoliciesPackagepolicyid with any type of body +func NewPutFleetPackagePoliciesPackagepolicyidRequestWithBody(server string, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "packagePolicyId", runtime.ParamLocationPath, packagePolicyId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/package_policies/%s", pathParam0) + 
if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetProxiesRequest generates requests for GetFleetProxies +func NewGetFleetProxiesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/proxies") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetProxiesRequest calls the generic PostFleetProxies builder with application/json body +func NewPostFleetProxiesRequest(server string, body PostFleetProxiesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetProxiesRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetProxiesRequestWithBody generates requests for PostFleetProxies with any type of body +func NewPostFleetProxiesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/proxies") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteFleetProxiesItemidRequest generates requests for DeleteFleetProxiesItemid +func NewDeleteFleetProxiesItemidRequest(server string, itemId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/proxies/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetProxiesItemidRequest generates requests for GetFleetProxiesItemid +func NewGetFleetProxiesItemidRequest(server string, itemId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/proxies/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutFleetProxiesItemidRequest calls the generic PutFleetProxiesItemid builder with application/json body +func NewPutFleetProxiesItemidRequest(server string, itemId string, body PutFleetProxiesItemidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetProxiesItemidRequestWithBody(server, itemId, "application/json", bodyReader) +} + +// NewPutFleetProxiesItemidRequestWithBody generates requests for PutFleetProxiesItemid with any type of body +func NewPutFleetProxiesItemidRequestWithBody(server string, itemId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/proxies/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetRemoteSyncedIntegrationsStatusRequest generates requests for GetFleetRemoteSyncedIntegrationsStatus +func NewGetFleetRemoteSyncedIntegrationsStatusRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/remote_synced_integrations/status") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetRemoteSyncedIntegrationsOutputidRemoteStatusRequest generates requests for GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus +func NewGetFleetRemoteSyncedIntegrationsOutputidRemoteStatusRequest(server string, outputId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "outputId", runtime.ParamLocationPath, outputId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/remote_synced_integrations/%s/remote_status", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostFleetServiceTokensRequest calls the generic PostFleetServiceTokens builder with application/json body +func NewPostFleetServiceTokensRequest(server string, body PostFleetServiceTokensJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostFleetServiceTokensRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostFleetServiceTokensRequestWithBody generates requests for PostFleetServiceTokens with any type of body +func NewPostFleetServiceTokensRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/service_tokens") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetSettingsRequest generates requests for GetFleetSettings +func NewGetFleetSettingsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/settings") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutFleetSettingsRequest calls the generic PutFleetSettings builder with application/json body +func NewPutFleetSettingsRequest(server string, body PutFleetSettingsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetSettingsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPutFleetSettingsRequestWithBody generates requests for PutFleetSettings with any type of body +func NewPutFleetSettingsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/settings") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostFleetSetupRequest generates requests for PostFleetSetup +func NewPostFleetSetupRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/setup") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetSpaceSettingsRequest generates requests for GetFleetSpaceSettings +func NewGetFleetSpaceSettingsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/space_settings") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutFleetSpaceSettingsRequest calls the generic PutFleetSpaceSettings builder with application/json body +func NewPutFleetSpaceSettingsRequest(server string, body PutFleetSpaceSettingsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutFleetSpaceSettingsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPutFleetSpaceSettingsRequestWithBody generates requests for PutFleetSpaceSettings with any type of body +func NewPutFleetSpaceSettingsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/space_settings") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetFleetUninstallTokensRequest generates requests for GetFleetUninstallTokens +func NewGetFleetUninstallTokensRequest(server string, params *GetFleetUninstallTokensParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/uninstall_tokens") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.PolicyId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "policyId", runtime.ParamLocationQuery, *params.PolicyId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Search != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "search", runtime.ParamLocationQuery, *params.Search); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetFleetUninstallTokensUninstalltokenidRequest generates requests for GetFleetUninstallTokensUninstalltokenid +func NewGetFleetUninstallTokensUninstalltokenidRequest(server string, uninstallTokenId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "uninstallTokenId", runtime.ParamLocationPath, uninstallTokenId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/fleet/uninstall_tokens/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteListRequest generates requests for DeleteList +func NewDeleteListRequest(server string, params *DeleteListParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.DeleteReferences != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "deleteReferences", runtime.ParamLocationQuery, *params.DeleteReferences); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.IgnoreReferences != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreReferences", runtime.ParamLocationQuery, *params.IgnoreReferences); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadListRequest generates requests for ReadList +func NewReadListRequest(server string, params *ReadListParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPatchListRequest calls the generic PatchList builder with application/json body +func NewPatchListRequest(server string, body PatchListJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchListRequestWithBody(server, "application/json", bodyReader) +} + +// NewPatchListRequestWithBody generates requests for PatchList with any type of body +func NewPatchListRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateListRequest calls the generic CreateList builder with application/json body +func NewCreateListRequest(server string, body CreateListJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateListRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateListRequestWithBody generates requests for CreateList with any type of body +func NewCreateListRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewUpdateListRequest calls the generic UpdateList builder with application/json body +func NewUpdateListRequest(server string, body UpdateListJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateListRequestWithBody(server, "application/json", bodyReader) +} + +// NewUpdateListRequestWithBody generates requests for UpdateList with any type of body +func NewUpdateListRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindListsRequest generates requests for FindLists +func NewFindListsRequest(server string, params *FindListsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/_find") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Cursor != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "cursor", 
runtime.ParamLocationQuery, *params.Cursor); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteListIndexRequest generates requests for DeleteListIndex +func NewDeleteListIndexRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/index") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadListIndexRequest generates requests for ReadListIndex +func NewReadListIndexRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/index") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateListIndexRequest generates requests for CreateListIndex +func NewCreateListIndexRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/index") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteListItemRequest generates requests for DeleteListItem +func NewDeleteListItemRequest(server string, params *DeleteListItemParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/items") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ListId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, *params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Value != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "value", runtime.ParamLocationQuery, *params.Value); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Refresh != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "refresh", runtime.ParamLocationQuery, *params.Refresh); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadListItemRequest generates requests for ReadListItem +func NewReadListItemRequest(server string, params *ReadListItemParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/items") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ListId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, *params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Value != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "value", runtime.ParamLocationQuery, *params.Value); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPatchListItemRequest calls the generic PatchListItem builder with application/json body +func NewPatchListItemRequest(server string, body PatchListItemJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchListItemRequestWithBody(server, "application/json", bodyReader) +} + +// NewPatchListItemRequestWithBody generates requests for PatchListItem with any type of body +func NewPatchListItemRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/items") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateListItemRequest calls the generic CreateListItem builder with application/json body +func NewCreateListItemRequest(server string, body CreateListItemJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateListItemRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateListItemRequestWithBody generates requests for CreateListItem with any type of body +func NewCreateListItemRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/items") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewUpdateListItemRequest calls the generic UpdateListItem builder with application/json body +func NewUpdateListItemRequest(server string, body UpdateListItemJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateListItemRequestWithBody(server, "application/json", bodyReader) +} + +// NewUpdateListItemRequestWithBody generates requests for UpdateListItem with any type of body +func NewUpdateListItemRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/items") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewExportListItemsRequest generates requests for ExportListItems +func NewExportListItemsRequest(server string, params *ExportListItemsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/items/_export") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewFindListItemsRequest generates requests for FindListItems +func NewFindListItemsRequest(server string, params *FindListItemsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/items/_find") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Cursor != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "cursor", runtime.ParamLocationQuery, *params.Cursor); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewImportListItemsRequestWithBody generates requests for ImportListItems with any type of body +func NewImportListItemsRequestWithBody(server string, params *ImportListItemsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/items/_import") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.ListId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "list_id", runtime.ParamLocationQuery, *params.ListId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Type != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "type", runtime.ParamLocationQuery, *params.Type); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Serializer != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "serializer", runtime.ParamLocationQuery, *params.Serializer); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Deserializer != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "deserializer", runtime.ParamLocationQuery, *params.Deserializer); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Refresh != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "refresh", runtime.ParamLocationQuery, *params.Refresh); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewReadListPrivilegesRequest generates requests for ReadListPrivileges +func NewReadListPrivilegesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/lists/privileges") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteLogstashPipelineRequest generates requests for DeleteLogstashPipeline +func NewDeleteLogstashPipelineRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/logstash/pipeline/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetLogstashPipelineRequest generates requests for GetLogstashPipeline +func NewGetLogstashPipelineRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/logstash/pipeline/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutLogstashPipelineRequest calls the generic PutLogstashPipeline builder with application/json body +func NewPutLogstashPipelineRequest(server string, id string, body PutLogstashPipelineJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutLogstashPipelineRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPutLogstashPipelineRequestWithBody generates requests for PutLogstashPipeline with any type of body +func NewPutLogstashPipelineRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/logstash/pipeline/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetLogstashPipelinesRequest generates requests for GetLogstashPipelines +func NewGetLogstashPipelinesRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/logstash/pipelines") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostMaintenanceWindowIdArchiveRequest generates requests for PostMaintenanceWindowIdArchive +func NewPostMaintenanceWindowIdArchiveRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/maintenance_window/%s/_archive", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostMaintenanceWindowIdUnarchiveRequest generates requests for PostMaintenanceWindowIdUnarchive +func NewPostMaintenanceWindowIdUnarchiveRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/maintenance_window/%s/_unarchive", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewMlSyncRequest generates requests for MlSync +func NewMlSyncRequest(server string, params *MlSyncParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/ml/saved_objects/sync") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Simulate != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "simulate", runtime.ParamLocationQuery, *params.Simulate); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteNoteRequest calls the generic DeleteNote builder with application/json body +func NewDeleteNoteRequest(server string, body DeleteNoteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteNoteRequestWithBody(server, "application/json", bodyReader) +} + +// NewDeleteNoteRequestWithBody generates requests for DeleteNote with any type of body +func NewDeleteNoteRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/note") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetNotesRequest generates requests for GetNotes +func NewGetNotesRequest(server string, params *GetNotesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/note") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.DocumentIds != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "documentIds", runtime.ParamLocationQuery, *params.DocumentIds); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SavedObjectIds != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "savedObjectIds", runtime.ParamLocationQuery, *params.SavedObjectIds); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Search != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "search", runtime.ParamLocationQuery, *params.Search); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.CreatedByFilter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "createdByFilter", runtime.ParamLocationQuery, *params.CreatedByFilter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + 
} else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.AssociatedFilter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "associatedFilter", runtime.ParamLocationQuery, *params.AssociatedFilter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPersistNoteRouteRequest calls the generic PersistNoteRoute builder with application/json body +func NewPersistNoteRouteRequest(server string, body PersistNoteRouteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPersistNoteRouteRequestWithBody(server, "application/json", bodyReader) +} + +// NewPersistNoteRouteRequestWithBody generates requests for PersistNoteRoute with any type of body +func NewPersistNoteRouteRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/note") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewObservabilityAiAssistantChatCompleteRequest calls the generic ObservabilityAiAssistantChatComplete builder with application/json body +func NewObservabilityAiAssistantChatCompleteRequest(server string, body ObservabilityAiAssistantChatCompleteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewObservabilityAiAssistantChatCompleteRequestWithBody(server, "application/json", bodyReader) +} + +// NewObservabilityAiAssistantChatCompleteRequestWithBody generates requests for ObservabilityAiAssistantChatComplete with any type of body +func NewObservabilityAiAssistantChatCompleteRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/observability_ai_assistant/chat/complete") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewOsqueryFindLiveQueriesRequest generates requests for OsqueryFindLiveQueries +func NewOsqueryFindLiveQueriesRequest(server string, params *OsqueryFindLiveQueriesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/live_queries") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PageSize != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "pageSize", runtime.ParamLocationQuery, *params.PageSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Sort != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort", runtime.ParamLocationQuery, *params.Sort); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewOsqueryCreateLiveQueryRequest calls the generic OsqueryCreateLiveQuery builder with application/json body +func NewOsqueryCreateLiveQueryRequest(server string, body OsqueryCreateLiveQueryJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewOsqueryCreateLiveQueryRequestWithBody(server, "application/json", bodyReader) +} + +// NewOsqueryCreateLiveQueryRequestWithBody generates requests for OsqueryCreateLiveQuery with any type of body +func 
NewOsqueryCreateLiveQueryRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/live_queries") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewOsqueryGetLiveQueryDetailsRequest generates requests for OsqueryGetLiveQueryDetails +func NewOsqueryGetLiveQueryDetailsRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/live_queries/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewOsqueryGetLiveQueryResultsRequest generates requests for OsqueryGetLiveQueryResults +func NewOsqueryGetLiveQueryResultsRequest(server string, id string, actionId string, params *OsqueryGetLiveQueryResultsParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "actionId", runtime.ParamLocationPath, actionId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/live_queries/%s/results/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Kuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PageSize != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "pageSize", runtime.ParamLocationQuery, *params.PageSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Sort != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort", runtime.ParamLocationQuery, *params.Sort); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewOsqueryFindPacksRequest generates requests for OsqueryFindPacks +func NewOsqueryFindPacksRequest(server string, params *OsqueryFindPacksParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/packs") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PageSize != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "pageSize", runtime.ParamLocationQuery, *params.PageSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Sort != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort", runtime.ParamLocationQuery, *params.Sort); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewOsqueryCreatePacksRequest calls the generic OsqueryCreatePacks builder with application/json body +func NewOsqueryCreatePacksRequest(server string, body OsqueryCreatePacksJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewOsqueryCreatePacksRequestWithBody(server, "application/json", bodyReader) +} + +// NewOsqueryCreatePacksRequestWithBody generates requests for OsqueryCreatePacks with any type of body +func NewOsqueryCreatePacksRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/packs") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewOsqueryDeletePacksRequest generates requests for OsqueryDeletePacks +func NewOsqueryDeletePacksRequest(server string, id SecurityOsqueryAPIPackId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/packs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewOsqueryGetPacksDetailsRequest generates requests for OsqueryGetPacksDetails +func NewOsqueryGetPacksDetailsRequest(server string, id SecurityOsqueryAPIPackId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/packs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewOsqueryUpdatePacksRequest calls the generic OsqueryUpdatePacks builder with application/json body +func NewOsqueryUpdatePacksRequest(server string, id SecurityOsqueryAPIPackId, body OsqueryUpdatePacksJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewOsqueryUpdatePacksRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewOsqueryUpdatePacksRequestWithBody generates requests for OsqueryUpdatePacks with any type of body +func NewOsqueryUpdatePacksRequestWithBody(server string, id SecurityOsqueryAPIPackId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/packs/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewOsqueryFindSavedQueriesRequest generates requests for OsqueryFindSavedQueries +func NewOsqueryFindSavedQueriesRequest(server string, params *OsqueryFindSavedQueriesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/saved_queries") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PageSize != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "pageSize", runtime.ParamLocationQuery, *params.PageSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Sort != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort", runtime.ParamLocationQuery, *params.Sort); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewOsqueryCreateSavedQueryRequest calls the generic OsqueryCreateSavedQuery builder with application/json body +func NewOsqueryCreateSavedQueryRequest(server string, body OsqueryCreateSavedQueryJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewOsqueryCreateSavedQueryRequestWithBody(server, "application/json", bodyReader) +} + +// NewOsqueryCreateSavedQueryRequestWithBody generates requests for OsqueryCreateSavedQuery with any type of body +func NewOsqueryCreateSavedQueryRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/saved_queries") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewOsqueryDeleteSavedQueryRequest generates requests for OsqueryDeleteSavedQuery +func NewOsqueryDeleteSavedQueryRequest(server string, id SecurityOsqueryAPISavedQueryId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/saved_queries/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewOsqueryGetSavedQueryDetailsRequest generates requests for OsqueryGetSavedQueryDetails +func NewOsqueryGetSavedQueryDetailsRequest(server string, id SecurityOsqueryAPISavedQueryId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/saved_queries/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewOsqueryUpdateSavedQueryRequest calls the generic OsqueryUpdateSavedQuery builder with application/json body +func NewOsqueryUpdateSavedQueryRequest(server string, id SecurityOsqueryAPISavedQueryId, body OsqueryUpdateSavedQueryJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewOsqueryUpdateSavedQueryRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewOsqueryUpdateSavedQueryRequestWithBody generates requests for OsqueryUpdateSavedQuery with any type of body +func NewOsqueryUpdateSavedQueryRequestWithBody(server string, id SecurityOsqueryAPISavedQueryId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/osquery/saved_queries/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPersistPinnedEventRouteRequest calls the generic PersistPinnedEventRoute builder with application/json body +func NewPersistPinnedEventRouteRequest(server string, body PersistPinnedEventRouteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPersistPinnedEventRouteRequestWithBody(server, "application/json", bodyReader) +} + +// NewPersistPinnedEventRouteRequestWithBody generates requests for PersistPinnedEventRoute with any type of body +func NewPersistPinnedEventRouteRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/pinned_event") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCleanUpRiskEngineRequest generates requests for CleanUpRiskEngine +func NewCleanUpRiskEngineRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/risk_score/engine/dangerously_delete_data") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewConfigureRiskEngineSavedObjectRequest calls the generic ConfigureRiskEngineSavedObject builder with application/json body +func NewConfigureRiskEngineSavedObjectRequest(server string, body ConfigureRiskEngineSavedObjectJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewConfigureRiskEngineSavedObjectRequestWithBody(server, "application/json", bodyReader) +} + +// NewConfigureRiskEngineSavedObjectRequestWithBody generates requests for ConfigureRiskEngineSavedObject with any type of body +func NewConfigureRiskEngineSavedObjectRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/risk_score/engine/saved_object/configure") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewScheduleRiskEngineNowRequest calls the generic ScheduleRiskEngineNow builder with application/json body +func NewScheduleRiskEngineNowRequest(server string, body ScheduleRiskEngineNowJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewScheduleRiskEngineNowRequestWithBody(server, "application/json", bodyReader) +} + +// NewScheduleRiskEngineNowRequestWithBody generates requests for ScheduleRiskEngineNow with any type of body +func NewScheduleRiskEngineNowRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/risk_score/engine/schedule_now") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewBulkCreateSavedObjectsRequest calls the generic BulkCreateSavedObjects builder with application/json body +func NewBulkCreateSavedObjectsRequest(server string, params *BulkCreateSavedObjectsParams, body BulkCreateSavedObjectsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewBulkCreateSavedObjectsRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewBulkCreateSavedObjectsRequestWithBody generates requests for BulkCreateSavedObjects with any type of body +func NewBulkCreateSavedObjectsRequestWithBody(server string, params *BulkCreateSavedObjectsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/_bulk_create") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Overwrite != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "overwrite", runtime.ParamLocationQuery, *params.Overwrite); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewBulkDeleteSavedObjectsRequest calls the generic BulkDeleteSavedObjects builder with application/json body +func NewBulkDeleteSavedObjectsRequest(server string, params *BulkDeleteSavedObjectsParams, body BulkDeleteSavedObjectsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewBulkDeleteSavedObjectsRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewBulkDeleteSavedObjectsRequestWithBody generates requests for BulkDeleteSavedObjects with any type of body +func NewBulkDeleteSavedObjectsRequestWithBody(server string, params *BulkDeleteSavedObjectsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/_bulk_delete") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Force != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "force", runtime.ParamLocationQuery, *params.Force); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewBulkGetSavedObjectsRequest calls the generic BulkGetSavedObjects builder with application/json body +func NewBulkGetSavedObjectsRequest(server string, body BulkGetSavedObjectsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewBulkGetSavedObjectsRequestWithBody(server, "application/json", bodyReader) +} + +// NewBulkGetSavedObjectsRequestWithBody generates requests for BulkGetSavedObjects with any type of body +func NewBulkGetSavedObjectsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/_bulk_get") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewBulkResolveSavedObjectsRequest calls the generic BulkResolveSavedObjects builder with application/json body +func NewBulkResolveSavedObjectsRequest(server string, body BulkResolveSavedObjectsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewBulkResolveSavedObjectsRequestWithBody(server, "application/json", bodyReader) +} + +// NewBulkResolveSavedObjectsRequestWithBody generates requests for BulkResolveSavedObjects with any type of body +func NewBulkResolveSavedObjectsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/_bulk_resolve") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewBulkUpdateSavedObjectsRequest calls the generic BulkUpdateSavedObjects builder with application/json body +func NewBulkUpdateSavedObjectsRequest(server string, body BulkUpdateSavedObjectsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewBulkUpdateSavedObjectsRequestWithBody(server, "application/json", bodyReader) +} + +// NewBulkUpdateSavedObjectsRequestWithBody generates requests for BulkUpdateSavedObjects with any type of body +func NewBulkUpdateSavedObjectsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/_bulk_update") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostSavedObjectsExportRequest calls the generic PostSavedObjectsExport builder with application/json body +func NewPostSavedObjectsExportRequest(server string, body PostSavedObjectsExportJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSavedObjectsExportRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSavedObjectsExportRequestWithBody generates requests for PostSavedObjectsExport with any type of body +func NewPostSavedObjectsExportRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/_export") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindSavedObjectsRequest generates requests for FindSavedObjects +func NewFindSavedObjectsRequest(server string, params *FindSavedObjectsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/_find") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Aggs != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "aggs", runtime.ParamLocationQuery, *params.Aggs); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.DefaultSearchOperator != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "default_search_operator", runtime.ParamLocationQuery, *params.DefaultSearchOperator); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Fields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "fields", runtime.ParamLocationQuery, *params.Fields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HasNoReference != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "has_no_reference", runtime.ParamLocationQuery, *params.HasNoReference); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HasNoReferenceOperator != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "has_no_reference_operator", runtime.ParamLocationQuery, *params.HasNoReferenceOperator); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HasReference != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "has_reference", runtime.ParamLocationQuery, *params.HasReference); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HasReferenceOperator != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "has_reference_operator", runtime.ParamLocationQuery, *params.HasReferenceOperator); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return 
nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Search != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "search", runtime.ParamLocationQuery, *params.Search); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SearchFields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "search_fields", runtime.ParamLocationQuery, *params.SearchFields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "type", runtime.ParamLocationQuery, params.Type); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostSavedObjectsImportRequestWithBody generates requests for PostSavedObjectsImport with any type of body +func NewPostSavedObjectsImportRequestWithBody(server string, params *PostSavedObjectsImportParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/_import") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Overwrite != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "overwrite", runtime.ParamLocationQuery, *params.Overwrite); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.CreateNewCopies != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "createNewCopies", runtime.ParamLocationQuery, *params.CreateNewCopies); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.CompatibilityMode != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "compatibilityMode", runtime.ParamLocationQuery, *params.CompatibilityMode); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewResolveImportErrorsRequestWithBody generates requests for ResolveImportErrors with any type of body +func NewResolveImportErrorsRequestWithBody(server string, params *ResolveImportErrorsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/_resolve_import_errors") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.CompatibilityMode != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "compatibilityMode", runtime.ParamLocationQuery, *params.CompatibilityMode); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.CreateNewCopies != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "createNewCopies", runtime.ParamLocationQuery, *params.CreateNewCopies); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewResolveSavedObjectRequest generates requests for ResolveSavedObject +func NewResolveSavedObjectRequest(server string, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "type", runtime.ParamLocationPath, pType) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/resolve/%s/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateSavedObjectRequest calls the generic CreateSavedObject builder with application/json body +func NewCreateSavedObjectRequest(server string, pType SavedObjectsSavedObjectType, params *CreateSavedObjectParams, body CreateSavedObjectJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateSavedObjectRequestWithBody(server, pType, params, "application/json", bodyReader) +} + +// NewCreateSavedObjectRequestWithBody generates requests for CreateSavedObject with any type of body +func NewCreateSavedObjectRequestWithBody(server string, pType SavedObjectsSavedObjectType, params *CreateSavedObjectParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "type", runtime.ParamLocationPath, pType) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Overwrite != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "overwrite", runtime.ParamLocationQuery, *params.Overwrite); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetSavedObjectRequest generates requests for GetSavedObject +func NewGetSavedObjectRequest(server string, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "type", runtime.ParamLocationPath, pType) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/%s/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateSavedObjectIdRequest calls the generic CreateSavedObjectId builder with application/json body +func NewCreateSavedObjectIdRequest(server string, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, params *CreateSavedObjectIdParams, body CreateSavedObjectIdJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateSavedObjectIdRequestWithBody(server, pType, id, params, "application/json", bodyReader) +} + +// NewCreateSavedObjectIdRequestWithBody generates requests for CreateSavedObjectId with any type of body +func NewCreateSavedObjectIdRequestWithBody(server string, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, params *CreateSavedObjectIdParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "type", runtime.ParamLocationPath, pType) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/%s/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Overwrite != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "overwrite", runtime.ParamLocationQuery, *params.Overwrite); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewUpdateSavedObjectRequest calls the generic UpdateSavedObject builder with application/json body +func NewUpdateSavedObjectRequest(server string, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, body UpdateSavedObjectJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateSavedObjectRequestWithBody(server, pType, id, "application/json", bodyReader) +} + +// NewUpdateSavedObjectRequestWithBody generates requests for UpdateSavedObject with any type of body +func NewUpdateSavedObjectRequestWithBody(server string, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "type", runtime.ParamLocationPath, pType) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/saved_objects/%s/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetSecurityRoleRequest generates requests for GetSecurityRole +func NewGetSecurityRoleRequest(server string, params *GetSecurityRoleParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security/role") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.ReplaceDeprecatedPrivileges != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "replaceDeprecatedPrivileges", runtime.ParamLocationQuery, *params.ReplaceDeprecatedPrivileges); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostSecurityRoleQueryRequest calls the generic PostSecurityRoleQuery builder with application/json body +func NewPostSecurityRoleQueryRequest(server string, body PostSecurityRoleQueryJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSecurityRoleQueryRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSecurityRoleQueryRequestWithBody generates requests for PostSecurityRoleQuery with any type of body +func NewPostSecurityRoleQueryRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security/role/_query") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteSecurityRoleNameRequest generates requests for DeleteSecurityRoleName +func NewDeleteSecurityRoleNameRequest(server string, name string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security/role/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetSecurityRoleNameRequest generates requests for GetSecurityRoleName +func NewGetSecurityRoleNameRequest(server string, name string, params *GetSecurityRoleNameParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security/role/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.ReplaceDeprecatedPrivileges != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "replaceDeprecatedPrivileges", runtime.ParamLocationQuery, *params.ReplaceDeprecatedPrivileges); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutSecurityRoleNameRequest calls the generic PutSecurityRoleName builder with application/json body +func NewPutSecurityRoleNameRequest(server string, name string, params *PutSecurityRoleNameParams, body PutSecurityRoleNameJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutSecurityRoleNameRequestWithBody(server, name, params, "application/json", bodyReader) +} + +// NewPutSecurityRoleNameRequestWithBody generates requests for PutSecurityRoleName with any type of body +func NewPutSecurityRoleNameRequestWithBody(server string, name string, params *PutSecurityRoleNameParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security/role/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.CreateOnly != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "createOnly", runtime.ParamLocationQuery, *params.CreateOnly); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostSecurityRolesRequest calls the generic PostSecurityRoles builder with application/json body +func NewPostSecurityRolesRequest(server string, body PostSecurityRolesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSecurityRolesRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSecurityRolesRequestWithBody generates requests for PostSecurityRoles with any type of body +func NewPostSecurityRolesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security/roles") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostSecuritySessionInvalidateRequest calls the generic PostSecuritySessionInvalidate builder with application/json body +func NewPostSecuritySessionInvalidateRequest(server string, body PostSecuritySessionInvalidateJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSecuritySessionInvalidateRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSecuritySessionInvalidateRequestWithBody generates requests for PostSecuritySessionInvalidate with any type of body +func NewPostSecuritySessionInvalidateRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security/session/_invalidate") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPerformAnonymizationFieldsBulkActionRequest calls the generic PerformAnonymizationFieldsBulkAction builder with application/json body +func NewPerformAnonymizationFieldsBulkActionRequest(server string, body PerformAnonymizationFieldsBulkActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPerformAnonymizationFieldsBulkActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewPerformAnonymizationFieldsBulkActionRequestWithBody generates requests for PerformAnonymizationFieldsBulkAction with any type of body +func NewPerformAnonymizationFieldsBulkActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/anonymization_fields/_bulk_action") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindAnonymizationFieldsRequest generates requests for FindAnonymizationFields +func NewFindAnonymizationFieldsRequest(server string, params *FindAnonymizationFieldsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/anonymization_fields/_find") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Fields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "fields", runtime.ParamLocationQuery, *params.Fields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.AllData != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "all_data", runtime.ParamLocationQuery, *params.AllData); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewChatCompleteRequest calls the generic ChatComplete builder with application/json body +func NewChatCompleteRequest(server string, params *ChatCompleteParams, body ChatCompleteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewChatCompleteRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewChatCompleteRequestWithBody generates requests for ChatComplete with any 
type of body +func NewChatCompleteRequestWithBody(server string, params *ChatCompleteParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/chat/complete") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.ContentReferencesDisabled != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "content_references_disabled", runtime.ParamLocationQuery, *params.ContentReferencesDisabled); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteAllConversationsRequest calls the generic DeleteAllConversations builder with application/json body +func NewDeleteAllConversationsRequest(server string, body DeleteAllConversationsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteAllConversationsRequestWithBody(server, "application/json", bodyReader) +} + +// NewDeleteAllConversationsRequestWithBody generates requests for DeleteAllConversations with any type of body +func NewDeleteAllConversationsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/current_user/conversations") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateConversationRequest calls the generic CreateConversation builder with application/json body +func NewCreateConversationRequest(server string, body CreateConversationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateConversationRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateConversationRequestWithBody generates requests for CreateConversation with any type of body +func NewCreateConversationRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/current_user/conversations") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindConversationsRequest generates requests for FindConversations +func NewFindConversationsRequest(server string, params *FindConversationsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/current_user/conversations/_find") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Fields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "fields", runtime.ParamLocationQuery, *params.Fields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.IsOwner != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "is_owner", runtime.ParamLocationQuery, *params.IsOwner); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + 
+ } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteConversationRequest generates requests for DeleteConversation +func NewDeleteConversationRequest(server string, id SecurityAIAssistantAPINonEmptyString) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/current_user/conversations/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadConversationRequest generates requests for ReadConversation +func NewReadConversationRequest(server string, id SecurityAIAssistantAPINonEmptyString) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/current_user/conversations/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateConversationRequest calls the generic UpdateConversation builder with application/json body +func NewUpdateConversationRequest(server string, id SecurityAIAssistantAPINonEmptyString, body UpdateConversationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateConversationRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewUpdateConversationRequestWithBody generates requests for UpdateConversation with any type of body +func NewUpdateConversationRequestWithBody(server string, id SecurityAIAssistantAPINonEmptyString, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/current_user/conversations/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateKnowledgeBaseEntryRequest calls the generic CreateKnowledgeBaseEntry builder with application/json body +func NewCreateKnowledgeBaseEntryRequest(server string, body CreateKnowledgeBaseEntryJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateKnowledgeBaseEntryRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateKnowledgeBaseEntryRequestWithBody generates requests for CreateKnowledgeBaseEntry with any type of body +func NewCreateKnowledgeBaseEntryRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/knowledge_base/entries") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPerformKnowledgeBaseEntryBulkActionRequest calls the generic PerformKnowledgeBaseEntryBulkAction builder with application/json body +func NewPerformKnowledgeBaseEntryBulkActionRequest(server string, body PerformKnowledgeBaseEntryBulkActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPerformKnowledgeBaseEntryBulkActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewPerformKnowledgeBaseEntryBulkActionRequestWithBody generates requests for PerformKnowledgeBaseEntryBulkAction with any type of body +func NewPerformKnowledgeBaseEntryBulkActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/knowledge_base/entries/_bulk_action") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindKnowledgeBaseEntriesRequest generates requests for FindKnowledgeBaseEntries +func NewFindKnowledgeBaseEntriesRequest(server string, params *FindKnowledgeBaseEntriesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/knowledge_base/entries/_find") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Fields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "fields", runtime.ParamLocationQuery, *params.Fields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteKnowledgeBaseEntryRequest generates requests for DeleteKnowledgeBaseEntry +func NewDeleteKnowledgeBaseEntryRequest(server string, id SecurityAIAssistantAPINonEmptyString) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/knowledge_base/entries/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewReadKnowledgeBaseEntryRequest generates requests for ReadKnowledgeBaseEntry +func NewReadKnowledgeBaseEntryRequest(server string, id SecurityAIAssistantAPINonEmptyString) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/knowledge_base/entries/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateKnowledgeBaseEntryRequest calls the generic UpdateKnowledgeBaseEntry builder with application/json body +func NewUpdateKnowledgeBaseEntryRequest(server string, id SecurityAIAssistantAPINonEmptyString, body UpdateKnowledgeBaseEntryJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateKnowledgeBaseEntryRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewUpdateKnowledgeBaseEntryRequestWithBody generates requests for UpdateKnowledgeBaseEntry with any type of body +func NewUpdateKnowledgeBaseEntryRequestWithBody(server string, id SecurityAIAssistantAPINonEmptyString, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/knowledge_base/entries/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewReadKnowledgeBaseRequest generates requests for ReadKnowledgeBase +func NewReadKnowledgeBaseRequest(server string, resource string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "resource", runtime.ParamLocationPath, resource) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/knowledge_base/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateKnowledgeBaseRequest generates requests for CreateKnowledgeBase +func NewCreateKnowledgeBaseRequest(server string, resource string, params *CreateKnowledgeBaseParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "resource", runtime.ParamLocationPath, resource) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/knowledge_base/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.ModelId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "modelId", runtime.ParamLocationQuery, *params.ModelId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.IgnoreSecurityLabs != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreSecurityLabs", runtime.ParamLocationQuery, *params.IgnoreSecurityLabs); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPerformPromptsBulkActionRequest calls the generic PerformPromptsBulkAction builder with application/json body +func NewPerformPromptsBulkActionRequest(server string, body PerformPromptsBulkActionJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPerformPromptsBulkActionRequestWithBody(server, "application/json", bodyReader) +} + +// NewPerformPromptsBulkActionRequestWithBody generates requests for PerformPromptsBulkAction with any type of body +func NewPerformPromptsBulkActionRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/prompts/_bulk_action") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindPromptsRequest generates requests for FindPrompts +func NewFindPromptsRequest(server string, params *FindPromptsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/security_ai_assistant/prompts/_find") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Fields != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "fields", runtime.ParamLocationQuery, *params.Fields); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostUrlRequest calls the generic PostUrl builder with application/json body +func NewPostUrlRequest(server string, body PostUrlJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err 
!= nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostUrlRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostUrlRequestWithBody generates requests for PostUrl with any type of body +func NewPostUrlRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/short_url") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewResolveUrlRequest generates requests for ResolveUrl +func NewResolveUrlRequest(server string, slug string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "slug", runtime.ParamLocationPath, slug) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/short_url/_slug/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteUrlRequest generates requests for DeleteUrl +func NewDeleteUrlRequest(server string, id ShortURLAPIsIdParam) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/short_url/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetUrlRequest generates requests for GetUrl +func NewGetUrlRequest(server string, id ShortURLAPIsIdParam) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/short_url/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostSpacesCopySavedObjectsRequest calls the generic PostSpacesCopySavedObjects builder with application/json body +func NewPostSpacesCopySavedObjectsRequest(server string, body PostSpacesCopySavedObjectsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSpacesCopySavedObjectsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSpacesCopySavedObjectsRequestWithBody generates requests for PostSpacesCopySavedObjects with any type of body +func NewPostSpacesCopySavedObjectsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/spaces/_copy_saved_objects") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostSpacesDisableLegacyUrlAliasesRequest calls the generic PostSpacesDisableLegacyUrlAliases builder with application/json body +func NewPostSpacesDisableLegacyUrlAliasesRequest(server string, body PostSpacesDisableLegacyUrlAliasesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSpacesDisableLegacyUrlAliasesRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSpacesDisableLegacyUrlAliasesRequestWithBody generates requests for PostSpacesDisableLegacyUrlAliases with any type of body +func NewPostSpacesDisableLegacyUrlAliasesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/spaces/_disable_legacy_url_aliases") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostSpacesGetShareableReferencesRequest calls the generic PostSpacesGetShareableReferences builder with application/json body +func NewPostSpacesGetShareableReferencesRequest(server string, body PostSpacesGetShareableReferencesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSpacesGetShareableReferencesRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSpacesGetShareableReferencesRequestWithBody generates requests for PostSpacesGetShareableReferences with any type of body +func NewPostSpacesGetShareableReferencesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/spaces/_get_shareable_references") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostSpacesResolveCopySavedObjectsErrorsRequest calls the generic PostSpacesResolveCopySavedObjectsErrors builder with application/json body +func NewPostSpacesResolveCopySavedObjectsErrorsRequest(server string, body PostSpacesResolveCopySavedObjectsErrorsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSpacesResolveCopySavedObjectsErrorsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSpacesResolveCopySavedObjectsErrorsRequestWithBody generates requests for PostSpacesResolveCopySavedObjectsErrors with any type of body +func NewPostSpacesResolveCopySavedObjectsErrorsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/spaces/_resolve_copy_saved_objects_errors") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostSpacesUpdateObjectsSpacesRequest calls the generic PostSpacesUpdateObjectsSpaces builder with application/json body +func NewPostSpacesUpdateObjectsSpacesRequest(server string, body PostSpacesUpdateObjectsSpacesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSpacesUpdateObjectsSpacesRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSpacesUpdateObjectsSpacesRequestWithBody generates requests for PostSpacesUpdateObjectsSpaces with any type of body +func NewPostSpacesUpdateObjectsSpacesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/spaces/_update_objects_spaces") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetSpacesSpaceRequest generates requests for GetSpacesSpace +func NewGetSpacesSpaceRequest(server string, params *GetSpacesSpaceParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/spaces/space") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Purpose != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "purpose", runtime.ParamLocationQuery, *params.Purpose); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "include_authorized_purposes", runtime.ParamLocationQuery, params.IncludeAuthorizedPurposes); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostSpacesSpaceRequest calls the generic PostSpacesSpace builder with application/json body +func NewPostSpacesSpaceRequest(server string, body PostSpacesSpaceJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSpacesSpaceRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSpacesSpaceRequestWithBody generates requests for PostSpacesSpace with any type of body +func NewPostSpacesSpaceRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/spaces/space") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteSpacesSpaceIdRequest generates requests for DeleteSpacesSpaceId +func NewDeleteSpacesSpaceIdRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/spaces/space/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetSpacesSpaceIdRequest generates requests for GetSpacesSpaceId +func NewGetSpacesSpaceIdRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/spaces/space/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutSpacesSpaceIdRequest calls the generic PutSpacesSpaceId builder with application/json body +func NewPutSpacesSpaceIdRequest(server string, id string, body PutSpacesSpaceIdJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutSpacesSpaceIdRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPutSpacesSpaceIdRequestWithBody generates requests for PutSpacesSpaceId with any type of body +func NewPutSpacesSpaceIdRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/spaces/space/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetStatusRequest generates requests for GetStatus +func NewGetStatusRequest(server string, params *GetStatusParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/status") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.V7format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "v7format", runtime.ParamLocationQuery, *params.V7format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.V8format != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "v8format", runtime.ParamLocationQuery, *params.V8format); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetStreamsRequest calls the generic GetStreams builder with application/json body +func NewGetStreamsRequest(server string, body GetStreamsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewGetStreamsRequestWithBody(server, "application/json", bodyReader) +} + +// NewGetStreamsRequestWithBody generates requests for GetStreams with any type of body +func NewGetStreamsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostStreamsDisableRequest calls the generic PostStreamsDisable builder with application/json body +func NewPostStreamsDisableRequest(server string, body PostStreamsDisableJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostStreamsDisableRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostStreamsDisableRequestWithBody generates requests for PostStreamsDisable with any type of body +func NewPostStreamsDisableRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/_disable") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostStreamsEnableRequest calls the generic PostStreamsEnable builder with application/json body +func NewPostStreamsEnableRequest(server string, body PostStreamsEnableJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostStreamsEnableRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostStreamsEnableRequestWithBody generates requests for PostStreamsEnable with any type of body +func NewPostStreamsEnableRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/_enable") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostStreamsResyncRequest calls the generic PostStreamsResync builder with application/json body +func NewPostStreamsResyncRequest(server string, body PostStreamsResyncJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostStreamsResyncRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostStreamsResyncRequestWithBody generates requests for PostStreamsResync with any type of body +func NewPostStreamsResyncRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/_resync") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteStreamsNameRequest calls the generic DeleteStreamsName builder with application/json body +func NewDeleteStreamsNameRequest(server string, name string, body DeleteStreamsNameJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteStreamsNameRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewDeleteStreamsNameRequestWithBody generates requests for DeleteStreamsName with any type of body +func NewDeleteStreamsNameRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetStreamsNameRequest calls the generic GetStreamsName builder with application/json body +func NewGetStreamsNameRequest(server string, name string, body GetStreamsNameJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewGetStreamsNameRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewGetStreamsNameRequestWithBody generates requests for GetStreamsName with any type of body +func NewGetStreamsNameRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPutStreamsNameRequest calls the generic PutStreamsName builder with application/json body +func NewPutStreamsNameRequest(server string, name string, body PutStreamsNameJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutStreamsNameRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewPutStreamsNameRequestWithBody generates requests for PutStreamsName with any type of body +func NewPutStreamsNameRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostStreamsNameForkRequest calls the generic PostStreamsNameFork builder with application/json body +func NewPostStreamsNameForkRequest(server string, name string, body PostStreamsNameForkJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostStreamsNameForkRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewPostStreamsNameForkRequestWithBody generates requests for PostStreamsNameFork with any type of body +func NewPostStreamsNameForkRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/_fork", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetStreamsNameGroupRequest calls the generic GetStreamsNameGroup builder with application/json body +func NewGetStreamsNameGroupRequest(server string, name string, body GetStreamsNameGroupJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewGetStreamsNameGroupRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewGetStreamsNameGroupRequestWithBody generates requests for GetStreamsNameGroup with any type of body +func NewGetStreamsNameGroupRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/_group", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPutStreamsNameGroupRequest calls the generic PutStreamsNameGroup builder with application/json body +func NewPutStreamsNameGroupRequest(server string, name string, body PutStreamsNameGroupJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutStreamsNameGroupRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewPutStreamsNameGroupRequestWithBody generates requests for PutStreamsNameGroup with any type of body +func NewPutStreamsNameGroupRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/_group", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetStreamsNameIngestRequest calls the generic GetStreamsNameIngest builder with application/json body +func NewGetStreamsNameIngestRequest(server string, name string, body GetStreamsNameIngestJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewGetStreamsNameIngestRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewGetStreamsNameIngestRequestWithBody generates requests for GetStreamsNameIngest with any type of body +func NewGetStreamsNameIngestRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/_ingest", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPutStreamsNameIngestRequest calls the generic PutStreamsNameIngest builder with application/json body +func NewPutStreamsNameIngestRequest(server string, name string, body PutStreamsNameIngestJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutStreamsNameIngestRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewPutStreamsNameIngestRequestWithBody generates requests for PutStreamsNameIngest with any type of body +func NewPutStreamsNameIngestRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/_ingest", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostStreamsNameContentExportRequest calls the generic PostStreamsNameContentExport builder with application/json body +func NewPostStreamsNameContentExportRequest(server string, name string, body PostStreamsNameContentExportJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostStreamsNameContentExportRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewPostStreamsNameContentExportRequestWithBody generates requests for PostStreamsNameContentExport with any type of body +func NewPostStreamsNameContentExportRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/content/export", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostStreamsNameContentImportRequestWithBody generates requests for PostStreamsNameContentImport with any type of body +func NewPostStreamsNameContentImportRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/content/import", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetStreamsNameDashboardsRequest calls the generic GetStreamsNameDashboards builder with application/json body +func NewGetStreamsNameDashboardsRequest(server string, name string, body GetStreamsNameDashboardsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewGetStreamsNameDashboardsRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewGetStreamsNameDashboardsRequestWithBody generates requests for GetStreamsNameDashboards with any type of body +func NewGetStreamsNameDashboardsRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/dashboards", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostStreamsNameDashboardsBulkRequest calls the generic PostStreamsNameDashboardsBulk builder with application/json body +func NewPostStreamsNameDashboardsBulkRequest(server string, name string, body PostStreamsNameDashboardsBulkJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostStreamsNameDashboardsBulkRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewPostStreamsNameDashboardsBulkRequestWithBody generates requests for PostStreamsNameDashboardsBulk with any type of body +func NewPostStreamsNameDashboardsBulkRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/dashboards/_bulk", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteStreamsNameDashboardsDashboardidRequest calls the generic DeleteStreamsNameDashboardsDashboardid builder with application/json body +func NewDeleteStreamsNameDashboardsDashboardidRequest(server string, name string, dashboardId string, body DeleteStreamsNameDashboardsDashboardidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteStreamsNameDashboardsDashboardidRequestWithBody(server, name, dashboardId, "application/json", bodyReader) +} + +// NewDeleteStreamsNameDashboardsDashboardidRequestWithBody generates requests for DeleteStreamsNameDashboardsDashboardid with any type of body +func NewDeleteStreamsNameDashboardsDashboardidRequestWithBody(server string, name string, dashboardId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "dashboardId", runtime.ParamLocationPath, dashboardId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/dashboards/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPutStreamsNameDashboardsDashboardidRequest calls the generic PutStreamsNameDashboardsDashboardid builder with application/json body +func NewPutStreamsNameDashboardsDashboardidRequest(server string, name string, dashboardId string, body PutStreamsNameDashboardsDashboardidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutStreamsNameDashboardsDashboardidRequestWithBody(server, name, dashboardId, "application/json", bodyReader) +} + +// NewPutStreamsNameDashboardsDashboardidRequestWithBody generates requests for PutStreamsNameDashboardsDashboardid with any type of body +func NewPutStreamsNameDashboardsDashboardidRequestWithBody(server string, name string, dashboardId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "dashboardId", runtime.ParamLocationPath, dashboardId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/dashboards/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetStreamsNameQueriesRequest calls the generic GetStreamsNameQueries builder with application/json body +func NewGetStreamsNameQueriesRequest(server string, name string, body GetStreamsNameQueriesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewGetStreamsNameQueriesRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewGetStreamsNameQueriesRequestWithBody generates requests for GetStreamsNameQueries with any type of body +func NewGetStreamsNameQueriesRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/queries", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostStreamsNameQueriesBulkRequest calls the generic PostStreamsNameQueriesBulk builder with application/json body +func NewPostStreamsNameQueriesBulkRequest(server string, name string, body PostStreamsNameQueriesBulkJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostStreamsNameQueriesBulkRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewPostStreamsNameQueriesBulkRequestWithBody generates requests for PostStreamsNameQueriesBulk with any type of body +func NewPostStreamsNameQueriesBulkRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/queries/_bulk", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteStreamsNameQueriesQueryidRequest calls the generic DeleteStreamsNameQueriesQueryid builder with application/json body +func NewDeleteStreamsNameQueriesQueryidRequest(server string, name string, queryId string, body DeleteStreamsNameQueriesQueryidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteStreamsNameQueriesQueryidRequestWithBody(server, name, queryId, "application/json", bodyReader) +} + +// NewDeleteStreamsNameQueriesQueryidRequestWithBody generates requests for DeleteStreamsNameQueriesQueryid with any type of body +func NewDeleteStreamsNameQueriesQueryidRequestWithBody(server string, name string, queryId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "queryId", runtime.ParamLocationPath, queryId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/queries/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPutStreamsNameQueriesQueryidRequest calls the generic PutStreamsNameQueriesQueryid builder with application/json body +func NewPutStreamsNameQueriesQueryidRequest(server string, name string, queryId string, body PutStreamsNameQueriesQueryidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutStreamsNameQueriesQueryidRequestWithBody(server, name, queryId, "application/json", bodyReader) +} + +// NewPutStreamsNameQueriesQueryidRequestWithBody generates requests for PutStreamsNameQueriesQueryid with any type of body +func NewPutStreamsNameQueriesQueryidRequestWithBody(server string, name string, queryId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "queryId", runtime.ParamLocationPath, queryId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/queries/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetStreamsNameRulesRequest calls the generic GetStreamsNameRules builder with application/json body +func NewGetStreamsNameRulesRequest(server string, name string, body GetStreamsNameRulesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewGetStreamsNameRulesRequestWithBody(server, name, "application/json", bodyReader) +} + +// NewGetStreamsNameRulesRequestWithBody generates requests for GetStreamsNameRules with any type of body +func NewGetStreamsNameRulesRequestWithBody(server string, name string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/rules", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteStreamsNameRulesRuleidRequest calls the generic DeleteStreamsNameRulesRuleid builder with application/json body +func NewDeleteStreamsNameRulesRuleidRequest(server string, name string, ruleId string, body DeleteStreamsNameRulesRuleidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteStreamsNameRulesRuleidRequestWithBody(server, name, ruleId, "application/json", bodyReader) +} + +// NewDeleteStreamsNameRulesRuleidRequestWithBody generates requests for DeleteStreamsNameRulesRuleid with any type of body +func NewDeleteStreamsNameRulesRuleidRequestWithBody(server string, name string, ruleId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "ruleId", runtime.ParamLocationPath, ruleId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/rules/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPutStreamsNameRulesRuleidRequest calls the generic PutStreamsNameRulesRuleid builder with application/json body +func NewPutStreamsNameRulesRuleidRequest(server string, name string, ruleId string, body PutStreamsNameRulesRuleidJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutStreamsNameRulesRuleidRequestWithBody(server, name, ruleId, "application/json", bodyReader) +} + +// NewPutStreamsNameRulesRuleidRequestWithBody generates requests for PutStreamsNameRulesRuleid with any type of body +func NewPutStreamsNameRulesRuleidRequestWithBody(server string, name string, ruleId string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "ruleId", runtime.ParamLocationPath, ruleId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/rules/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetStreamsNameSignificantEventsRequest calls the generic GetStreamsNameSignificantEvents builder with application/json body +func NewGetStreamsNameSignificantEventsRequest(server string, name string, params *GetStreamsNameSignificantEventsParams, body GetStreamsNameSignificantEventsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewGetStreamsNameSignificantEventsRequestWithBody(server, name, params, "application/json", bodyReader) +} + +// NewGetStreamsNameSignificantEventsRequestWithBody generates requests for GetStreamsNameSignificantEvents with any type of body +func NewGetStreamsNameSignificantEventsRequestWithBody(server string, name string, params *GetStreamsNameSignificantEventsParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/significant_events", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "from", runtime.ParamLocationQuery, params.From); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "to", runtime.ParamLocationQuery, params.To); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "bucketSize", runtime.ParamLocationQuery, params.BucketSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetStreamsNameSignificantEventsGenerateRequest calls the generic GetStreamsNameSignificantEventsGenerate builder with application/json body +func NewGetStreamsNameSignificantEventsGenerateRequest(server string, name string, params *GetStreamsNameSignificantEventsGenerateParams, body GetStreamsNameSignificantEventsGenerateJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return 
NewGetStreamsNameSignificantEventsGenerateRequestWithBody(server, name, params, "application/json", bodyReader) +} + +// NewGetStreamsNameSignificantEventsGenerateRequestWithBody generates requests for GetStreamsNameSignificantEventsGenerate with any type of body +func NewGetStreamsNameSignificantEventsGenerateRequestWithBody(server string, name string, params *GetStreamsNameSignificantEventsGenerateParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/significant_events/_generate", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "connectorId", runtime.ParamLocationQuery, params.ConnectorId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if params.CurrentDate != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "currentDate", runtime.ParamLocationQuery, *params.CurrentDate); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.ShortLookback != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "shortLookback", runtime.ParamLocationQuery, *params.ShortLookback); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.LongLookback != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "longLookback", runtime.ParamLocationQuery, *params.LongLookback); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostStreamsNameSignificantEventsPreviewRequest calls the generic PostStreamsNameSignificantEventsPreview builder with application/json body +func NewPostStreamsNameSignificantEventsPreviewRequest(server string, name string, params *PostStreamsNameSignificantEventsPreviewParams, body PostStreamsNameSignificantEventsPreviewJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostStreamsNameSignificantEventsPreviewRequestWithBody(server, name, params, "application/json", bodyReader) +} + +// NewPostStreamsNameSignificantEventsPreviewRequestWithBody generates requests for 
PostStreamsNameSignificantEventsPreview with any type of body +func NewPostStreamsNameSignificantEventsPreviewRequestWithBody(server string, name string, params *PostStreamsNameSignificantEventsPreviewParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "name", runtime.ParamLocationPath, name) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/streams/%s/significant_events/_preview", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "from", runtime.ParamLocationQuery, params.From); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "to", runtime.ParamLocationQuery, params.To); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "bucketSize", runtime.ParamLocationQuery, params.BucketSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostSyntheticsMonitorTestRequest generates requests for PostSyntheticsMonitorTest +func NewPostSyntheticsMonitorTestRequest(server string, monitorId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "monitorId", runtime.ParamLocationPath, monitorId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/monitor/test/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetSyntheticMonitorsRequest generates requests for GetSyntheticMonitors +func NewGetSyntheticMonitorsRequest(server string, params *GetSyntheticMonitorsParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/monitors") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.Filter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "filter", runtime.ParamLocationQuery, *params.Filter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Locations != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "locations", runtime.ParamLocationQuery, *params.Locations); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.MonitorTypes != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "monitorTypes", runtime.ParamLocationQuery, *params.MonitorTypes); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "per_page", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Projects != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "projects", runtime.ParamLocationQuery, *params.Projects); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Query != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "query", runtime.ParamLocationQuery, *params.Query); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Schedules != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "schedules", runtime.ParamLocationQuery, *params.Schedules); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range 
parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Status != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "status", runtime.ParamLocationQuery, *params.Status); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Tags != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "tags", runtime.ParamLocationQuery, *params.Tags); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.UseLogicalAndFor != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "useLogicalAndFor", runtime.ParamLocationQuery, *params.UseLogicalAndFor); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostSyntheticMonitorsRequest calls the generic PostSyntheticMonitors builder with application/json body +func NewPostSyntheticMonitorsRequest(server string, body PostSyntheticMonitorsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostSyntheticMonitorsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostSyntheticMonitorsRequestWithBody generates requests for PostSyntheticMonitors with any type of body +func NewPostSyntheticMonitorsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/monitors") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteSyntheticMonitorsRequest calls the generic DeleteSyntheticMonitors builder with application/json body +func NewDeleteSyntheticMonitorsRequest(server string, body DeleteSyntheticMonitorsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteSyntheticMonitorsRequestWithBody(server, "application/json", bodyReader) +} + +// NewDeleteSyntheticMonitorsRequestWithBody generates requests for DeleteSyntheticMonitors with any type of body +func NewDeleteSyntheticMonitorsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/monitors/_bulk_delete") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteSyntheticMonitorRequest generates requests for DeleteSyntheticMonitor +func NewDeleteSyntheticMonitorRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/monitors/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetSyntheticMonitorRequest generates requests for GetSyntheticMonitor +func NewGetSyntheticMonitorRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/monitors/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutSyntheticMonitorRequest calls the generic PutSyntheticMonitor builder with application/json body +func NewPutSyntheticMonitorRequest(server string, id string, body PutSyntheticMonitorJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutSyntheticMonitorRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPutSyntheticMonitorRequestWithBody generates requests for PutSyntheticMonitor with any type of body +func NewPutSyntheticMonitorRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/monitors/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetParametersRequest generates requests for GetParameters +func NewGetParametersRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/params") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostParametersRequest calls the generic PostParameters builder with application/json body +func NewPostParametersRequest(server string, body PostParametersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostParametersRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostParametersRequestWithBody generates requests for PostParameters with any type of body +func NewPostParametersRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/params") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteParametersRequest calls the generic DeleteParameters builder with application/json body +func NewDeleteParametersRequest(server string, body DeleteParametersJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteParametersRequestWithBody(server, "application/json", bodyReader) +} + +// NewDeleteParametersRequestWithBody generates requests for DeleteParameters with any type of body +func NewDeleteParametersRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/params/_bulk_delete") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteParameterRequest generates requests for DeleteParameter +func NewDeleteParameterRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetParameterRequest generates requests for GetParameter +func NewGetParameterRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutParameterRequest calls the generic PutParameter builder with application/json body +func NewPutParameterRequest(server string, id string, body PutParameterJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutParameterRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPutParameterRequestWithBody generates requests for PutParameter with any type of body +func NewPutParameterRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetPrivateLocationsRequest generates requests for GetPrivateLocations +func NewGetPrivateLocationsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/private_locations") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostPrivateLocationRequest calls the generic PostPrivateLocation builder with application/json body +func NewPostPrivateLocationRequest(server string, body PostPrivateLocationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostPrivateLocationRequestWithBody(server, "application/json", bodyReader) +} + +// NewPostPrivateLocationRequestWithBody generates requests for PostPrivateLocation with any type of body +func NewPostPrivateLocationRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/private_locations") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeletePrivateLocationRequest generates requests for DeletePrivateLocation +func NewDeletePrivateLocationRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/private_locations/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetPrivateLocationRequest generates requests for GetPrivateLocation +func NewGetPrivateLocationRequest(server string, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/private_locations/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutPrivateLocationRequest calls the generic PutPrivateLocation builder with application/json body +func NewPutPrivateLocationRequest(server string, id string, body PutPrivateLocationJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutPrivateLocationRequestWithBody(server, id, "application/json", bodyReader) +} + +// NewPutPrivateLocationRequestWithBody generates requests for PutPrivateLocation with any type of body +func NewPutPrivateLocationRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/synthetics/private_locations/%s", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewTaskManagerHealthRequest generates requests for TaskManagerHealth +func NewTaskManagerHealthRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/task_manager/_health") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteTimelinesRequest calls the generic DeleteTimelines builder with application/json body +func NewDeleteTimelinesRequest(server string, body DeleteTimelinesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteTimelinesRequestWithBody(server, "application/json", bodyReader) +} + +// NewDeleteTimelinesRequestWithBody generates requests for DeleteTimelines with any type of body +func NewDeleteTimelinesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetTimelineRequest generates requests for GetTimeline +func NewGetTimelineRequest(server string, params *GetTimelineParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.TemplateTimelineId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "template_timeline_id", runtime.ParamLocationQuery, *params.TemplateTimelineId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPatchTimelineRequest calls the generic PatchTimeline builder with application/json body +func NewPatchTimelineRequest(server string, body PatchTimelineJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchTimelineRequestWithBody(server, "application/json", bodyReader) +} + +// NewPatchTimelineRequestWithBody generates requests for PatchTimeline with any type of body +func NewPatchTimelineRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCreateTimelinesRequest calls the generic CreateTimelines builder with application/json body +func NewCreateTimelinesRequest(server string, body CreateTimelinesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateTimelinesRequestWithBody(server, "application/json", bodyReader) +} + +// NewCreateTimelinesRequestWithBody generates requests for CreateTimelines with any type of body +func NewCreateTimelinesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewCopyTimelineRequest calls the generic CopyTimeline builder with application/json body +func NewCopyTimelineRequest(server string, body CopyTimelineJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCopyTimelineRequestWithBody(server, "application/json", bodyReader) +} + +// NewCopyTimelineRequestWithBody generates requests for CopyTimeline with any type of body +func NewCopyTimelineRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline/_copy") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetDraftTimelinesRequest generates requests for GetDraftTimelines +func NewGetDraftTimelinesRequest(server string, params *GetDraftTimelinesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline/_draft") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "timelineType", runtime.ParamLocationQuery, params.TimelineType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCleanDraftTimelinesRequest calls the generic CleanDraftTimelines builder with application/json body +func NewCleanDraftTimelinesRequest(server string, body CleanDraftTimelinesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCleanDraftTimelinesRequestWithBody(server, "application/json", bodyReader) +} + +// NewCleanDraftTimelinesRequestWithBody generates requests for CleanDraftTimelines with any type of body +func NewCleanDraftTimelinesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline/_draft") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewExportTimelinesRequest calls the generic ExportTimelines builder with application/json body +func NewExportTimelinesRequest(server string, params *ExportTimelinesParams, body ExportTimelinesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewExportTimelinesRequestWithBody(server, params, "application/json", bodyReader) +} + +// NewExportTimelinesRequestWithBody generates requests for ExportTimelines with any type of body +func NewExportTimelinesRequestWithBody(server string, params *ExportTimelinesParams, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline/_export") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "file_name", runtime.ParamLocationQuery, params.FileName); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPersistFavoriteRouteRequest calls the generic PersistFavoriteRoute builder with application/json body +func NewPersistFavoriteRouteRequest(server string, body PersistFavoriteRouteJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPersistFavoriteRouteRequestWithBody(server, "application/json", bodyReader) +} + +// NewPersistFavoriteRouteRequestWithBody generates requests for PersistFavoriteRoute with any type of body +func NewPersistFavoriteRouteRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline/_favorite") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewImportTimelinesRequest calls the generic ImportTimelines builder with application/json body +func NewImportTimelinesRequest(server string, body ImportTimelinesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewImportTimelinesRequestWithBody(server, "application/json", bodyReader) +} + +// NewImportTimelinesRequestWithBody generates requests for ImportTimelines with any type of body +func NewImportTimelinesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline/_import") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewInstallPrepackedTimelinesRequest calls the generic InstallPrepackedTimelines builder with application/json body +func NewInstallPrepackedTimelinesRequest(server string, body InstallPrepackedTimelinesJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewInstallPrepackedTimelinesRequestWithBody(server, "application/json", bodyReader) +} + +// NewInstallPrepackedTimelinesRequestWithBody generates requests for InstallPrepackedTimelines with any type of body +func NewInstallPrepackedTimelinesRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline/_prepackaged") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewResolveTimelineRequest generates requests for ResolveTimeline +func NewResolveTimelineRequest(server string, params *ResolveTimelineParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timeline/resolve") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.TemplateTimelineId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "template_timeline_id", runtime.ParamLocationQuery, *params.TemplateTimelineId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Id != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "id", runtime.ParamLocationQuery, *params.Id); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetTimelinesRequest generates requests for GetTimelines +func NewGetTimelinesRequest(server string, params *GetTimelinesParams) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/timelines") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.OnlyUserFavorite != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "only_user_favorite", runtime.ParamLocationQuery, *params.OnlyUserFavorite); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.TimelineType != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "timeline_type", runtime.ParamLocationQuery, *params.TimelineType); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortField != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_field", runtime.ParamLocationQuery, *params.SortField); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortOrder != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sort_order", runtime.ParamLocationQuery, *params.SortOrder); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PageSize != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page_size", runtime.ParamLocationQuery, *params.PageSize); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range 
parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PageIndex != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page_index", runtime.ParamLocationQuery, *params.PageIndex); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Search != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "search", runtime.ParamLocationQuery, *params.Search); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Status != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "status", runtime.ParamLocationQuery, *params.Status); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetUpgradeStatusRequest generates requests for GetUpgradeStatus +func NewGetUpgradeStatusRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/upgrade_assistant/status") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetUptimeSettingsRequest generates requests for GetUptimeSettings +func NewGetUptimeSettingsRequest(server string) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/uptime/settings") + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPutUptimeSettingsRequest calls the generic PutUptimeSettings builder with application/json body +func NewPutUptimeSettingsRequest(server string, body PutUptimeSettingsJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutUptimeSettingsRequestWithBody(server, "application/json", bodyReader) +} + +// NewPutUptimeSettingsRequestWithBody generates requests for PutUptimeSettings with any type of body +func NewPutUptimeSettingsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/api/uptime/settings") + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteActionsConnectorIdRequest generates requests for DeleteActionsConnectorId +func NewDeleteActionsConnectorIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetActionsConnectorIdRequest generates requests for GetActionsConnectorId +func NewGetActionsConnectorIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPostActionsConnectorIdRequest calls the generic PostActionsConnectorId builder with application/json body +func NewPostActionsConnectorIdRequest(server string, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostActionsConnectorIdRequestWithBody(server, spaceId, id, "application/json", bodyReader) +} + +// NewPostActionsConnectorIdRequestWithBody generates requests for PostActionsConnectorId with any type of body +func NewPostActionsConnectorIdRequestWithBody(server string, spaceId SpaceId, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPutActionsConnectorIdRequest calls the generic PutActionsConnectorId builder with application/json body +func NewPutActionsConnectorIdRequest(server string, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPutActionsConnectorIdRequestWithBody(server, spaceId, id, "application/json", bodyReader) +} + +// NewPutActionsConnectorIdRequestWithBody generates requests for PutActionsConnectorId with any type of body +func NewPutActionsConnectorIdRequestWithBody(server string, spaceId SpaceId, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewGetActionsConnectorsRequest generates requests for GetActionsConnectors +func NewGetActionsConnectorsRequest(server string, spaceId SpaceId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/actions/connectors", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetAllDataViewsDefaultRequest generates requests for GetAllDataViewsDefault +func NewGetAllDataViewsDefaultRequest(server string, spaceId SpaceId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/data_views", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateDataViewDefaultwRequest calls the generic CreateDataViewDefaultw builder with application/json body +func NewCreateDataViewDefaultwRequest(server string, spaceId SpaceId, body CreateDataViewDefaultwJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateDataViewDefaultwRequestWithBody(server, spaceId, "application/json", bodyReader) +} + +// NewCreateDataViewDefaultwRequestWithBody generates requests for CreateDataViewDefaultw with any type of body +func NewCreateDataViewDefaultwRequestWithBody(server string, spaceId SpaceId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/data_views/data_view", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteDataViewDefaultRequest generates requests for DeleteDataViewDefault +func NewDeleteDataViewDefaultRequest(server string, spaceId SpaceId, viewId DataViewsViewId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/data_views/data_view/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetDataViewDefaultRequest generates requests for GetDataViewDefault +func NewGetDataViewDefaultRequest(server string, spaceId SpaceId, viewId DataViewsViewId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/data_views/data_view/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateDataViewDefaultRequest calls the generic UpdateDataViewDefault builder with application/json body +func NewUpdateDataViewDefaultRequest(server string, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateDataViewDefaultRequestWithBody(server, spaceId, viewId, "application/json", bodyReader) +} + +// NewUpdateDataViewDefaultRequestWithBody generates requests for UpdateDataViewDefault with any type of body +func NewUpdateDataViewDefaultRequestWithBody(server string, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/data_views/data_view/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewPostMaintenanceWindowRequest calls the generic PostMaintenanceWindow builder with application/json body +func NewPostMaintenanceWindowRequest(server string, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPostMaintenanceWindowRequestWithBody(server, spaceId, "application/json", bodyReader) +} + +// NewPostMaintenanceWindowRequestWithBody generates requests for PostMaintenanceWindow with any type of body +func NewPostMaintenanceWindowRequestWithBody(server string, spaceId SpaceId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/maintenance_window", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteMaintenanceWindowIdRequest generates requests for DeleteMaintenanceWindowId +func NewDeleteMaintenanceWindowIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/maintenance_window/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetMaintenanceWindowIdRequest generates requests for GetMaintenanceWindowId +func NewGetMaintenanceWindowIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/maintenance_window/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewPatchMaintenanceWindowIdRequest calls the generic PatchMaintenanceWindowId builder with application/json body +func NewPatchMaintenanceWindowIdRequest(server string, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewPatchMaintenanceWindowIdRequestWithBody(server, spaceId, id, "application/json", bodyReader) +} + +// NewPatchMaintenanceWindowIdRequestWithBody generates requests for PatchMaintenanceWindowId with any type of body +func NewPatchMaintenanceWindowIdRequestWithBody(server string, spaceId SpaceId, id string, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/maintenance_window/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PATCH", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewFindSlosOpRequest generates requests for FindSlosOp +func NewFindSlosOpRequest(server string, spaceId SLOsSpaceId, params *FindSlosOpParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.KqlQuery != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kqlQuery", runtime.ParamLocationQuery, *params.KqlQuery); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Size != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "size", runtime.ParamLocationQuery, *params.Size); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SearchAfter != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "searchAfter", runtime.ParamLocationQuery, *params.SearchAfter); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortBy != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortBy", runtime.ParamLocationQuery, *params.SortBy); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.SortDirection != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortDirection", runtime.ParamLocationQuery, *params.SortDirection); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.HideStale != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "hideStale", runtime.ParamLocationQuery, *params.HideStale); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewCreateSloOpRequest calls the generic CreateSloOp builder with application/json body +func NewCreateSloOpRequest(server string, 
spaceId SLOsSpaceId, body CreateSloOpJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewCreateSloOpRequestWithBody(server, spaceId, "application/json", bodyReader) +} + +// NewCreateSloOpRequestWithBody generates requests for CreateSloOp with any type of body +func NewCreateSloOpRequestWithBody(server string, spaceId SLOsSpaceId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewBulkDeleteOpRequest calls the generic BulkDeleteOp builder with application/json body +func NewBulkDeleteOpRequest(server string, spaceId SLOsSpaceId, body BulkDeleteOpJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewBulkDeleteOpRequestWithBody(server, spaceId, "application/json", bodyReader) +} + +// NewBulkDeleteOpRequestWithBody generates requests for BulkDeleteOp with any type of body +func NewBulkDeleteOpRequestWithBody(server string, spaceId SLOsSpaceId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos/_bulk_delete", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewBulkDeleteStatusOpRequest generates requests for BulkDeleteStatusOp +func NewBulkDeleteStatusOpRequest(server string, spaceId SLOsSpaceId, taskId string) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "taskId", runtime.ParamLocationPath, taskId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos/_bulk_delete/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDeleteRollupDataOpRequest calls the generic DeleteRollupDataOp builder with application/json body +func NewDeleteRollupDataOpRequest(server string, spaceId SLOsSpaceId, body DeleteRollupDataOpJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteRollupDataOpRequestWithBody(server, spaceId, "application/json", bodyReader) +} + +// NewDeleteRollupDataOpRequestWithBody generates requests for DeleteRollupDataOp with any type of body +func NewDeleteRollupDataOpRequestWithBody(server string, spaceId SLOsSpaceId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos/_bulk_purge_rollup", pathParam0) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteSloInstancesOpRequest calls the generic DeleteSloInstancesOp builder with application/json body +func NewDeleteSloInstancesOpRequest(server string, spaceId SLOsSpaceId, body DeleteSloInstancesOpJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewDeleteSloInstancesOpRequestWithBody(server, spaceId, "application/json", bodyReader) +} + +// NewDeleteSloInstancesOpRequestWithBody generates requests for DeleteSloInstancesOp with any type of body +func NewDeleteSloInstancesOpRequestWithBody(server string, spaceId SLOsSpaceId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos/_delete_instances", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewDeleteSloOpRequest generates requests for DeleteSloOp +func NewDeleteSloOpRequest(server string, spaceId SLOsSpaceId, sloId SLOsSloId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "sloId", runtime.ParamLocationPath, sloId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("DELETE", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetSloOpRequest generates requests for GetSloOp +func NewGetSloOpRequest(server string, spaceId SLOsSpaceId, sloId SLOsSloId, params *GetSloOpParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "sloId", runtime.ParamLocationPath, sloId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.InstanceId != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "instanceId", runtime.ParamLocationQuery, *params.InstanceId); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewUpdateSloOpRequest calls the generic UpdateSloOp builder with application/json body +func NewUpdateSloOpRequest(server string, spaceId SLOsSpaceId, sloId SLOsSloId, body UpdateSloOpJSONRequestBody) (*http.Request, error) { + var bodyReader io.Reader + buf, err := json.Marshal(body) + if err != nil { + return nil, err + } + bodyReader = bytes.NewReader(buf) + return NewUpdateSloOpRequestWithBody(server, spaceId, sloId, "application/json", bodyReader) +} + +// NewUpdateSloOpRequestWithBody generates requests for UpdateSloOp with any type of body +func NewUpdateSloOpRequestWithBody(server string, spaceId SLOsSpaceId, sloId SLOsSloId, contentType string, body io.Reader) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "sloId", runtime.ParamLocationPath, sloId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos/%s", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("PUT", queryURL.String(), body) + if err != nil { + return nil, err + } + + req.Header.Add("Content-Type", contentType) + + return req, nil +} + +// NewResetSloOpRequest generates requests for ResetSloOp +func NewResetSloOpRequest(server string, spaceId SLOsSpaceId, sloId SLOsSloId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "sloId", runtime.ParamLocationPath, sloId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos/%s/_reset", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewDisableSloOpRequest generates requests for DisableSloOp +func NewDisableSloOpRequest(server string, spaceId SLOsSpaceId, sloId SLOsSloId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "sloId", runtime.ParamLocationPath, sloId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos/%s/disable", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewEnableSloOpRequest generates requests for EnableSloOp +func NewEnableSloOpRequest(server string, spaceId SLOsSpaceId, sloId SLOsSloId) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + var pathParam1 string + + pathParam1, err = runtime.StyleParamWithLocation("simple", false, "sloId", runtime.ParamLocationPath, sloId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/api/observability/slos/%s/enable", pathParam0, pathParam1) + if operationPath[0] == '/' { + operationPath = "." + operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + req, err := http.NewRequest("POST", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +// NewGetDefinitionsOpRequest generates requests for GetDefinitionsOp +func NewGetDefinitionsOpRequest(server string, spaceId SLOsSpaceId, params *GetDefinitionsOpParams) (*http.Request, error) { + var err error + + var pathParam0 string + + pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) + if err != nil { + return nil, err + } + + serverURL, err := url.Parse(server) + if err != nil { + return nil, err + } + + operationPath := fmt.Sprintf("/s/%s/internal/observability/slos/_definitions", pathParam0) + if operationPath[0] == '/' { + operationPath = "." 
+ operationPath + } + + queryURL, err := serverURL.Parse(operationPath) + if err != nil { + return nil, err + } + + if params != nil { + queryValues := queryURL.Query() + + if params.IncludeOutdatedOnly != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "includeOutdatedOnly", runtime.ParamLocationQuery, *params.IncludeOutdatedOnly); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Tags != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "tags", runtime.ParamLocationQuery, *params.Tags); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Search != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "search", runtime.ParamLocationQuery, *params.Search); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.Page != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + if params.PerPage != nil { + + if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { + return nil, err + } else if parsed, err := url.ParseQuery(queryFrag); err != nil { + return nil, err + } else { + for k, v := range parsed { + for _, v2 := range v { + queryValues.Add(k, v2) + } + } + } + + } + + queryURL.RawQuery = queryValues.Encode() + } + + req, err := http.NewRequest("GET", queryURL.String(), nil) + if err != nil { + return nil, err + } + + return req, nil +} + +func (c *Client) applyEditors(ctx context.Context, req *http.Request, additionalEditors []RequestEditorFn) error { + for _, r := range c.RequestEditors { + if err := r(ctx, req); err != nil { + return err + } + } + for _, r := range additionalEditors { + if err := r(ctx, req); err != nil { + return err + } + } + return nil +} + +// ClientWithResponses builds on ClientInterface to offer response payloads +type ClientWithResponses struct { + ClientInterface +} + +// NewClientWithResponses creates a new ClientWithResponses, which wraps +// Client with return type handling +func NewClientWithResponses(server string, opts ...ClientOption) (*ClientWithResponses, error) { + client, err := NewClient(server, opts...) + if err != nil { + return nil, err + } + return &ClientWithResponses{client}, nil +} + +// WithBaseURL overrides the baseURL. +func WithBaseURL(baseURL string) ClientOption { + return func(c *Client) error { + newBaseURL, err := url.Parse(baseURL) + if err != nil { + return err + } + c.Server = newBaseURL.String() + return nil + } +} + +// ClientWithResponsesInterface is the interface specification for the client with responses above. 
+type ClientWithResponsesInterface interface {
+	// PostActionsConnectorIdExecuteWithBodyWithResponse request with any body
+	PostActionsConnectorIdExecuteWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdExecuteResponse, error)
+
+	PostActionsConnectorIdExecuteWithResponse(ctx context.Context, id string, body PostActionsConnectorIdExecuteJSONRequestBody, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdExecuteResponse, error)
+
+	// GetActionsConnectorTypesWithResponse request
+	GetActionsConnectorTypesWithResponse(ctx context.Context, params *GetActionsConnectorTypesParams, reqEditors ...RequestEditorFn) (*GetActionsConnectorTypesResponse, error)
+
+	// GetAlertingHealthWithResponse request
+	GetAlertingHealthWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetAlertingHealthResponse, error)
+
+	// DeleteAlertingRuleIdWithResponse request
+	DeleteAlertingRuleIdWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteAlertingRuleIdResponse, error)
+
+	// GetAlertingRuleIdWithResponse request
+	GetAlertingRuleIdWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetAlertingRuleIdResponse, error)
+
+	// PostAlertingRuleIdWithBodyWithResponse request with any body
+	PostAlertingRuleIdWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdResponse, error)
+
+	PostAlertingRuleIdWithResponse(ctx context.Context, id string, body PostAlertingRuleIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdResponse, error)
+
+	// PutAlertingRuleIdWithBodyWithResponse request with any body
+	PutAlertingRuleIdWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutAlertingRuleIdResponse, error)
+
+	PutAlertingRuleIdWithResponse(ctx context.Context, id string, body PutAlertingRuleIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PutAlertingRuleIdResponse, error)
+
+	// PostAlertingRuleIdDisableWithBodyWithResponse request with any body
+	PostAlertingRuleIdDisableWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdDisableResponse, error)
+
+	PostAlertingRuleIdDisableWithResponse(ctx context.Context, id string, body PostAlertingRuleIdDisableJSONRequestBody, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdDisableResponse, error)
+
+	// PostAlertingRuleIdEnableWithResponse request
+	PostAlertingRuleIdEnableWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdEnableResponse, error)
+
+	// PostAlertingRuleIdMuteAllWithResponse request
+	PostAlertingRuleIdMuteAllWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdMuteAllResponse, error)
+
+	// PostAlertingRuleIdUnmuteAllWithResponse request
+	PostAlertingRuleIdUnmuteAllWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdUnmuteAllResponse, error)
+
+	// PostAlertingRuleIdUpdateApiKeyWithResponse request
+	PostAlertingRuleIdUpdateApiKeyWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdUpdateApiKeyResponse, error)
+
+	// PostAlertingRuleIdSnoozeScheduleWithBodyWithResponse request with any body
+	PostAlertingRuleIdSnoozeScheduleWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdSnoozeScheduleResponse, error)
+
+	PostAlertingRuleIdSnoozeScheduleWithResponse(ctx context.Context, id string, body PostAlertingRuleIdSnoozeScheduleJSONRequestBody, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdSnoozeScheduleResponse, error)
+
+	// DeleteAlertingRuleRuleidSnoozeScheduleScheduleidWithResponse request
+	DeleteAlertingRuleRuleidSnoozeScheduleScheduleidWithResponse(ctx context.Context, ruleId string, scheduleId string, reqEditors ...RequestEditorFn) (*DeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse, error)
+
+	// PostAlertingRuleRuleIdAlertAlertIdMuteWithResponse request
+	PostAlertingRuleRuleIdAlertAlertIdMuteWithResponse(ctx context.Context, ruleId string, alertId string, reqEditors ...RequestEditorFn) (*PostAlertingRuleRuleIdAlertAlertIdMuteResponse, error)
+
+	// PostAlertingRuleRuleIdAlertAlertIdUnmuteWithResponse request
+	PostAlertingRuleRuleIdAlertAlertIdUnmuteWithResponse(ctx context.Context, ruleId string, alertId string, reqEditors ...RequestEditorFn) (*PostAlertingRuleRuleIdAlertAlertIdUnmuteResponse, error)
+
+	// GetRuleTypesWithResponse request
+	GetRuleTypesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetRuleTypesResponse, error)
+
+	// GetAlertingRulesFindWithResponse request
+	GetAlertingRulesFindWithResponse(ctx context.Context, params *GetAlertingRulesFindParams, reqEditors ...RequestEditorFn) (*GetAlertingRulesFindResponse, error)
+
+	// CreateAgentKeyWithBodyWithResponse request with any body
+	CreateAgentKeyWithBodyWithResponse(ctx context.Context, params *CreateAgentKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAgentKeyResponse, error)
+
+	CreateAgentKeyWithResponse(ctx context.Context, params *CreateAgentKeyParams, body CreateAgentKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAgentKeyResponse, error)
+
+	// SaveApmServerSchemaWithBodyWithResponse request with any body
+	SaveApmServerSchemaWithBodyWithResponse(ctx context.Context, params *SaveApmServerSchemaParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SaveApmServerSchemaResponse, error)
+
+	SaveApmServerSchemaWithResponse(ctx context.Context, params *SaveApmServerSchemaParams, body SaveApmServerSchemaJSONRequestBody, reqEditors ...RequestEditorFn) (*SaveApmServerSchemaResponse, error)
+
+	// CreateAnnotationWithBodyWithResponse request with any body
+	CreateAnnotationWithBodyWithResponse(ctx context.Context, serviceName string, params *CreateAnnotationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAnnotationResponse, error)
+
+	CreateAnnotationWithResponse(ctx context.Context, serviceName string, params *CreateAnnotationParams, body CreateAnnotationJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAnnotationResponse, error)
+
+	// GetAnnotationWithResponse request
+	GetAnnotationWithResponse(ctx context.Context, serviceName string, params *GetAnnotationParams, reqEditors ...RequestEditorFn) (*GetAnnotationResponse, error)
+
+	// DeleteAgentConfigurationWithBodyWithResponse request with any body
+	DeleteAgentConfigurationWithBodyWithResponse(ctx context.Context, params *DeleteAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteAgentConfigurationResponse, error)
+
+	DeleteAgentConfigurationWithResponse(ctx context.Context, params *DeleteAgentConfigurationParams, body
DeleteAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteAgentConfigurationResponse, error) + + // GetAgentConfigurationsWithResponse request + GetAgentConfigurationsWithResponse(ctx context.Context, params *GetAgentConfigurationsParams, reqEditors ...RequestEditorFn) (*GetAgentConfigurationsResponse, error) + + // CreateUpdateAgentConfigurationWithBodyWithResponse request with any body + CreateUpdateAgentConfigurationWithBodyWithResponse(ctx context.Context, params *CreateUpdateAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateUpdateAgentConfigurationResponse, error) + + CreateUpdateAgentConfigurationWithResponse(ctx context.Context, params *CreateUpdateAgentConfigurationParams, body CreateUpdateAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateUpdateAgentConfigurationResponse, error) + + // GetAgentNameForServiceWithResponse request + GetAgentNameForServiceWithResponse(ctx context.Context, params *GetAgentNameForServiceParams, reqEditors ...RequestEditorFn) (*GetAgentNameForServiceResponse, error) + + // GetEnvironmentsForServiceWithResponse request + GetEnvironmentsForServiceWithResponse(ctx context.Context, params *GetEnvironmentsForServiceParams, reqEditors ...RequestEditorFn) (*GetEnvironmentsForServiceResponse, error) + + // SearchSingleConfigurationWithBodyWithResponse request with any body + SearchSingleConfigurationWithBodyWithResponse(ctx context.Context, params *SearchSingleConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SearchSingleConfigurationResponse, error) + + SearchSingleConfigurationWithResponse(ctx context.Context, params *SearchSingleConfigurationParams, body SearchSingleConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*SearchSingleConfigurationResponse, error) + + // GetSingleAgentConfigurationWithResponse request + GetSingleAgentConfigurationWithResponse(ctx context.Context, params *GetSingleAgentConfigurationParams, reqEditors ...RequestEditorFn) (*GetSingleAgentConfigurationResponse, error) + + // GetSourceMapsWithResponse request + GetSourceMapsWithResponse(ctx context.Context, params *GetSourceMapsParams, reqEditors ...RequestEditorFn) (*GetSourceMapsResponse, error) + + // UploadSourceMapWithBodyWithResponse request with any body + UploadSourceMapWithBodyWithResponse(ctx context.Context, params *UploadSourceMapParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UploadSourceMapResponse, error) + + // DeleteSourceMapWithResponse request + DeleteSourceMapWithResponse(ctx context.Context, id string, params *DeleteSourceMapParams, reqEditors ...RequestEditorFn) (*DeleteSourceMapResponse, error) + + // DeleteAssetCriticalityRecordWithResponse request + DeleteAssetCriticalityRecordWithResponse(ctx context.Context, params *DeleteAssetCriticalityRecordParams, reqEditors ...RequestEditorFn) (*DeleteAssetCriticalityRecordResponse, error) + + // GetAssetCriticalityRecordWithResponse request + GetAssetCriticalityRecordWithResponse(ctx context.Context, params *GetAssetCriticalityRecordParams, reqEditors ...RequestEditorFn) (*GetAssetCriticalityRecordResponse, error) + + // CreateAssetCriticalityRecordWithBodyWithResponse request with any body + CreateAssetCriticalityRecordWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAssetCriticalityRecordResponse, error) + + CreateAssetCriticalityRecordWithResponse(ctx context.Context, body 
CreateAssetCriticalityRecordJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAssetCriticalityRecordResponse, error) + + // BulkUpsertAssetCriticalityRecordsWithBodyWithResponse request with any body + BulkUpsertAssetCriticalityRecordsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkUpsertAssetCriticalityRecordsResponse, error) + + BulkUpsertAssetCriticalityRecordsWithResponse(ctx context.Context, body BulkUpsertAssetCriticalityRecordsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkUpsertAssetCriticalityRecordsResponse, error) + + // FindAssetCriticalityRecordsWithResponse request + FindAssetCriticalityRecordsWithResponse(ctx context.Context, params *FindAssetCriticalityRecordsParams, reqEditors ...RequestEditorFn) (*FindAssetCriticalityRecordsResponse, error) + + // DeleteCaseDefaultSpaceWithResponse request + DeleteCaseDefaultSpaceWithResponse(ctx context.Context, params *DeleteCaseDefaultSpaceParams, reqEditors ...RequestEditorFn) (*DeleteCaseDefaultSpaceResponse, error) + + // UpdateCaseDefaultSpaceWithBodyWithResponse request with any body + UpdateCaseDefaultSpaceWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateCaseDefaultSpaceResponse, error) + + UpdateCaseDefaultSpaceWithResponse(ctx context.Context, body UpdateCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateCaseDefaultSpaceResponse, error) + + // CreateCaseDefaultSpaceWithBodyWithResponse request with any body + CreateCaseDefaultSpaceWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateCaseDefaultSpaceResponse, error) + + CreateCaseDefaultSpaceWithResponse(ctx context.Context, body CreateCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateCaseDefaultSpaceResponse, error) + + // FindCasesDefaultSpaceWithResponse request + FindCasesDefaultSpaceWithResponse(ctx context.Context, params *FindCasesDefaultSpaceParams, reqEditors ...RequestEditorFn) (*FindCasesDefaultSpaceResponse, error) + + // GetCasesByAlertDefaultSpaceWithResponse request + GetCasesByAlertDefaultSpaceWithResponse(ctx context.Context, alertId CasesAlertId, params *GetCasesByAlertDefaultSpaceParams, reqEditors ...RequestEditorFn) (*GetCasesByAlertDefaultSpaceResponse, error) + + // GetCaseConfigurationDefaultSpaceWithResponse request + GetCaseConfigurationDefaultSpaceWithResponse(ctx context.Context, params *GetCaseConfigurationDefaultSpaceParams, reqEditors ...RequestEditorFn) (*GetCaseConfigurationDefaultSpaceResponse, error) + + // SetCaseConfigurationDefaultSpaceWithBodyWithResponse request with any body + SetCaseConfigurationDefaultSpaceWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SetCaseConfigurationDefaultSpaceResponse, error) + + SetCaseConfigurationDefaultSpaceWithResponse(ctx context.Context, body SetCaseConfigurationDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*SetCaseConfigurationDefaultSpaceResponse, error) + + // FindCaseConnectorsDefaultSpaceWithResponse request + FindCaseConnectorsDefaultSpaceWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*FindCaseConnectorsDefaultSpaceResponse, error) + + // UpdateCaseConfigurationDefaultSpaceWithBodyWithResponse request with any body + UpdateCaseConfigurationDefaultSpaceWithBodyWithResponse(ctx context.Context, configurationId CasesConfigurationId, contentType string, 
body io.Reader, reqEditors ...RequestEditorFn) (*UpdateCaseConfigurationDefaultSpaceResponse, error) + + UpdateCaseConfigurationDefaultSpaceWithResponse(ctx context.Context, configurationId CasesConfigurationId, body UpdateCaseConfigurationDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateCaseConfigurationDefaultSpaceResponse, error) + + // GetCaseReportersDefaultSpaceWithResponse request + GetCaseReportersDefaultSpaceWithResponse(ctx context.Context, params *GetCaseReportersDefaultSpaceParams, reqEditors ...RequestEditorFn) (*GetCaseReportersDefaultSpaceResponse, error) + + // GetCaseTagsDefaultSpaceWithResponse request + GetCaseTagsDefaultSpaceWithResponse(ctx context.Context, params *GetCaseTagsDefaultSpaceParams, reqEditors ...RequestEditorFn) (*GetCaseTagsDefaultSpaceResponse, error) + + // GetCaseDefaultSpaceWithResponse request + GetCaseDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*GetCaseDefaultSpaceResponse, error) + + // GetCaseAlertsDefaultSpaceWithResponse request + GetCaseAlertsDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*GetCaseAlertsDefaultSpaceResponse, error) + + // DeleteCaseCommentsDefaultSpaceWithResponse request + DeleteCaseCommentsDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*DeleteCaseCommentsDefaultSpaceResponse, error) + + // UpdateCaseCommentDefaultSpaceWithBodyWithResponse request with any body + UpdateCaseCommentDefaultSpaceWithBodyWithResponse(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateCaseCommentDefaultSpaceResponse, error) + + UpdateCaseCommentDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, body UpdateCaseCommentDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateCaseCommentDefaultSpaceResponse, error) + + // AddCaseCommentDefaultSpaceWithBodyWithResponse request with any body + AddCaseCommentDefaultSpaceWithBodyWithResponse(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*AddCaseCommentDefaultSpaceResponse, error) + + AddCaseCommentDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, body AddCaseCommentDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*AddCaseCommentDefaultSpaceResponse, error) + + // FindCaseCommentsDefaultSpaceWithResponse request + FindCaseCommentsDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, params *FindCaseCommentsDefaultSpaceParams, reqEditors ...RequestEditorFn) (*FindCaseCommentsDefaultSpaceResponse, error) + + // DeleteCaseCommentDefaultSpaceWithResponse request + DeleteCaseCommentDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, commentId CasesCommentId, reqEditors ...RequestEditorFn) (*DeleteCaseCommentDefaultSpaceResponse, error) + + // GetCaseCommentDefaultSpaceWithResponse request + GetCaseCommentDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, commentId CasesCommentId, reqEditors ...RequestEditorFn) (*GetCaseCommentDefaultSpaceResponse, error) + + // PushCaseDefaultSpaceWithBodyWithResponse request with any body + PushCaseDefaultSpaceWithBodyWithResponse(ctx context.Context, caseId CasesCaseId, connectorId CasesConnectorId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PushCaseDefaultSpaceResponse, error) + + PushCaseDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, 
connectorId CasesConnectorId, body PushCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*PushCaseDefaultSpaceResponse, error) + + // AddCaseFileDefaultSpaceWithBodyWithResponse request with any body + AddCaseFileDefaultSpaceWithBodyWithResponse(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*AddCaseFileDefaultSpaceResponse, error) + + // FindCaseActivityDefaultSpaceWithResponse request + FindCaseActivityDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, params *FindCaseActivityDefaultSpaceParams, reqEditors ...RequestEditorFn) (*FindCaseActivityDefaultSpaceResponse, error) + + // UpdateFieldsMetadataDefaultWithBodyWithResponse request with any body + UpdateFieldsMetadataDefaultWithBodyWithResponse(ctx context.Context, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateFieldsMetadataDefaultResponse, error) + + UpdateFieldsMetadataDefaultWithResponse(ctx context.Context, viewId DataViewsViewId, body UpdateFieldsMetadataDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateFieldsMetadataDefaultResponse, error) + + // CreateRuntimeFieldDefaultWithBodyWithResponse request with any body + CreateRuntimeFieldDefaultWithBodyWithResponse(ctx context.Context, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateRuntimeFieldDefaultResponse, error) + + CreateRuntimeFieldDefaultWithResponse(ctx context.Context, viewId DataViewsViewId, body CreateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateRuntimeFieldDefaultResponse, error) + + // CreateUpdateRuntimeFieldDefaultWithBodyWithResponse request with any body + CreateUpdateRuntimeFieldDefaultWithBodyWithResponse(ctx context.Context, viewId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateUpdateRuntimeFieldDefaultResponse, error) + + CreateUpdateRuntimeFieldDefaultWithResponse(ctx context.Context, viewId string, body CreateUpdateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateUpdateRuntimeFieldDefaultResponse, error) + + // DeleteRuntimeFieldDefaultWithResponse request + DeleteRuntimeFieldDefaultWithResponse(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, reqEditors ...RequestEditorFn) (*DeleteRuntimeFieldDefaultResponse, error) + + // GetRuntimeFieldDefaultWithResponse request + GetRuntimeFieldDefaultWithResponse(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, reqEditors ...RequestEditorFn) (*GetRuntimeFieldDefaultResponse, error) + + // UpdateRuntimeFieldDefaultWithBodyWithResponse request with any body + UpdateRuntimeFieldDefaultWithBodyWithResponse(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateRuntimeFieldDefaultResponse, error) + + UpdateRuntimeFieldDefaultWithResponse(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, body UpdateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateRuntimeFieldDefaultResponse, error) + + // GetDefaultDataViewDefaultWithResponse request + GetDefaultDataViewDefaultWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetDefaultDataViewDefaultResponse, error) + + // SetDefaultDatailViewDefaultWithBodyWithResponse request with any body + SetDefaultDatailViewDefaultWithBodyWithResponse(ctx context.Context, contentType string, body 
io.Reader, reqEditors ...RequestEditorFn) (*SetDefaultDatailViewDefaultResponse, error) + + SetDefaultDatailViewDefaultWithResponse(ctx context.Context, body SetDefaultDatailViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*SetDefaultDatailViewDefaultResponse, error) + + // SwapDataViewsDefaultWithBodyWithResponse request with any body + SwapDataViewsDefaultWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SwapDataViewsDefaultResponse, error) + + SwapDataViewsDefaultWithResponse(ctx context.Context, body SwapDataViewsDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*SwapDataViewsDefaultResponse, error) + + // PreviewSwapDataViewsDefaultWithBodyWithResponse request with any body + PreviewSwapDataViewsDefaultWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PreviewSwapDataViewsDefaultResponse, error) + + PreviewSwapDataViewsDefaultWithResponse(ctx context.Context, body PreviewSwapDataViewsDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*PreviewSwapDataViewsDefaultResponse, error) + + // DeleteAlertsIndexWithResponse request + DeleteAlertsIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*DeleteAlertsIndexResponse, error) + + // ReadAlertsIndexWithResponse request + ReadAlertsIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadAlertsIndexResponse, error) + + // CreateAlertsIndexWithResponse request + CreateAlertsIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*CreateAlertsIndexResponse, error) + + // ReadPrivilegesWithResponse request + ReadPrivilegesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadPrivilegesResponse, error) + + // DeleteRuleWithResponse request + DeleteRuleWithResponse(ctx context.Context, params *DeleteRuleParams, reqEditors ...RequestEditorFn) (*DeleteRuleResponse, error) + + // ReadRuleWithResponse request + ReadRuleWithResponse(ctx context.Context, params *ReadRuleParams, reqEditors ...RequestEditorFn) (*ReadRuleResponse, error) + + // PatchRuleWithBodyWithResponse request with any body + PatchRuleWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchRuleResponse, error) + + PatchRuleWithResponse(ctx context.Context, body PatchRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchRuleResponse, error) + + // CreateRuleWithBodyWithResponse request with any body + CreateRuleWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateRuleResponse, error) + + CreateRuleWithResponse(ctx context.Context, body CreateRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateRuleResponse, error) + + // UpdateRuleWithBodyWithResponse request with any body + UpdateRuleWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateRuleResponse, error) + + UpdateRuleWithResponse(ctx context.Context, body UpdateRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateRuleResponse, error) + + // PerformRulesBulkActionWithBodyWithResponse request with any body + PerformRulesBulkActionWithBodyWithResponse(ctx context.Context, params *PerformRulesBulkActionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PerformRulesBulkActionResponse, error) + + PerformRulesBulkActionWithResponse(ctx context.Context, params *PerformRulesBulkActionParams, body 
PerformRulesBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*PerformRulesBulkActionResponse, error) + + // ExportRulesWithBodyWithResponse request with any body + ExportRulesWithBodyWithResponse(ctx context.Context, params *ExportRulesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ExportRulesResponse, error) + + ExportRulesWithResponse(ctx context.Context, params *ExportRulesParams, body ExportRulesJSONRequestBody, reqEditors ...RequestEditorFn) (*ExportRulesResponse, error) + + // FindRulesWithResponse request + FindRulesWithResponse(ctx context.Context, params *FindRulesParams, reqEditors ...RequestEditorFn) (*FindRulesResponse, error) + + // ImportRulesWithBodyWithResponse request with any body + ImportRulesWithBodyWithResponse(ctx context.Context, params *ImportRulesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ImportRulesResponse, error) + + // InstallPrebuiltRulesAndTimelinesWithResponse request + InstallPrebuiltRulesAndTimelinesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*InstallPrebuiltRulesAndTimelinesResponse, error) + + // ReadPrebuiltRulesAndTimelinesStatusWithResponse request + ReadPrebuiltRulesAndTimelinesStatusWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadPrebuiltRulesAndTimelinesStatusResponse, error) + + // RulePreviewWithBodyWithResponse request with any body + RulePreviewWithBodyWithResponse(ctx context.Context, params *RulePreviewParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RulePreviewResponse, error) + + RulePreviewWithResponse(ctx context.Context, params *RulePreviewParams, body RulePreviewJSONRequestBody, reqEditors ...RequestEditorFn) (*RulePreviewResponse, error) + + // CreateRuleExceptionListItemsWithBodyWithResponse request with any body + CreateRuleExceptionListItemsWithBodyWithResponse(ctx context.Context, id SecurityExceptionsAPIRuleId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateRuleExceptionListItemsResponse, error) + + CreateRuleExceptionListItemsWithResponse(ctx context.Context, id SecurityExceptionsAPIRuleId, body CreateRuleExceptionListItemsJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateRuleExceptionListItemsResponse, error) + + // SetAlertAssigneesWithBodyWithResponse request with any body + SetAlertAssigneesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SetAlertAssigneesResponse, error) + + SetAlertAssigneesWithResponse(ctx context.Context, body SetAlertAssigneesJSONRequestBody, reqEditors ...RequestEditorFn) (*SetAlertAssigneesResponse, error) + + // FinalizeAlertsMigrationWithBodyWithResponse request with any body + FinalizeAlertsMigrationWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*FinalizeAlertsMigrationResponse, error) + + FinalizeAlertsMigrationWithResponse(ctx context.Context, body FinalizeAlertsMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*FinalizeAlertsMigrationResponse, error) + + // AlertsMigrationCleanupWithBodyWithResponse request with any body + AlertsMigrationCleanupWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*AlertsMigrationCleanupResponse, error) + + AlertsMigrationCleanupWithResponse(ctx context.Context, body AlertsMigrationCleanupJSONRequestBody, reqEditors ...RequestEditorFn) (*AlertsMigrationCleanupResponse, error) + + // 
CreateAlertsMigrationWithBodyWithResponse request with any body + CreateAlertsMigrationWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAlertsMigrationResponse, error) + + CreateAlertsMigrationWithResponse(ctx context.Context, body CreateAlertsMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAlertsMigrationResponse, error) + + // ReadAlertsMigrationStatusWithResponse request + ReadAlertsMigrationStatusWithResponse(ctx context.Context, params *ReadAlertsMigrationStatusParams, reqEditors ...RequestEditorFn) (*ReadAlertsMigrationStatusResponse, error) + + // SearchAlertsWithBodyWithResponse request with any body + SearchAlertsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SearchAlertsResponse, error) + + SearchAlertsWithResponse(ctx context.Context, body SearchAlertsJSONRequestBody, reqEditors ...RequestEditorFn) (*SearchAlertsResponse, error) + + // SetAlertsStatusWithBodyWithResponse request with any body + SetAlertsStatusWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SetAlertsStatusResponse, error) + + SetAlertsStatusWithResponse(ctx context.Context, body SetAlertsStatusJSONRequestBody, reqEditors ...RequestEditorFn) (*SetAlertsStatusResponse, error) + + // SetAlertTagsWithBodyWithResponse request with any body + SetAlertTagsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SetAlertTagsResponse, error) + + SetAlertTagsWithResponse(ctx context.Context, body SetAlertTagsJSONRequestBody, reqEditors ...RequestEditorFn) (*SetAlertTagsResponse, error) + + // ReadTagsWithResponse request + ReadTagsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadTagsResponse, error) + + // RotateEncryptionKeyWithResponse request + RotateEncryptionKeyWithResponse(ctx context.Context, params *RotateEncryptionKeyParams, reqEditors ...RequestEditorFn) (*RotateEncryptionKeyResponse, error) + + // EndpointGetActionsListWithResponse request + EndpointGetActionsListWithResponse(ctx context.Context, params *EndpointGetActionsListParams, reqEditors ...RequestEditorFn) (*EndpointGetActionsListResponse, error) + + // EndpointExecuteActionWithBodyWithResponse request with any body + EndpointExecuteActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointExecuteActionResponse, error) + + EndpointExecuteActionWithResponse(ctx context.Context, body EndpointExecuteActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointExecuteActionResponse, error) + + // EndpointGetFileActionWithBodyWithResponse request with any body + EndpointGetFileActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointGetFileActionResponse, error) + + EndpointGetFileActionWithResponse(ctx context.Context, body EndpointGetFileActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointGetFileActionResponse, error) + + // EndpointIsolateActionWithBodyWithResponse request with any body + EndpointIsolateActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointIsolateActionResponse, error) + + EndpointIsolateActionWithResponse(ctx context.Context, body EndpointIsolateActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointIsolateActionResponse, error) + 
+ // EndpointKillProcessActionWithBodyWithResponse request with any body + EndpointKillProcessActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointKillProcessActionResponse, error) + + EndpointKillProcessActionWithResponse(ctx context.Context, body EndpointKillProcessActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointKillProcessActionResponse, error) + + // EndpointGetProcessesActionWithBodyWithResponse request with any body + EndpointGetProcessesActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointGetProcessesActionResponse, error) + + EndpointGetProcessesActionWithResponse(ctx context.Context, body EndpointGetProcessesActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointGetProcessesActionResponse, error) + + // RunScriptActionWithBodyWithResponse request with any body + RunScriptActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RunScriptActionResponse, error) + + RunScriptActionWithResponse(ctx context.Context, body RunScriptActionJSONRequestBody, reqEditors ...RequestEditorFn) (*RunScriptActionResponse, error) + + // EndpointScanActionWithBodyWithResponse request with any body + EndpointScanActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointScanActionResponse, error) + + EndpointScanActionWithResponse(ctx context.Context, body EndpointScanActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointScanActionResponse, error) + + // EndpointGetActionsStateWithResponse request + EndpointGetActionsStateWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*EndpointGetActionsStateResponse, error) + + // EndpointSuspendProcessActionWithBodyWithResponse request with any body + EndpointSuspendProcessActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointSuspendProcessActionResponse, error) + + EndpointSuspendProcessActionWithResponse(ctx context.Context, body EndpointSuspendProcessActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointSuspendProcessActionResponse, error) + + // EndpointUnisolateActionWithBodyWithResponse request with any body + EndpointUnisolateActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointUnisolateActionResponse, error) + + EndpointUnisolateActionWithResponse(ctx context.Context, body EndpointUnisolateActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointUnisolateActionResponse, error) + + // EndpointUploadActionWithBodyWithResponse request with any body + EndpointUploadActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointUploadActionResponse, error) + + // EndpointGetActionsDetailsWithResponse request + EndpointGetActionsDetailsWithResponse(ctx context.Context, actionId string, reqEditors ...RequestEditorFn) (*EndpointGetActionsDetailsResponse, error) + + // EndpointFileInfoWithResponse request + EndpointFileInfoWithResponse(ctx context.Context, actionId string, fileId string, reqEditors ...RequestEditorFn) (*EndpointFileInfoResponse, error) + + // EndpointFileDownloadWithResponse request + EndpointFileDownloadWithResponse(ctx context.Context, actionId string, fileId string, reqEditors ...RequestEditorFn) 
(*EndpointFileDownloadResponse, error) + + // EndpointGetActionsStatusWithResponse request + EndpointGetActionsStatusWithResponse(ctx context.Context, params *EndpointGetActionsStatusParams, reqEditors ...RequestEditorFn) (*EndpointGetActionsStatusResponse, error) + + // GetEndpointMetadataListWithResponse request + GetEndpointMetadataListWithResponse(ctx context.Context, params *GetEndpointMetadataListParams, reqEditors ...RequestEditorFn) (*GetEndpointMetadataListResponse, error) + + // GetEndpointMetadataWithResponse request + GetEndpointMetadataWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetEndpointMetadataResponse, error) + + // GetPolicyResponseWithResponse request + GetPolicyResponseWithResponse(ctx context.Context, params *GetPolicyResponseParams, reqEditors ...RequestEditorFn) (*GetPolicyResponseResponse, error) + + // GetProtectionUpdatesNoteWithResponse request + GetProtectionUpdatesNoteWithResponse(ctx context.Context, packagePolicyId string, reqEditors ...RequestEditorFn) (*GetProtectionUpdatesNoteResponse, error) + + // CreateUpdateProtectionUpdatesNoteWithBodyWithResponse request with any body + CreateUpdateProtectionUpdatesNoteWithBodyWithResponse(ctx context.Context, packagePolicyId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateUpdateProtectionUpdatesNoteResponse, error) + + CreateUpdateProtectionUpdatesNoteWithResponse(ctx context.Context, packagePolicyId string, body CreateUpdateProtectionUpdatesNoteJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateUpdateProtectionUpdatesNoteResponse, error) + + // CreateEndpointListWithResponse request + CreateEndpointListWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*CreateEndpointListResponse, error) + + // DeleteEndpointListItemWithResponse request + DeleteEndpointListItemWithResponse(ctx context.Context, params *DeleteEndpointListItemParams, reqEditors ...RequestEditorFn) (*DeleteEndpointListItemResponse, error) + + // ReadEndpointListItemWithResponse request + ReadEndpointListItemWithResponse(ctx context.Context, params *ReadEndpointListItemParams, reqEditors ...RequestEditorFn) (*ReadEndpointListItemResponse, error) + + // CreateEndpointListItemWithBodyWithResponse request with any body + CreateEndpointListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateEndpointListItemResponse, error) + + CreateEndpointListItemWithResponse(ctx context.Context, body CreateEndpointListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateEndpointListItemResponse, error) + + // UpdateEndpointListItemWithBodyWithResponse request with any body + UpdateEndpointListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateEndpointListItemResponse, error) + + UpdateEndpointListItemWithResponse(ctx context.Context, body UpdateEndpointListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateEndpointListItemResponse, error) + + // FindEndpointListItemsWithResponse request + FindEndpointListItemsWithResponse(ctx context.Context, params *FindEndpointListItemsParams, reqEditors ...RequestEditorFn) (*FindEndpointListItemsResponse, error) + + // DeleteMonitoringEngineWithResponse request + DeleteMonitoringEngineWithResponse(ctx context.Context, params *DeleteMonitoringEngineParams, reqEditors ...RequestEditorFn) (*DeleteMonitoringEngineResponse, error) + + // DisableMonitoringEngineWithResponse request + 
DisableMonitoringEngineWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*DisableMonitoringEngineResponse, error) + + // InitMonitoringEngineWithResponse request + InitMonitoringEngineWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*InitMonitoringEngineResponse, error) + + // ScheduleMonitoringEngineWithResponse request + ScheduleMonitoringEngineWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ScheduleMonitoringEngineResponse, error) + + // PrivMonHealthWithResponse request + PrivMonHealthWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*PrivMonHealthResponse, error) + + // PrivMonPrivilegesWithResponse request + PrivMonPrivilegesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*PrivMonPrivilegesResponse, error) + + // CreatePrivMonUserWithBodyWithResponse request with any body + CreatePrivMonUserWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreatePrivMonUserResponse, error) + + CreatePrivMonUserWithResponse(ctx context.Context, body CreatePrivMonUserJSONRequestBody, reqEditors ...RequestEditorFn) (*CreatePrivMonUserResponse, error) + + // PrivmonBulkUploadUsersCSVWithBodyWithResponse request with any body + PrivmonBulkUploadUsersCSVWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PrivmonBulkUploadUsersCSVResponse, error) + + // ListPrivMonUsersWithResponse request + ListPrivMonUsersWithResponse(ctx context.Context, params *ListPrivMonUsersParams, reqEditors ...RequestEditorFn) (*ListPrivMonUsersResponse, error) + + // DeletePrivMonUserWithResponse request + DeletePrivMonUserWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeletePrivMonUserResponse, error) + + // UpdatePrivMonUserWithBodyWithResponse request with any body + UpdatePrivMonUserWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdatePrivMonUserResponse, error) + + UpdatePrivMonUserWithResponse(ctx context.Context, id string, body UpdatePrivMonUserJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdatePrivMonUserResponse, error) + + // InstallPrivilegedAccessDetectionPackageWithResponse request + InstallPrivilegedAccessDetectionPackageWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*InstallPrivilegedAccessDetectionPackageResponse, error) + + // GetPrivilegedAccessDetectionPackageStatusWithResponse request + GetPrivilegedAccessDetectionPackageStatusWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetPrivilegedAccessDetectionPackageStatusResponse, error) + + // InitEntityStoreWithBodyWithResponse request with any body + InitEntityStoreWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*InitEntityStoreResponse, error) + + InitEntityStoreWithResponse(ctx context.Context, body InitEntityStoreJSONRequestBody, reqEditors ...RequestEditorFn) (*InitEntityStoreResponse, error) + + // ListEntityEnginesWithResponse request + ListEntityEnginesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListEntityEnginesResponse, error) + + // ApplyEntityEngineDataviewIndicesWithResponse request + ApplyEntityEngineDataviewIndicesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ApplyEntityEngineDataviewIndicesResponse, error) + + // DeleteEntityEngineWithResponse request + DeleteEntityEngineWithResponse(ctx 
context.Context, entityType SecurityEntityAnalyticsAPIEntityType, params *DeleteEntityEngineParams, reqEditors ...RequestEditorFn) (*DeleteEntityEngineResponse, error) + + // GetEntityEngineWithResponse request + GetEntityEngineWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*GetEntityEngineResponse, error) + + // InitEntityEngineWithBodyWithResponse request with any body + InitEntityEngineWithBodyWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*InitEntityEngineResponse, error) + + InitEntityEngineWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, body InitEntityEngineJSONRequestBody, reqEditors ...RequestEditorFn) (*InitEntityEngineResponse, error) + + // StartEntityEngineWithResponse request + StartEntityEngineWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*StartEntityEngineResponse, error) + + // StopEntityEngineWithResponse request + StopEntityEngineWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*StopEntityEngineResponse, error) + + // ListEntitiesWithResponse request + ListEntitiesWithResponse(ctx context.Context, params *ListEntitiesParams, reqEditors ...RequestEditorFn) (*ListEntitiesResponse, error) + + // GetEntityStoreStatusWithResponse request + GetEntityStoreStatusWithResponse(ctx context.Context, params *GetEntityStoreStatusParams, reqEditors ...RequestEditorFn) (*GetEntityStoreStatusResponse, error) + + // DeleteExceptionListWithResponse request + DeleteExceptionListWithResponse(ctx context.Context, params *DeleteExceptionListParams, reqEditors ...RequestEditorFn) (*DeleteExceptionListResponse, error) + + // ReadExceptionListWithResponse request + ReadExceptionListWithResponse(ctx context.Context, params *ReadExceptionListParams, reqEditors ...RequestEditorFn) (*ReadExceptionListResponse, error) + + // CreateExceptionListWithBodyWithResponse request with any body + CreateExceptionListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateExceptionListResponse, error) + + CreateExceptionListWithResponse(ctx context.Context, body CreateExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateExceptionListResponse, error) + + // UpdateExceptionListWithBodyWithResponse request with any body + UpdateExceptionListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateExceptionListResponse, error) + + UpdateExceptionListWithResponse(ctx context.Context, body UpdateExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateExceptionListResponse, error) + + // DuplicateExceptionListWithResponse request + DuplicateExceptionListWithResponse(ctx context.Context, params *DuplicateExceptionListParams, reqEditors ...RequestEditorFn) (*DuplicateExceptionListResponse, error) + + // ExportExceptionListWithResponse request + ExportExceptionListWithResponse(ctx context.Context, params *ExportExceptionListParams, reqEditors ...RequestEditorFn) (*ExportExceptionListResponse, error) + + // FindExceptionListsWithResponse request + FindExceptionListsWithResponse(ctx context.Context, params *FindExceptionListsParams, reqEditors ...RequestEditorFn) (*FindExceptionListsResponse, error) + + // 
ImportExceptionListWithBodyWithResponse request with any body + ImportExceptionListWithBodyWithResponse(ctx context.Context, params *ImportExceptionListParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ImportExceptionListResponse, error) + + // DeleteExceptionListItemWithResponse request + DeleteExceptionListItemWithResponse(ctx context.Context, params *DeleteExceptionListItemParams, reqEditors ...RequestEditorFn) (*DeleteExceptionListItemResponse, error) + + // ReadExceptionListItemWithResponse request + ReadExceptionListItemWithResponse(ctx context.Context, params *ReadExceptionListItemParams, reqEditors ...RequestEditorFn) (*ReadExceptionListItemResponse, error) + + // CreateExceptionListItemWithBodyWithResponse request with any body + CreateExceptionListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateExceptionListItemResponse, error) + + CreateExceptionListItemWithResponse(ctx context.Context, body CreateExceptionListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateExceptionListItemResponse, error) + + // UpdateExceptionListItemWithBodyWithResponse request with any body + UpdateExceptionListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateExceptionListItemResponse, error) + + UpdateExceptionListItemWithResponse(ctx context.Context, body UpdateExceptionListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateExceptionListItemResponse, error) + + // FindExceptionListItemsWithResponse request + FindExceptionListItemsWithResponse(ctx context.Context, params *FindExceptionListItemsParams, reqEditors ...RequestEditorFn) (*FindExceptionListItemsResponse, error) + + // ReadExceptionListSummaryWithResponse request + ReadExceptionListSummaryWithResponse(ctx context.Context, params *ReadExceptionListSummaryParams, reqEditors ...RequestEditorFn) (*ReadExceptionListSummaryResponse, error) + + // CreateSharedExceptionListWithBodyWithResponse request with any body + CreateSharedExceptionListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateSharedExceptionListResponse, error) + + CreateSharedExceptionListWithResponse(ctx context.Context, body CreateSharedExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateSharedExceptionListResponse, error) + + // GetFeaturesWithResponse request + GetFeaturesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFeaturesResponse, error) + + // GetFleetAgentDownloadSourcesWithResponse request + GetFleetAgentDownloadSourcesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetAgentDownloadSourcesResponse, error) + + // PostFleetAgentDownloadSourcesWithBodyWithResponse request with any body + PostFleetAgentDownloadSourcesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentDownloadSourcesResponse, error) + + PostFleetAgentDownloadSourcesWithResponse(ctx context.Context, body PostFleetAgentDownloadSourcesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentDownloadSourcesResponse, error) + + // DeleteFleetAgentDownloadSourcesSourceidWithResponse request + DeleteFleetAgentDownloadSourcesSourceidWithResponse(ctx context.Context, sourceId string, reqEditors ...RequestEditorFn) (*DeleteFleetAgentDownloadSourcesSourceidResponse, error) + + // GetFleetAgentDownloadSourcesSourceidWithResponse 
request + GetFleetAgentDownloadSourcesSourceidWithResponse(ctx context.Context, sourceId string, reqEditors ...RequestEditorFn) (*GetFleetAgentDownloadSourcesSourceidResponse, error) + + // PutFleetAgentDownloadSourcesSourceidWithBodyWithResponse request with any body + PutFleetAgentDownloadSourcesSourceidWithBodyWithResponse(ctx context.Context, sourceId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetAgentDownloadSourcesSourceidResponse, error) + + PutFleetAgentDownloadSourcesSourceidWithResponse(ctx context.Context, sourceId string, body PutFleetAgentDownloadSourcesSourceidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetAgentDownloadSourcesSourceidResponse, error) + + // GetFleetAgentPoliciesWithResponse request + GetFleetAgentPoliciesWithResponse(ctx context.Context, params *GetFleetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesResponse, error) + + // PostFleetAgentPoliciesWithBodyWithResponse request with any body + PostFleetAgentPoliciesWithBodyWithResponse(ctx context.Context, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesResponse, error) + + PostFleetAgentPoliciesWithResponse(ctx context.Context, params *PostFleetAgentPoliciesParams, body PostFleetAgentPoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesResponse, error) + + // PostFleetAgentPoliciesBulkGetWithBodyWithResponse request with any body + PostFleetAgentPoliciesBulkGetWithBodyWithResponse(ctx context.Context, params *PostFleetAgentPoliciesBulkGetParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesBulkGetResponse, error) + + PostFleetAgentPoliciesBulkGetWithResponse(ctx context.Context, params *PostFleetAgentPoliciesBulkGetParams, body PostFleetAgentPoliciesBulkGetJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesBulkGetResponse, error) + + // PostFleetAgentPoliciesDeleteWithBodyWithResponse request with any body + PostFleetAgentPoliciesDeleteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesDeleteResponse, error) + + PostFleetAgentPoliciesDeleteWithResponse(ctx context.Context, body PostFleetAgentPoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesDeleteResponse, error) + + // PostFleetAgentPoliciesOutputsWithBodyWithResponse request with any body + PostFleetAgentPoliciesOutputsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesOutputsResponse, error) + + PostFleetAgentPoliciesOutputsWithResponse(ctx context.Context, body PostFleetAgentPoliciesOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesOutputsResponse, error) + + // GetFleetAgentPoliciesAgentpolicyidWithResponse request + GetFleetAgentPoliciesAgentpolicyidWithResponse(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidResponse, error) + + // PutFleetAgentPoliciesAgentpolicyidWithBodyWithResponse request with any body + PutFleetAgentPoliciesAgentpolicyidWithBodyWithResponse(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetAgentPoliciesAgentpolicyidResponse, error) + 
+ PutFleetAgentPoliciesAgentpolicyidWithResponse(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, body PutFleetAgentPoliciesAgentpolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetAgentPoliciesAgentpolicyidResponse, error) + + // GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusWithResponse request + GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusWithResponse(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse, error) + + // PostFleetAgentPoliciesAgentpolicyidCopyWithBodyWithResponse request with any body + PostFleetAgentPoliciesAgentpolicyidCopyWithBodyWithResponse(ctx context.Context, agentPolicyId string, params *PostFleetAgentPoliciesAgentpolicyidCopyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesAgentpolicyidCopyResponse, error) + + PostFleetAgentPoliciesAgentpolicyidCopyWithResponse(ctx context.Context, agentPolicyId string, params *PostFleetAgentPoliciesAgentpolicyidCopyParams, body PostFleetAgentPoliciesAgentpolicyidCopyJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesAgentpolicyidCopyResponse, error) + + // GetFleetAgentPoliciesAgentpolicyidDownloadWithResponse request + GetFleetAgentPoliciesAgentpolicyidDownloadWithResponse(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidDownloadParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidDownloadResponse, error) + + // GetFleetAgentPoliciesAgentpolicyidFullWithResponse request + GetFleetAgentPoliciesAgentpolicyidFullWithResponse(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidFullParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidFullResponse, error) + + // GetFleetAgentPoliciesAgentpolicyidOutputsWithResponse request + GetFleetAgentPoliciesAgentpolicyidOutputsWithResponse(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidOutputsResponse, error) + + // GetFleetAgentStatusWithResponse request + GetFleetAgentStatusWithResponse(ctx context.Context, params *GetFleetAgentStatusParams, reqEditors ...RequestEditorFn) (*GetFleetAgentStatusResponse, error) + + // GetFleetAgentStatusDataWithResponse request + GetFleetAgentStatusDataWithResponse(ctx context.Context, params *GetFleetAgentStatusDataParams, reqEditors ...RequestEditorFn) (*GetFleetAgentStatusDataResponse, error) + + // GetFleetAgentsWithResponse request + GetFleetAgentsWithResponse(ctx context.Context, params *GetFleetAgentsParams, reqEditors ...RequestEditorFn) (*GetFleetAgentsResponse, error) + + // PostFleetAgentsWithBodyWithResponse request with any body + PostFleetAgentsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsResponse, error) + + PostFleetAgentsWithResponse(ctx context.Context, body PostFleetAgentsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsResponse, error) + + // GetFleetAgentsActionStatusWithResponse request + GetFleetAgentsActionStatusWithResponse(ctx context.Context, params *GetFleetAgentsActionStatusParams, reqEditors ...RequestEditorFn) (*GetFleetAgentsActionStatusResponse, error) + + // PostFleetAgentsActionsActionidCancelWithResponse request + PostFleetAgentsActionsActionidCancelWithResponse(ctx context.Context, actionId string, reqEditors 
...RequestEditorFn) (*PostFleetAgentsActionsActionidCancelResponse, error) + + // GetFleetAgentsAvailableVersionsWithResponse request + GetFleetAgentsAvailableVersionsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetAgentsAvailableVersionsResponse, error) + + // PostFleetAgentsBulkReassignWithBodyWithResponse request with any body + PostFleetAgentsBulkReassignWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkReassignResponse, error) + + PostFleetAgentsBulkReassignWithResponse(ctx context.Context, body PostFleetAgentsBulkReassignJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkReassignResponse, error) + + // PostFleetAgentsBulkRequestDiagnosticsWithBodyWithResponse request with any body + PostFleetAgentsBulkRequestDiagnosticsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkRequestDiagnosticsResponse, error) + + PostFleetAgentsBulkRequestDiagnosticsWithResponse(ctx context.Context, body PostFleetAgentsBulkRequestDiagnosticsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkRequestDiagnosticsResponse, error) + + // PostFleetAgentsBulkUnenrollWithBodyWithResponse request with any body + PostFleetAgentsBulkUnenrollWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUnenrollResponse, error) + + PostFleetAgentsBulkUnenrollWithResponse(ctx context.Context, body PostFleetAgentsBulkUnenrollJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUnenrollResponse, error) + + // PostFleetAgentsBulkUpdateAgentTagsWithBodyWithResponse request with any body + PostFleetAgentsBulkUpdateAgentTagsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUpdateAgentTagsResponse, error) + + PostFleetAgentsBulkUpdateAgentTagsWithResponse(ctx context.Context, body PostFleetAgentsBulkUpdateAgentTagsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUpdateAgentTagsResponse, error) + + // PostFleetAgentsBulkUpgradeWithBodyWithResponse request with any body + PostFleetAgentsBulkUpgradeWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUpgradeResponse, error) + + PostFleetAgentsBulkUpgradeWithResponse(ctx context.Context, body PostFleetAgentsBulkUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUpgradeResponse, error) + + // DeleteFleetAgentsFilesFileidWithResponse request + DeleteFleetAgentsFilesFileidWithResponse(ctx context.Context, fileId string, reqEditors ...RequestEditorFn) (*DeleteFleetAgentsFilesFileidResponse, error) + + // GetFleetAgentsFilesFileidFilenameWithResponse request + GetFleetAgentsFilesFileidFilenameWithResponse(ctx context.Context, fileId string, fileName string, reqEditors ...RequestEditorFn) (*GetFleetAgentsFilesFileidFilenameResponse, error) + + // GetFleetAgentsSetupWithResponse request + GetFleetAgentsSetupWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetAgentsSetupResponse, error) + + // PostFleetAgentsSetupWithResponse request + PostFleetAgentsSetupWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*PostFleetAgentsSetupResponse, error) + + // GetFleetAgentsTagsWithResponse request + GetFleetAgentsTagsWithResponse(ctx context.Context, params 
*GetFleetAgentsTagsParams, reqEditors ...RequestEditorFn) (*GetFleetAgentsTagsResponse, error) + + // DeleteFleetAgentsAgentidWithResponse request + DeleteFleetAgentsAgentidWithResponse(ctx context.Context, agentId string, reqEditors ...RequestEditorFn) (*DeleteFleetAgentsAgentidResponse, error) + + // GetFleetAgentsAgentidWithResponse request + GetFleetAgentsAgentidWithResponse(ctx context.Context, agentId string, params *GetFleetAgentsAgentidParams, reqEditors ...RequestEditorFn) (*GetFleetAgentsAgentidResponse, error) + + // PutFleetAgentsAgentidWithBodyWithResponse request with any body + PutFleetAgentsAgentidWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetAgentsAgentidResponse, error) + + PutFleetAgentsAgentidWithResponse(ctx context.Context, agentId string, body PutFleetAgentsAgentidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetAgentsAgentidResponse, error) + + // PostFleetAgentsAgentidActionsWithBodyWithResponse request with any body + PostFleetAgentsAgentidActionsWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidActionsResponse, error) + + PostFleetAgentsAgentidActionsWithResponse(ctx context.Context, agentId string, body PostFleetAgentsAgentidActionsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidActionsResponse, error) + + // PostFleetAgentsAgentidReassignWithBodyWithResponse request with any body + PostFleetAgentsAgentidReassignWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidReassignResponse, error) + + PostFleetAgentsAgentidReassignWithResponse(ctx context.Context, agentId string, body PostFleetAgentsAgentidReassignJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidReassignResponse, error) + + // PostFleetAgentsAgentidRequestDiagnosticsWithBodyWithResponse request with any body + PostFleetAgentsAgentidRequestDiagnosticsWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidRequestDiagnosticsResponse, error) + + PostFleetAgentsAgentidRequestDiagnosticsWithResponse(ctx context.Context, agentId string, body PostFleetAgentsAgentidRequestDiagnosticsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidRequestDiagnosticsResponse, error) + + // PostFleetAgentsAgentidUnenrollWithBodyWithResponse request with any body + PostFleetAgentsAgentidUnenrollWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidUnenrollResponse, error) + + PostFleetAgentsAgentidUnenrollWithResponse(ctx context.Context, agentId string, body PostFleetAgentsAgentidUnenrollJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidUnenrollResponse, error) + + // PostFleetAgentsAgentidUpgradeWithBodyWithResponse request with any body + PostFleetAgentsAgentidUpgradeWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidUpgradeResponse, error) + + PostFleetAgentsAgentidUpgradeWithResponse(ctx context.Context, agentId string, body PostFleetAgentsAgentidUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidUpgradeResponse, error) + + // 
GetFleetAgentsAgentidUploadsWithResponse request + GetFleetAgentsAgentidUploadsWithResponse(ctx context.Context, agentId string, reqEditors ...RequestEditorFn) (*GetFleetAgentsAgentidUploadsResponse, error) + + // GetFleetCheckPermissionsWithResponse request + GetFleetCheckPermissionsWithResponse(ctx context.Context, params *GetFleetCheckPermissionsParams, reqEditors ...RequestEditorFn) (*GetFleetCheckPermissionsResponse, error) + + // GetFleetDataStreamsWithResponse request + GetFleetDataStreamsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetDataStreamsResponse, error) + + // GetFleetEnrollmentApiKeysWithResponse request + GetFleetEnrollmentApiKeysWithResponse(ctx context.Context, params *GetFleetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*GetFleetEnrollmentApiKeysResponse, error) + + // PostFleetEnrollmentApiKeysWithBodyWithResponse request with any body + PostFleetEnrollmentApiKeysWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEnrollmentApiKeysResponse, error) + + PostFleetEnrollmentApiKeysWithResponse(ctx context.Context, body PostFleetEnrollmentApiKeysJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEnrollmentApiKeysResponse, error) + + // DeleteFleetEnrollmentApiKeysKeyidWithResponse request + DeleteFleetEnrollmentApiKeysKeyidWithResponse(ctx context.Context, keyId string, reqEditors ...RequestEditorFn) (*DeleteFleetEnrollmentApiKeysKeyidResponse, error) + + // GetFleetEnrollmentApiKeysKeyidWithResponse request + GetFleetEnrollmentApiKeysKeyidWithResponse(ctx context.Context, keyId string, reqEditors ...RequestEditorFn) (*GetFleetEnrollmentApiKeysKeyidResponse, error) + + // PostFleetEpmBulkAssetsWithBodyWithResponse request with any body + PostFleetEpmBulkAssetsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmBulkAssetsResponse, error) + + PostFleetEpmBulkAssetsWithResponse(ctx context.Context, body PostFleetEpmBulkAssetsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmBulkAssetsResponse, error) + + // GetFleetEpmCategoriesWithResponse request + GetFleetEpmCategoriesWithResponse(ctx context.Context, params *GetFleetEpmCategoriesParams, reqEditors ...RequestEditorFn) (*GetFleetEpmCategoriesResponse, error) + + // PostFleetEpmCustomIntegrationsWithBodyWithResponse request with any body + PostFleetEpmCustomIntegrationsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmCustomIntegrationsResponse, error) + + PostFleetEpmCustomIntegrationsWithResponse(ctx context.Context, body PostFleetEpmCustomIntegrationsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmCustomIntegrationsResponse, error) + + // PutFleetEpmCustomIntegrationsPkgnameWithBodyWithResponse request with any body + PutFleetEpmCustomIntegrationsPkgnameWithBodyWithResponse(ctx context.Context, pkgName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetEpmCustomIntegrationsPkgnameResponse, error) + + PutFleetEpmCustomIntegrationsPkgnameWithResponse(ctx context.Context, pkgName string, body PutFleetEpmCustomIntegrationsPkgnameJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetEpmCustomIntegrationsPkgnameResponse, error) + + // GetFleetEpmDataStreamsWithResponse request + GetFleetEpmDataStreamsWithResponse(ctx context.Context, params *GetFleetEpmDataStreamsParams, reqEditors ...RequestEditorFn) 
(*GetFleetEpmDataStreamsResponse, error) + + // GetFleetEpmPackagesWithResponse request + GetFleetEpmPackagesWithResponse(ctx context.Context, params *GetFleetEpmPackagesParams, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesResponse, error) + + // PostFleetEpmPackagesWithBodyWithResponse request with any body + PostFleetEpmPackagesWithBodyWithResponse(ctx context.Context, params *PostFleetEpmPackagesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesResponse, error) + + // PostFleetEpmPackagesBulkWithBodyWithResponse request with any body + PostFleetEpmPackagesBulkWithBodyWithResponse(ctx context.Context, params *PostFleetEpmPackagesBulkParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkResponse, error) + + PostFleetEpmPackagesBulkWithResponse(ctx context.Context, params *PostFleetEpmPackagesBulkParams, body PostFleetEpmPackagesBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkResponse, error) + + // PostFleetEpmPackagesBulkUninstallWithBodyWithResponse request with any body + PostFleetEpmPackagesBulkUninstallWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkUninstallResponse, error) + + PostFleetEpmPackagesBulkUninstallWithResponse(ctx context.Context, body PostFleetEpmPackagesBulkUninstallJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkUninstallResponse, error) + + // GetFleetEpmPackagesBulkUninstallTaskidWithResponse request + GetFleetEpmPackagesBulkUninstallTaskidWithResponse(ctx context.Context, taskId string, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesBulkUninstallTaskidResponse, error) + + // PostFleetEpmPackagesBulkUpgradeWithBodyWithResponse request with any body + PostFleetEpmPackagesBulkUpgradeWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkUpgradeResponse, error) + + PostFleetEpmPackagesBulkUpgradeWithResponse(ctx context.Context, body PostFleetEpmPackagesBulkUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkUpgradeResponse, error) + + // GetFleetEpmPackagesBulkUpgradeTaskidWithResponse request + GetFleetEpmPackagesBulkUpgradeTaskidWithResponse(ctx context.Context, taskId string, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesBulkUpgradeTaskidResponse, error) + + // GetFleetEpmPackagesInstalledWithResponse request + GetFleetEpmPackagesInstalledWithResponse(ctx context.Context, params *GetFleetEpmPackagesInstalledParams, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesInstalledResponse, error) + + // GetFleetEpmPackagesLimitedWithResponse request + GetFleetEpmPackagesLimitedWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesLimitedResponse, error) + + // GetFleetEpmPackagesPkgnameStatsWithResponse request + GetFleetEpmPackagesPkgnameStatsWithResponse(ctx context.Context, pkgName string, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesPkgnameStatsResponse, error) + + // DeleteFleetEpmPackagesPkgnamePkgversionWithResponse request + DeleteFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*DeleteFleetEpmPackagesPkgnamePkgversionResponse, error) + + // GetFleetEpmPackagesPkgnamePkgversionWithResponse request + 
GetFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesPkgnamePkgversionResponse, error) + + // PostFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse request with any body + PostFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionResponse, error) + + PostFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, body PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionResponse, error) + + // PutFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse request with any body + PutFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetEpmPackagesPkgnamePkgversionResponse, error) + + PutFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, body PutFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetEpmPackagesPkgnamePkgversionResponse, error) + + // DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsWithResponse request + DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsParams, reqEditors ...RequestEditorFn) (*DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse, error) + + // DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithResponse request + DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithResponse(ctx context.Context, pkgName string, pkgVersion string, reqEditors ...RequestEditorFn) (*DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse, error) + + // PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithBodyWithResponse request with any body + PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse, error) + + PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithResponse(ctx context.Context, pkgName string, pkgVersion string, body PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse, error) + + // PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithBodyWithResponse request with any body + PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse, error) + + PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams, body 
PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse, error) + + // GetFleetEpmPackagesPkgnamePkgversionFilepathWithResponse request + GetFleetEpmPackagesPkgnamePkgversionFilepathWithResponse(ctx context.Context, pkgName string, pkgVersion string, filePath string, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesPkgnamePkgversionFilepathResponse, error) + + // GetFleetEpmTemplatesPkgnamePkgversionInputsWithResponse request + GetFleetEpmTemplatesPkgnamePkgversionInputsWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmTemplatesPkgnamePkgversionInputsParams, reqEditors ...RequestEditorFn) (*GetFleetEpmTemplatesPkgnamePkgversionInputsResponse, error) + + // GetFleetEpmVerificationKeyIdWithResponse request + GetFleetEpmVerificationKeyIdWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetEpmVerificationKeyIdResponse, error) + + // GetFleetFleetServerHostsWithResponse request + GetFleetFleetServerHostsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetFleetServerHostsResponse, error) + + // PostFleetFleetServerHostsWithBodyWithResponse request with any body + PostFleetFleetServerHostsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetFleetServerHostsResponse, error) + + PostFleetFleetServerHostsWithResponse(ctx context.Context, body PostFleetFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetFleetServerHostsResponse, error) + + // DeleteFleetFleetServerHostsItemidWithResponse request + DeleteFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*DeleteFleetFleetServerHostsItemidResponse, error) + + // GetFleetFleetServerHostsItemidWithResponse request + GetFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*GetFleetFleetServerHostsItemidResponse, error) + + // PutFleetFleetServerHostsItemidWithBodyWithResponse request with any body + PutFleetFleetServerHostsItemidWithBodyWithResponse(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetFleetServerHostsItemidResponse, error) + + PutFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, body PutFleetFleetServerHostsItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetFleetServerHostsItemidResponse, error) + + // PostFleetHealthCheckWithBodyWithResponse request with any body + PostFleetHealthCheckWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetHealthCheckResponse, error) + + PostFleetHealthCheckWithResponse(ctx context.Context, body PostFleetHealthCheckJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetHealthCheckResponse, error) + + // GetFleetKubernetesWithResponse request + GetFleetKubernetesWithResponse(ctx context.Context, params *GetFleetKubernetesParams, reqEditors ...RequestEditorFn) (*GetFleetKubernetesResponse, error) + + // GetFleetKubernetesDownloadWithResponse request + GetFleetKubernetesDownloadWithResponse(ctx context.Context, params *GetFleetKubernetesDownloadParams, reqEditors ...RequestEditorFn) (*GetFleetKubernetesDownloadResponse, error) + + // PostFleetLogstashApiKeysWithResponse request + PostFleetLogstashApiKeysWithResponse(ctx context.Context, reqEditors 
...RequestEditorFn) (*PostFleetLogstashApiKeysResponse, error) + + // PostFleetMessageSigningServiceRotateKeyPairWithResponse request + PostFleetMessageSigningServiceRotateKeyPairWithResponse(ctx context.Context, params *PostFleetMessageSigningServiceRotateKeyPairParams, reqEditors ...RequestEditorFn) (*PostFleetMessageSigningServiceRotateKeyPairResponse, error) + + // GetFleetOutputsWithResponse request + GetFleetOutputsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetOutputsResponse, error) + + // PostFleetOutputsWithBodyWithResponse request with any body + PostFleetOutputsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetOutputsResponse, error) + + PostFleetOutputsWithResponse(ctx context.Context, body PostFleetOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetOutputsResponse, error) + + // DeleteFleetOutputsOutputidWithResponse request + DeleteFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*DeleteFleetOutputsOutputidResponse, error) + + // GetFleetOutputsOutputidWithResponse request + GetFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*GetFleetOutputsOutputidResponse, error) + + // PutFleetOutputsOutputidWithBodyWithResponse request with any body + PutFleetOutputsOutputidWithBodyWithResponse(ctx context.Context, outputId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetOutputsOutputidResponse, error) + + PutFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, body PutFleetOutputsOutputidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetOutputsOutputidResponse, error) + + // GetFleetOutputsOutputidHealthWithResponse request + GetFleetOutputsOutputidHealthWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*GetFleetOutputsOutputidHealthResponse, error) + + // GetFleetPackagePoliciesWithResponse request + GetFleetPackagePoliciesWithResponse(ctx context.Context, params *GetFleetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*GetFleetPackagePoliciesResponse, error) + + // PostFleetPackagePoliciesWithBodyWithResponse request with any body + PostFleetPackagePoliciesWithBodyWithResponse(ctx context.Context, params *PostFleetPackagePoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesResponse, error) + + PostFleetPackagePoliciesWithResponse(ctx context.Context, params *PostFleetPackagePoliciesParams, body PostFleetPackagePoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesResponse, error) + + // PostFleetPackagePoliciesBulkGetWithBodyWithResponse request with any body + PostFleetPackagePoliciesBulkGetWithBodyWithResponse(ctx context.Context, params *PostFleetPackagePoliciesBulkGetParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesBulkGetResponse, error) + + PostFleetPackagePoliciesBulkGetWithResponse(ctx context.Context, params *PostFleetPackagePoliciesBulkGetParams, body PostFleetPackagePoliciesBulkGetJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesBulkGetResponse, error) + + // PostFleetPackagePoliciesDeleteWithBodyWithResponse request with any body + PostFleetPackagePoliciesDeleteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) 
(*PostFleetPackagePoliciesDeleteResponse, error) + + PostFleetPackagePoliciesDeleteWithResponse(ctx context.Context, body PostFleetPackagePoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesDeleteResponse, error) + + // PostFleetPackagePoliciesUpgradeWithBodyWithResponse request with any body + PostFleetPackagePoliciesUpgradeWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesUpgradeResponse, error) + + PostFleetPackagePoliciesUpgradeWithResponse(ctx context.Context, body PostFleetPackagePoliciesUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesUpgradeResponse, error) + + // PostFleetPackagePoliciesUpgradeDryrunWithBodyWithResponse request with any body + PostFleetPackagePoliciesUpgradeDryrunWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesUpgradeDryrunResponse, error) + + PostFleetPackagePoliciesUpgradeDryrunWithResponse(ctx context.Context, body PostFleetPackagePoliciesUpgradeDryrunJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesUpgradeDryrunResponse, error) + + // DeleteFleetPackagePoliciesPackagepolicyidWithResponse request + DeleteFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *DeleteFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*DeleteFleetPackagePoliciesPackagepolicyidResponse, error) + + // GetFleetPackagePoliciesPackagepolicyidWithResponse request + GetFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *GetFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*GetFleetPackagePoliciesPackagepolicyidResponse, error) + + // PutFleetPackagePoliciesPackagepolicyidWithBodyWithResponse request with any body + PutFleetPackagePoliciesPackagepolicyidWithBodyWithResponse(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetPackagePoliciesPackagepolicyidResponse, error) + + PutFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, body PutFleetPackagePoliciesPackagepolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetPackagePoliciesPackagepolicyidResponse, error) + + // GetFleetProxiesWithResponse request + GetFleetProxiesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetProxiesResponse, error) + + // PostFleetProxiesWithBodyWithResponse request with any body + PostFleetProxiesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetProxiesResponse, error) + + PostFleetProxiesWithResponse(ctx context.Context, body PostFleetProxiesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetProxiesResponse, error) + + // DeleteFleetProxiesItemidWithResponse request + DeleteFleetProxiesItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*DeleteFleetProxiesItemidResponse, error) + + // GetFleetProxiesItemidWithResponse request + GetFleetProxiesItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*GetFleetProxiesItemidResponse, error) + + // PutFleetProxiesItemidWithBodyWithResponse request with any body + 
PutFleetProxiesItemidWithBodyWithResponse(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetProxiesItemidResponse, error) + + PutFleetProxiesItemidWithResponse(ctx context.Context, itemId string, body PutFleetProxiesItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetProxiesItemidResponse, error) + + // GetFleetRemoteSyncedIntegrationsStatusWithResponse request + GetFleetRemoteSyncedIntegrationsStatusWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetRemoteSyncedIntegrationsStatusResponse, error) + + // GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusWithResponse request + GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse, error) + + // PostFleetServiceTokensWithBodyWithResponse request with any body + PostFleetServiceTokensWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetServiceTokensResponse, error) + + PostFleetServiceTokensWithResponse(ctx context.Context, body PostFleetServiceTokensJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetServiceTokensResponse, error) + + // GetFleetSettingsWithResponse request + GetFleetSettingsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetSettingsResponse, error) + + // PutFleetSettingsWithBodyWithResponse request with any body + PutFleetSettingsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetSettingsResponse, error) + + PutFleetSettingsWithResponse(ctx context.Context, body PutFleetSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetSettingsResponse, error) + + // PostFleetSetupWithResponse request + PostFleetSetupWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*PostFleetSetupResponse, error) + + // GetFleetSpaceSettingsWithResponse request + GetFleetSpaceSettingsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetSpaceSettingsResponse, error) + + // PutFleetSpaceSettingsWithBodyWithResponse request with any body + PutFleetSpaceSettingsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetSpaceSettingsResponse, error) + + PutFleetSpaceSettingsWithResponse(ctx context.Context, body PutFleetSpaceSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetSpaceSettingsResponse, error) + + // GetFleetUninstallTokensWithResponse request + GetFleetUninstallTokensWithResponse(ctx context.Context, params *GetFleetUninstallTokensParams, reqEditors ...RequestEditorFn) (*GetFleetUninstallTokensResponse, error) + + // GetFleetUninstallTokensUninstalltokenidWithResponse request + GetFleetUninstallTokensUninstalltokenidWithResponse(ctx context.Context, uninstallTokenId string, reqEditors ...RequestEditorFn) (*GetFleetUninstallTokensUninstalltokenidResponse, error) + + // DeleteListWithResponse request + DeleteListWithResponse(ctx context.Context, params *DeleteListParams, reqEditors ...RequestEditorFn) (*DeleteListResponse, error) + + // ReadListWithResponse request + ReadListWithResponse(ctx context.Context, params *ReadListParams, reqEditors ...RequestEditorFn) (*ReadListResponse, error) + + // PatchListWithBodyWithResponse request with any body + PatchListWithBodyWithResponse(ctx context.Context, contentType string, body 
io.Reader, reqEditors ...RequestEditorFn) (*PatchListResponse, error) + + PatchListWithResponse(ctx context.Context, body PatchListJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchListResponse, error) + + // CreateListWithBodyWithResponse request with any body + CreateListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateListResponse, error) + + CreateListWithResponse(ctx context.Context, body CreateListJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateListResponse, error) + + // UpdateListWithBodyWithResponse request with any body + UpdateListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateListResponse, error) + + UpdateListWithResponse(ctx context.Context, body UpdateListJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateListResponse, error) + + // FindListsWithResponse request + FindListsWithResponse(ctx context.Context, params *FindListsParams, reqEditors ...RequestEditorFn) (*FindListsResponse, error) + + // DeleteListIndexWithResponse request + DeleteListIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*DeleteListIndexResponse, error) + + // ReadListIndexWithResponse request + ReadListIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadListIndexResponse, error) + + // CreateListIndexWithResponse request + CreateListIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*CreateListIndexResponse, error) + + // DeleteListItemWithResponse request + DeleteListItemWithResponse(ctx context.Context, params *DeleteListItemParams, reqEditors ...RequestEditorFn) (*DeleteListItemResponse, error) + + // ReadListItemWithResponse request + ReadListItemWithResponse(ctx context.Context, params *ReadListItemParams, reqEditors ...RequestEditorFn) (*ReadListItemResponse, error) + + // PatchListItemWithBodyWithResponse request with any body + PatchListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchListItemResponse, error) + + PatchListItemWithResponse(ctx context.Context, body PatchListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchListItemResponse, error) + + // CreateListItemWithBodyWithResponse request with any body + CreateListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateListItemResponse, error) + + CreateListItemWithResponse(ctx context.Context, body CreateListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateListItemResponse, error) + + // UpdateListItemWithBodyWithResponse request with any body + UpdateListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateListItemResponse, error) + + UpdateListItemWithResponse(ctx context.Context, body UpdateListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateListItemResponse, error) + + // ExportListItemsWithResponse request + ExportListItemsWithResponse(ctx context.Context, params *ExportListItemsParams, reqEditors ...RequestEditorFn) (*ExportListItemsResponse, error) + + // FindListItemsWithResponse request + FindListItemsWithResponse(ctx context.Context, params *FindListItemsParams, reqEditors ...RequestEditorFn) (*FindListItemsResponse, error) + + // ImportListItemsWithBodyWithResponse request with any body + ImportListItemsWithBodyWithResponse(ctx context.Context, params *ImportListItemsParams, 
contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ImportListItemsResponse, error) + + // ReadListPrivilegesWithResponse request + ReadListPrivilegesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadListPrivilegesResponse, error) + + // DeleteLogstashPipelineWithResponse request + DeleteLogstashPipelineWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteLogstashPipelineResponse, error) + + // GetLogstashPipelineWithResponse request + GetLogstashPipelineWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetLogstashPipelineResponse, error) + + // PutLogstashPipelineWithBodyWithResponse request with any body + PutLogstashPipelineWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutLogstashPipelineResponse, error) + + PutLogstashPipelineWithResponse(ctx context.Context, id string, body PutLogstashPipelineJSONRequestBody, reqEditors ...RequestEditorFn) (*PutLogstashPipelineResponse, error) + + // GetLogstashPipelinesWithResponse request + GetLogstashPipelinesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetLogstashPipelinesResponse, error) + + // PostMaintenanceWindowIdArchiveWithResponse request + PostMaintenanceWindowIdArchiveWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowIdArchiveResponse, error) + + // PostMaintenanceWindowIdUnarchiveWithResponse request + PostMaintenanceWindowIdUnarchiveWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowIdUnarchiveResponse, error) + + // MlSyncWithResponse request + MlSyncWithResponse(ctx context.Context, params *MlSyncParams, reqEditors ...RequestEditorFn) (*MlSyncResponse, error) + + // DeleteNoteWithBodyWithResponse request with any body + DeleteNoteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteNoteResponse, error) + + DeleteNoteWithResponse(ctx context.Context, body DeleteNoteJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteNoteResponse, error) + + // GetNotesWithResponse request + GetNotesWithResponse(ctx context.Context, params *GetNotesParams, reqEditors ...RequestEditorFn) (*GetNotesResponse, error) + + // PersistNoteRouteWithBodyWithResponse request with any body + PersistNoteRouteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PersistNoteRouteResponse, error) + + PersistNoteRouteWithResponse(ctx context.Context, body PersistNoteRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*PersistNoteRouteResponse, error) + + // ObservabilityAiAssistantChatCompleteWithBodyWithResponse request with any body + ObservabilityAiAssistantChatCompleteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ObservabilityAiAssistantChatCompleteResponse, error) + + ObservabilityAiAssistantChatCompleteWithResponse(ctx context.Context, body ObservabilityAiAssistantChatCompleteJSONRequestBody, reqEditors ...RequestEditorFn) (*ObservabilityAiAssistantChatCompleteResponse, error) + + // OsqueryFindLiveQueriesWithResponse request + OsqueryFindLiveQueriesWithResponse(ctx context.Context, params *OsqueryFindLiveQueriesParams, reqEditors ...RequestEditorFn) (*OsqueryFindLiveQueriesResponse, error) + + // OsqueryCreateLiveQueryWithBodyWithResponse request with any body + 
OsqueryCreateLiveQueryWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*OsqueryCreateLiveQueryResponse, error) + + OsqueryCreateLiveQueryWithResponse(ctx context.Context, body OsqueryCreateLiveQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*OsqueryCreateLiveQueryResponse, error) + + // OsqueryGetLiveQueryDetailsWithResponse request + OsqueryGetLiveQueryDetailsWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*OsqueryGetLiveQueryDetailsResponse, error) + + // OsqueryGetLiveQueryResultsWithResponse request + OsqueryGetLiveQueryResultsWithResponse(ctx context.Context, id string, actionId string, params *OsqueryGetLiveQueryResultsParams, reqEditors ...RequestEditorFn) (*OsqueryGetLiveQueryResultsResponse, error) + + // OsqueryFindPacksWithResponse request + OsqueryFindPacksWithResponse(ctx context.Context, params *OsqueryFindPacksParams, reqEditors ...RequestEditorFn) (*OsqueryFindPacksResponse, error) + + // OsqueryCreatePacksWithBodyWithResponse request with any body + OsqueryCreatePacksWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*OsqueryCreatePacksResponse, error) + + OsqueryCreatePacksWithResponse(ctx context.Context, body OsqueryCreatePacksJSONRequestBody, reqEditors ...RequestEditorFn) (*OsqueryCreatePacksResponse, error) + + // OsqueryDeletePacksWithResponse request + OsqueryDeletePacksWithResponse(ctx context.Context, id SecurityOsqueryAPIPackId, reqEditors ...RequestEditorFn) (*OsqueryDeletePacksResponse, error) + + // OsqueryGetPacksDetailsWithResponse request + OsqueryGetPacksDetailsWithResponse(ctx context.Context, id SecurityOsqueryAPIPackId, reqEditors ...RequestEditorFn) (*OsqueryGetPacksDetailsResponse, error) + + // OsqueryUpdatePacksWithBodyWithResponse request with any body + OsqueryUpdatePacksWithBodyWithResponse(ctx context.Context, id SecurityOsqueryAPIPackId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*OsqueryUpdatePacksResponse, error) + + OsqueryUpdatePacksWithResponse(ctx context.Context, id SecurityOsqueryAPIPackId, body OsqueryUpdatePacksJSONRequestBody, reqEditors ...RequestEditorFn) (*OsqueryUpdatePacksResponse, error) + + // OsqueryFindSavedQueriesWithResponse request + OsqueryFindSavedQueriesWithResponse(ctx context.Context, params *OsqueryFindSavedQueriesParams, reqEditors ...RequestEditorFn) (*OsqueryFindSavedQueriesResponse, error) + + // OsqueryCreateSavedQueryWithBodyWithResponse request with any body + OsqueryCreateSavedQueryWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*OsqueryCreateSavedQueryResponse, error) + + OsqueryCreateSavedQueryWithResponse(ctx context.Context, body OsqueryCreateSavedQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*OsqueryCreateSavedQueryResponse, error) + + // OsqueryDeleteSavedQueryWithResponse request + OsqueryDeleteSavedQueryWithResponse(ctx context.Context, id SecurityOsqueryAPISavedQueryId, reqEditors ...RequestEditorFn) (*OsqueryDeleteSavedQueryResponse, error) + + // OsqueryGetSavedQueryDetailsWithResponse request + OsqueryGetSavedQueryDetailsWithResponse(ctx context.Context, id SecurityOsqueryAPISavedQueryId, reqEditors ...RequestEditorFn) (*OsqueryGetSavedQueryDetailsResponse, error) + + // OsqueryUpdateSavedQueryWithBodyWithResponse request with any body + OsqueryUpdateSavedQueryWithBodyWithResponse(ctx context.Context, id SecurityOsqueryAPISavedQueryId, contentType 
string, body io.Reader, reqEditors ...RequestEditorFn) (*OsqueryUpdateSavedQueryResponse, error) + + OsqueryUpdateSavedQueryWithResponse(ctx context.Context, id SecurityOsqueryAPISavedQueryId, body OsqueryUpdateSavedQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*OsqueryUpdateSavedQueryResponse, error) + + // PersistPinnedEventRouteWithBodyWithResponse request with any body + PersistPinnedEventRouteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PersistPinnedEventRouteResponse, error) + + PersistPinnedEventRouteWithResponse(ctx context.Context, body PersistPinnedEventRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*PersistPinnedEventRouteResponse, error) + + // CleanUpRiskEngineWithResponse request + CleanUpRiskEngineWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*CleanUpRiskEngineResponse, error) + + // ConfigureRiskEngineSavedObjectWithBodyWithResponse request with any body + ConfigureRiskEngineSavedObjectWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ConfigureRiskEngineSavedObjectResponse, error) + + ConfigureRiskEngineSavedObjectWithResponse(ctx context.Context, body ConfigureRiskEngineSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*ConfigureRiskEngineSavedObjectResponse, error) + + // ScheduleRiskEngineNowWithBodyWithResponse request with any body + ScheduleRiskEngineNowWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ScheduleRiskEngineNowResponse, error) + + ScheduleRiskEngineNowWithResponse(ctx context.Context, body ScheduleRiskEngineNowJSONRequestBody, reqEditors ...RequestEditorFn) (*ScheduleRiskEngineNowResponse, error) + + // BulkCreateSavedObjectsWithBodyWithResponse request with any body + BulkCreateSavedObjectsWithBodyWithResponse(ctx context.Context, params *BulkCreateSavedObjectsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkCreateSavedObjectsResponse, error) + + BulkCreateSavedObjectsWithResponse(ctx context.Context, params *BulkCreateSavedObjectsParams, body BulkCreateSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkCreateSavedObjectsResponse, error) + + // BulkDeleteSavedObjectsWithBodyWithResponse request with any body + BulkDeleteSavedObjectsWithBodyWithResponse(ctx context.Context, params *BulkDeleteSavedObjectsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkDeleteSavedObjectsResponse, error) + + BulkDeleteSavedObjectsWithResponse(ctx context.Context, params *BulkDeleteSavedObjectsParams, body BulkDeleteSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkDeleteSavedObjectsResponse, error) + + // BulkGetSavedObjectsWithBodyWithResponse request with any body + BulkGetSavedObjectsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkGetSavedObjectsResponse, error) + + BulkGetSavedObjectsWithResponse(ctx context.Context, body BulkGetSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkGetSavedObjectsResponse, error) + + // BulkResolveSavedObjectsWithBodyWithResponse request with any body + BulkResolveSavedObjectsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkResolveSavedObjectsResponse, error) + + BulkResolveSavedObjectsWithResponse(ctx context.Context, body BulkResolveSavedObjectsJSONRequestBody, 
reqEditors ...RequestEditorFn) (*BulkResolveSavedObjectsResponse, error) + + // BulkUpdateSavedObjectsWithBodyWithResponse request with any body + BulkUpdateSavedObjectsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkUpdateSavedObjectsResponse, error) + + BulkUpdateSavedObjectsWithResponse(ctx context.Context, body BulkUpdateSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkUpdateSavedObjectsResponse, error) + + // PostSavedObjectsExportWithBodyWithResponse request with any body + PostSavedObjectsExportWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSavedObjectsExportResponse, error) + + PostSavedObjectsExportWithResponse(ctx context.Context, body PostSavedObjectsExportJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSavedObjectsExportResponse, error) + + // FindSavedObjectsWithResponse request + FindSavedObjectsWithResponse(ctx context.Context, params *FindSavedObjectsParams, reqEditors ...RequestEditorFn) (*FindSavedObjectsResponse, error) + + // PostSavedObjectsImportWithBodyWithResponse request with any body + PostSavedObjectsImportWithBodyWithResponse(ctx context.Context, params *PostSavedObjectsImportParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSavedObjectsImportResponse, error) + + // ResolveImportErrorsWithBodyWithResponse request with any body + ResolveImportErrorsWithBodyWithResponse(ctx context.Context, params *ResolveImportErrorsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ResolveImportErrorsResponse, error) + + // ResolveSavedObjectWithResponse request + ResolveSavedObjectWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, reqEditors ...RequestEditorFn) (*ResolveSavedObjectResponse, error) + + // CreateSavedObjectWithBodyWithResponse request with any body + CreateSavedObjectWithBodyWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, params *CreateSavedObjectParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateSavedObjectResponse, error) + + CreateSavedObjectWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, params *CreateSavedObjectParams, body CreateSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateSavedObjectResponse, error) + + // GetSavedObjectWithResponse request + GetSavedObjectWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, reqEditors ...RequestEditorFn) (*GetSavedObjectResponse, error) + + // CreateSavedObjectIdWithBodyWithResponse request with any body + CreateSavedObjectIdWithBodyWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, params *CreateSavedObjectIdParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateSavedObjectIdResponse, error) + + CreateSavedObjectIdWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, params *CreateSavedObjectIdParams, body CreateSavedObjectIdJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateSavedObjectIdResponse, error) + + // UpdateSavedObjectWithBodyWithResponse request with any body + UpdateSavedObjectWithBodyWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateSavedObjectResponse, error) + + 
UpdateSavedObjectWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, body UpdateSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateSavedObjectResponse, error) + + // GetSecurityRoleWithResponse request + GetSecurityRoleWithResponse(ctx context.Context, params *GetSecurityRoleParams, reqEditors ...RequestEditorFn) (*GetSecurityRoleResponse, error) + + // PostSecurityRoleQueryWithBodyWithResponse request with any body + PostSecurityRoleQueryWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSecurityRoleQueryResponse, error) + + PostSecurityRoleQueryWithResponse(ctx context.Context, body PostSecurityRoleQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSecurityRoleQueryResponse, error) + + // DeleteSecurityRoleNameWithResponse request + DeleteSecurityRoleNameWithResponse(ctx context.Context, name string, reqEditors ...RequestEditorFn) (*DeleteSecurityRoleNameResponse, error) + + // GetSecurityRoleNameWithResponse request + GetSecurityRoleNameWithResponse(ctx context.Context, name string, params *GetSecurityRoleNameParams, reqEditors ...RequestEditorFn) (*GetSecurityRoleNameResponse, error) + + // PutSecurityRoleNameWithBodyWithResponse request with any body + PutSecurityRoleNameWithBodyWithResponse(ctx context.Context, name string, params *PutSecurityRoleNameParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutSecurityRoleNameResponse, error) + + PutSecurityRoleNameWithResponse(ctx context.Context, name string, params *PutSecurityRoleNameParams, body PutSecurityRoleNameJSONRequestBody, reqEditors ...RequestEditorFn) (*PutSecurityRoleNameResponse, error) + + // PostSecurityRolesWithBodyWithResponse request with any body + PostSecurityRolesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSecurityRolesResponse, error) + + PostSecurityRolesWithResponse(ctx context.Context, body PostSecurityRolesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSecurityRolesResponse, error) + + // PostSecuritySessionInvalidateWithBodyWithResponse request with any body + PostSecuritySessionInvalidateWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSecuritySessionInvalidateResponse, error) + + PostSecuritySessionInvalidateWithResponse(ctx context.Context, body PostSecuritySessionInvalidateJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSecuritySessionInvalidateResponse, error) + + // PerformAnonymizationFieldsBulkActionWithBodyWithResponse request with any body + PerformAnonymizationFieldsBulkActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PerformAnonymizationFieldsBulkActionResponse, error) + + PerformAnonymizationFieldsBulkActionWithResponse(ctx context.Context, body PerformAnonymizationFieldsBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*PerformAnonymizationFieldsBulkActionResponse, error) + + // FindAnonymizationFieldsWithResponse request + FindAnonymizationFieldsWithResponse(ctx context.Context, params *FindAnonymizationFieldsParams, reqEditors ...RequestEditorFn) (*FindAnonymizationFieldsResponse, error) + + // ChatCompleteWithBodyWithResponse request with any body + ChatCompleteWithBodyWithResponse(ctx context.Context, params *ChatCompleteParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) 
(*ChatCompleteResponse, error) + + ChatCompleteWithResponse(ctx context.Context, params *ChatCompleteParams, body ChatCompleteJSONRequestBody, reqEditors ...RequestEditorFn) (*ChatCompleteResponse, error) + + // DeleteAllConversationsWithBodyWithResponse request with any body + DeleteAllConversationsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteAllConversationsResponse, error) + + DeleteAllConversationsWithResponse(ctx context.Context, body DeleteAllConversationsJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteAllConversationsResponse, error) + + // CreateConversationWithBodyWithResponse request with any body + CreateConversationWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateConversationResponse, error) + + CreateConversationWithResponse(ctx context.Context, body CreateConversationJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateConversationResponse, error) + + // FindConversationsWithResponse request + FindConversationsWithResponse(ctx context.Context, params *FindConversationsParams, reqEditors ...RequestEditorFn) (*FindConversationsResponse, error) + + // DeleteConversationWithResponse request + DeleteConversationWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*DeleteConversationResponse, error) + + // ReadConversationWithResponse request + ReadConversationWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*ReadConversationResponse, error) + + // UpdateConversationWithBodyWithResponse request with any body + UpdateConversationWithBodyWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateConversationResponse, error) + + UpdateConversationWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, body UpdateConversationJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateConversationResponse, error) + + // CreateKnowledgeBaseEntryWithBodyWithResponse request with any body + CreateKnowledgeBaseEntryWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateKnowledgeBaseEntryResponse, error) + + CreateKnowledgeBaseEntryWithResponse(ctx context.Context, body CreateKnowledgeBaseEntryJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateKnowledgeBaseEntryResponse, error) + + // PerformKnowledgeBaseEntryBulkActionWithBodyWithResponse request with any body + PerformKnowledgeBaseEntryBulkActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PerformKnowledgeBaseEntryBulkActionResponse, error) + + PerformKnowledgeBaseEntryBulkActionWithResponse(ctx context.Context, body PerformKnowledgeBaseEntryBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*PerformKnowledgeBaseEntryBulkActionResponse, error) + + // FindKnowledgeBaseEntriesWithResponse request + FindKnowledgeBaseEntriesWithResponse(ctx context.Context, params *FindKnowledgeBaseEntriesParams, reqEditors ...RequestEditorFn) (*FindKnowledgeBaseEntriesResponse, error) + + // DeleteKnowledgeBaseEntryWithResponse request + DeleteKnowledgeBaseEntryWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*DeleteKnowledgeBaseEntryResponse, error) + + // ReadKnowledgeBaseEntryWithResponse request + 
ReadKnowledgeBaseEntryWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*ReadKnowledgeBaseEntryResponse, error) + + // UpdateKnowledgeBaseEntryWithBodyWithResponse request with any body + UpdateKnowledgeBaseEntryWithBodyWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateKnowledgeBaseEntryResponse, error) + + UpdateKnowledgeBaseEntryWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, body UpdateKnowledgeBaseEntryJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateKnowledgeBaseEntryResponse, error) + + // ReadKnowledgeBaseWithResponse request + ReadKnowledgeBaseWithResponse(ctx context.Context, resource string, reqEditors ...RequestEditorFn) (*ReadKnowledgeBaseResponse, error) + + // CreateKnowledgeBaseWithResponse request + CreateKnowledgeBaseWithResponse(ctx context.Context, resource string, params *CreateKnowledgeBaseParams, reqEditors ...RequestEditorFn) (*CreateKnowledgeBaseResponse, error) + + // PerformPromptsBulkActionWithBodyWithResponse request with any body + PerformPromptsBulkActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PerformPromptsBulkActionResponse, error) + + PerformPromptsBulkActionWithResponse(ctx context.Context, body PerformPromptsBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*PerformPromptsBulkActionResponse, error) + + // FindPromptsWithResponse request + FindPromptsWithResponse(ctx context.Context, params *FindPromptsParams, reqEditors ...RequestEditorFn) (*FindPromptsResponse, error) + + // PostUrlWithBodyWithResponse request with any body + PostUrlWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostUrlResponse, error) + + PostUrlWithResponse(ctx context.Context, body PostUrlJSONRequestBody, reqEditors ...RequestEditorFn) (*PostUrlResponse, error) + + // ResolveUrlWithResponse request + ResolveUrlWithResponse(ctx context.Context, slug string, reqEditors ...RequestEditorFn) (*ResolveUrlResponse, error) + + // DeleteUrlWithResponse request + DeleteUrlWithResponse(ctx context.Context, id ShortURLAPIsIdParam, reqEditors ...RequestEditorFn) (*DeleteUrlResponse, error) + + // GetUrlWithResponse request + GetUrlWithResponse(ctx context.Context, id ShortURLAPIsIdParam, reqEditors ...RequestEditorFn) (*GetUrlResponse, error) + + // PostSpacesCopySavedObjectsWithBodyWithResponse request with any body + PostSpacesCopySavedObjectsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesCopySavedObjectsResponse, error) + + PostSpacesCopySavedObjectsWithResponse(ctx context.Context, body PostSpacesCopySavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesCopySavedObjectsResponse, error) + + // PostSpacesDisableLegacyUrlAliasesWithBodyWithResponse request with any body + PostSpacesDisableLegacyUrlAliasesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesDisableLegacyUrlAliasesResponse, error) + + PostSpacesDisableLegacyUrlAliasesWithResponse(ctx context.Context, body PostSpacesDisableLegacyUrlAliasesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesDisableLegacyUrlAliasesResponse, error) + + // PostSpacesGetShareableReferencesWithBodyWithResponse request with any body + 
PostSpacesGetShareableReferencesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesGetShareableReferencesResponse, error) + + PostSpacesGetShareableReferencesWithResponse(ctx context.Context, body PostSpacesGetShareableReferencesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesGetShareableReferencesResponse, error) + + // PostSpacesResolveCopySavedObjectsErrorsWithBodyWithResponse request with any body + PostSpacesResolveCopySavedObjectsErrorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesResolveCopySavedObjectsErrorsResponse, error) + + PostSpacesResolveCopySavedObjectsErrorsWithResponse(ctx context.Context, body PostSpacesResolveCopySavedObjectsErrorsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesResolveCopySavedObjectsErrorsResponse, error) + + // PostSpacesUpdateObjectsSpacesWithBodyWithResponse request with any body + PostSpacesUpdateObjectsSpacesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesUpdateObjectsSpacesResponse, error) + + PostSpacesUpdateObjectsSpacesWithResponse(ctx context.Context, body PostSpacesUpdateObjectsSpacesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesUpdateObjectsSpacesResponse, error) + + // GetSpacesSpaceWithResponse request + GetSpacesSpaceWithResponse(ctx context.Context, params *GetSpacesSpaceParams, reqEditors ...RequestEditorFn) (*GetSpacesSpaceResponse, error) + + // PostSpacesSpaceWithBodyWithResponse request with any body + PostSpacesSpaceWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesSpaceResponse, error) + + PostSpacesSpaceWithResponse(ctx context.Context, body PostSpacesSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesSpaceResponse, error) + + // DeleteSpacesSpaceIdWithResponse request + DeleteSpacesSpaceIdWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteSpacesSpaceIdResponse, error) + + // GetSpacesSpaceIdWithResponse request + GetSpacesSpaceIdWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetSpacesSpaceIdResponse, error) + + // PutSpacesSpaceIdWithBodyWithResponse request with any body + PutSpacesSpaceIdWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutSpacesSpaceIdResponse, error) + + PutSpacesSpaceIdWithResponse(ctx context.Context, id string, body PutSpacesSpaceIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PutSpacesSpaceIdResponse, error) + + // GetStatusWithResponse request + GetStatusWithResponse(ctx context.Context, params *GetStatusParams, reqEditors ...RequestEditorFn) (*GetStatusResponse, error) + + // GetStreamsWithBodyWithResponse request with any body + GetStreamsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsResponse, error) + + GetStreamsWithResponse(ctx context.Context, body GetStreamsJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsResponse, error) + + // PostStreamsDisableWithBodyWithResponse request with any body + PostStreamsDisableWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsDisableResponse, error) + + PostStreamsDisableWithResponse(ctx context.Context, body 
PostStreamsDisableJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsDisableResponse, error) + + // PostStreamsEnableWithBodyWithResponse request with any body + PostStreamsEnableWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsEnableResponse, error) + + PostStreamsEnableWithResponse(ctx context.Context, body PostStreamsEnableJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsEnableResponse, error) + + // PostStreamsResyncWithBodyWithResponse request with any body + PostStreamsResyncWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsResyncResponse, error) + + PostStreamsResyncWithResponse(ctx context.Context, body PostStreamsResyncJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsResyncResponse, error) + + // DeleteStreamsNameWithBodyWithResponse request with any body + DeleteStreamsNameWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteStreamsNameResponse, error) + + DeleteStreamsNameWithResponse(ctx context.Context, name string, body DeleteStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteStreamsNameResponse, error) + + // GetStreamsNameWithBodyWithResponse request with any body + GetStreamsNameWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameResponse, error) + + GetStreamsNameWithResponse(ctx context.Context, name string, body GetStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameResponse, error) + + // PutStreamsNameWithBodyWithResponse request with any body + PutStreamsNameWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameResponse, error) + + PutStreamsNameWithResponse(ctx context.Context, name string, body PutStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameResponse, error) + + // PostStreamsNameForkWithBodyWithResponse request with any body + PostStreamsNameForkWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameForkResponse, error) + + PostStreamsNameForkWithResponse(ctx context.Context, name string, body PostStreamsNameForkJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsNameForkResponse, error) + + // GetStreamsNameGroupWithBodyWithResponse request with any body + GetStreamsNameGroupWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameGroupResponse, error) + + GetStreamsNameGroupWithResponse(ctx context.Context, name string, body GetStreamsNameGroupJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameGroupResponse, error) + + // PutStreamsNameGroupWithBodyWithResponse request with any body + PutStreamsNameGroupWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameGroupResponse, error) + + PutStreamsNameGroupWithResponse(ctx context.Context, name string, body PutStreamsNameGroupJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameGroupResponse, error) + + // GetStreamsNameIngestWithBodyWithResponse request with any body + GetStreamsNameIngestWithBodyWithResponse(ctx context.Context, name string, contentType 
string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameIngestResponse, error) + + GetStreamsNameIngestWithResponse(ctx context.Context, name string, body GetStreamsNameIngestJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameIngestResponse, error) + + // PutStreamsNameIngestWithBodyWithResponse request with any body + PutStreamsNameIngestWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameIngestResponse, error) + + PutStreamsNameIngestWithResponse(ctx context.Context, name string, body PutStreamsNameIngestJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameIngestResponse, error) + + // PostStreamsNameContentExportWithBodyWithResponse request with any body + PostStreamsNameContentExportWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameContentExportResponse, error) + + PostStreamsNameContentExportWithResponse(ctx context.Context, name string, body PostStreamsNameContentExportJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsNameContentExportResponse, error) + + // PostStreamsNameContentImportWithBodyWithResponse request with any body + PostStreamsNameContentImportWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameContentImportResponse, error) + + // GetStreamsNameDashboardsWithBodyWithResponse request with any body + GetStreamsNameDashboardsWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameDashboardsResponse, error) + + GetStreamsNameDashboardsWithResponse(ctx context.Context, name string, body GetStreamsNameDashboardsJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameDashboardsResponse, error) + + // PostStreamsNameDashboardsBulkWithBodyWithResponse request with any body + PostStreamsNameDashboardsBulkWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameDashboardsBulkResponse, error) + + PostStreamsNameDashboardsBulkWithResponse(ctx context.Context, name string, body PostStreamsNameDashboardsBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsNameDashboardsBulkResponse, error) + + // DeleteStreamsNameDashboardsDashboardidWithBodyWithResponse request with any body + DeleteStreamsNameDashboardsDashboardidWithBodyWithResponse(ctx context.Context, name string, dashboardId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteStreamsNameDashboardsDashboardidResponse, error) + + DeleteStreamsNameDashboardsDashboardidWithResponse(ctx context.Context, name string, dashboardId string, body DeleteStreamsNameDashboardsDashboardidJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteStreamsNameDashboardsDashboardidResponse, error) + + // PutStreamsNameDashboardsDashboardidWithBodyWithResponse request with any body + PutStreamsNameDashboardsDashboardidWithBodyWithResponse(ctx context.Context, name string, dashboardId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameDashboardsDashboardidResponse, error) + + PutStreamsNameDashboardsDashboardidWithResponse(ctx context.Context, name string, dashboardId string, body PutStreamsNameDashboardsDashboardidJSONRequestBody, reqEditors ...RequestEditorFn) 
(*PutStreamsNameDashboardsDashboardidResponse, error) + + // GetStreamsNameQueriesWithBodyWithResponse request with any body + GetStreamsNameQueriesWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameQueriesResponse, error) + + GetStreamsNameQueriesWithResponse(ctx context.Context, name string, body GetStreamsNameQueriesJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameQueriesResponse, error) + + // PostStreamsNameQueriesBulkWithBodyWithResponse request with any body + PostStreamsNameQueriesBulkWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameQueriesBulkResponse, error) + + PostStreamsNameQueriesBulkWithResponse(ctx context.Context, name string, body PostStreamsNameQueriesBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsNameQueriesBulkResponse, error) + + // DeleteStreamsNameQueriesQueryidWithBodyWithResponse request with any body + DeleteStreamsNameQueriesQueryidWithBodyWithResponse(ctx context.Context, name string, queryId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteStreamsNameQueriesQueryidResponse, error) + + DeleteStreamsNameQueriesQueryidWithResponse(ctx context.Context, name string, queryId string, body DeleteStreamsNameQueriesQueryidJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteStreamsNameQueriesQueryidResponse, error) + + // PutStreamsNameQueriesQueryidWithBodyWithResponse request with any body + PutStreamsNameQueriesQueryidWithBodyWithResponse(ctx context.Context, name string, queryId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameQueriesQueryidResponse, error) + + PutStreamsNameQueriesQueryidWithResponse(ctx context.Context, name string, queryId string, body PutStreamsNameQueriesQueryidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameQueriesQueryidResponse, error) + + // GetStreamsNameRulesWithBodyWithResponse request with any body + GetStreamsNameRulesWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameRulesResponse, error) + + GetStreamsNameRulesWithResponse(ctx context.Context, name string, body GetStreamsNameRulesJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameRulesResponse, error) + + // DeleteStreamsNameRulesRuleidWithBodyWithResponse request with any body + DeleteStreamsNameRulesRuleidWithBodyWithResponse(ctx context.Context, name string, ruleId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteStreamsNameRulesRuleidResponse, error) + + DeleteStreamsNameRulesRuleidWithResponse(ctx context.Context, name string, ruleId string, body DeleteStreamsNameRulesRuleidJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteStreamsNameRulesRuleidResponse, error) + + // PutStreamsNameRulesRuleidWithBodyWithResponse request with any body + PutStreamsNameRulesRuleidWithBodyWithResponse(ctx context.Context, name string, ruleId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameRulesRuleidResponse, error) + + PutStreamsNameRulesRuleidWithResponse(ctx context.Context, name string, ruleId string, body PutStreamsNameRulesRuleidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameRulesRuleidResponse, error) + + // GetStreamsNameSignificantEventsWithBodyWithResponse request with any body + 
GetStreamsNameSignificantEventsWithBodyWithResponse(ctx context.Context, name string, params *GetStreamsNameSignificantEventsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameSignificantEventsResponse, error) + + GetStreamsNameSignificantEventsWithResponse(ctx context.Context, name string, params *GetStreamsNameSignificantEventsParams, body GetStreamsNameSignificantEventsJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameSignificantEventsResponse, error) + + // GetStreamsNameSignificantEventsGenerateWithBodyWithResponse request with any body + GetStreamsNameSignificantEventsGenerateWithBodyWithResponse(ctx context.Context, name string, params *GetStreamsNameSignificantEventsGenerateParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameSignificantEventsGenerateResponse, error) + + GetStreamsNameSignificantEventsGenerateWithResponse(ctx context.Context, name string, params *GetStreamsNameSignificantEventsGenerateParams, body GetStreamsNameSignificantEventsGenerateJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameSignificantEventsGenerateResponse, error) + + // PostStreamsNameSignificantEventsPreviewWithBodyWithResponse request with any body + PostStreamsNameSignificantEventsPreviewWithBodyWithResponse(ctx context.Context, name string, params *PostStreamsNameSignificantEventsPreviewParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameSignificantEventsPreviewResponse, error) + + PostStreamsNameSignificantEventsPreviewWithResponse(ctx context.Context, name string, params *PostStreamsNameSignificantEventsPreviewParams, body PostStreamsNameSignificantEventsPreviewJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsNameSignificantEventsPreviewResponse, error) + + // PostSyntheticsMonitorTestWithResponse request + PostSyntheticsMonitorTestWithResponse(ctx context.Context, monitorId string, reqEditors ...RequestEditorFn) (*PostSyntheticsMonitorTestResponse, error) + + // GetSyntheticMonitorsWithResponse request + GetSyntheticMonitorsWithResponse(ctx context.Context, params *GetSyntheticMonitorsParams, reqEditors ...RequestEditorFn) (*GetSyntheticMonitorsResponse, error) + + // PostSyntheticMonitorsWithBodyWithResponse request with any body + PostSyntheticMonitorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSyntheticMonitorsResponse, error) + + PostSyntheticMonitorsWithResponse(ctx context.Context, body PostSyntheticMonitorsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSyntheticMonitorsResponse, error) + + // DeleteSyntheticMonitorsWithBodyWithResponse request with any body + DeleteSyntheticMonitorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteSyntheticMonitorsResponse, error) + + DeleteSyntheticMonitorsWithResponse(ctx context.Context, body DeleteSyntheticMonitorsJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteSyntheticMonitorsResponse, error) + + // DeleteSyntheticMonitorWithResponse request + DeleteSyntheticMonitorWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteSyntheticMonitorResponse, error) + + // GetSyntheticMonitorWithResponse request + GetSyntheticMonitorWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetSyntheticMonitorResponse, error) + + // PutSyntheticMonitorWithBodyWithResponse request with any body + 
PutSyntheticMonitorWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutSyntheticMonitorResponse, error) + + PutSyntheticMonitorWithResponse(ctx context.Context, id string, body PutSyntheticMonitorJSONRequestBody, reqEditors ...RequestEditorFn) (*PutSyntheticMonitorResponse, error) + + // GetParametersWithResponse request + GetParametersWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetParametersResponse, error) + + // PostParametersWithBodyWithResponse request with any body + PostParametersWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostParametersResponse, error) + + PostParametersWithResponse(ctx context.Context, body PostParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*PostParametersResponse, error) + + // DeleteParametersWithBodyWithResponse request with any body + DeleteParametersWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteParametersResponse, error) + + DeleteParametersWithResponse(ctx context.Context, body DeleteParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteParametersResponse, error) + + // DeleteParameterWithResponse request + DeleteParameterWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteParameterResponse, error) + + // GetParameterWithResponse request + GetParameterWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetParameterResponse, error) + + // PutParameterWithBodyWithResponse request with any body + PutParameterWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutParameterResponse, error) + + PutParameterWithResponse(ctx context.Context, id string, body PutParameterJSONRequestBody, reqEditors ...RequestEditorFn) (*PutParameterResponse, error) + + // GetPrivateLocationsWithResponse request + GetPrivateLocationsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetPrivateLocationsResponse, error) + + // PostPrivateLocationWithBodyWithResponse request with any body + PostPrivateLocationWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostPrivateLocationResponse, error) + + PostPrivateLocationWithResponse(ctx context.Context, body PostPrivateLocationJSONRequestBody, reqEditors ...RequestEditorFn) (*PostPrivateLocationResponse, error) + + // DeletePrivateLocationWithResponse request + DeletePrivateLocationWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeletePrivateLocationResponse, error) + + // GetPrivateLocationWithResponse request + GetPrivateLocationWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetPrivateLocationResponse, error) + + // PutPrivateLocationWithBodyWithResponse request with any body + PutPrivateLocationWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutPrivateLocationResponse, error) + + PutPrivateLocationWithResponse(ctx context.Context, id string, body PutPrivateLocationJSONRequestBody, reqEditors ...RequestEditorFn) (*PutPrivateLocationResponse, error) + + // TaskManagerHealthWithResponse request + TaskManagerHealthWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*TaskManagerHealthResponse, error) + + // DeleteTimelinesWithBodyWithResponse 
request with any body + DeleteTimelinesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteTimelinesResponse, error) + + DeleteTimelinesWithResponse(ctx context.Context, body DeleteTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteTimelinesResponse, error) + + // GetTimelineWithResponse request + GetTimelineWithResponse(ctx context.Context, params *GetTimelineParams, reqEditors ...RequestEditorFn) (*GetTimelineResponse, error) + + // PatchTimelineWithBodyWithResponse request with any body + PatchTimelineWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchTimelineResponse, error) + + PatchTimelineWithResponse(ctx context.Context, body PatchTimelineJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchTimelineResponse, error) + + // CreateTimelinesWithBodyWithResponse request with any body + CreateTimelinesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateTimelinesResponse, error) + + CreateTimelinesWithResponse(ctx context.Context, body CreateTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateTimelinesResponse, error) + + // CopyTimelineWithBodyWithResponse request with any body + CopyTimelineWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CopyTimelineResponse, error) + + CopyTimelineWithResponse(ctx context.Context, body CopyTimelineJSONRequestBody, reqEditors ...RequestEditorFn) (*CopyTimelineResponse, error) + + // GetDraftTimelinesWithResponse request + GetDraftTimelinesWithResponse(ctx context.Context, params *GetDraftTimelinesParams, reqEditors ...RequestEditorFn) (*GetDraftTimelinesResponse, error) + + // CleanDraftTimelinesWithBodyWithResponse request with any body + CleanDraftTimelinesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CleanDraftTimelinesResponse, error) + + CleanDraftTimelinesWithResponse(ctx context.Context, body CleanDraftTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*CleanDraftTimelinesResponse, error) + + // ExportTimelinesWithBodyWithResponse request with any body + ExportTimelinesWithBodyWithResponse(ctx context.Context, params *ExportTimelinesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ExportTimelinesResponse, error) + + ExportTimelinesWithResponse(ctx context.Context, params *ExportTimelinesParams, body ExportTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*ExportTimelinesResponse, error) + + // PersistFavoriteRouteWithBodyWithResponse request with any body + PersistFavoriteRouteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PersistFavoriteRouteResponse, error) + + PersistFavoriteRouteWithResponse(ctx context.Context, body PersistFavoriteRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*PersistFavoriteRouteResponse, error) + + // ImportTimelinesWithBodyWithResponse request with any body + ImportTimelinesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ImportTimelinesResponse, error) + + ImportTimelinesWithResponse(ctx context.Context, body ImportTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*ImportTimelinesResponse, error) + + // InstallPrepackedTimelinesWithBodyWithResponse request with any body + 
InstallPrepackedTimelinesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*InstallPrepackedTimelinesResponse, error) + + InstallPrepackedTimelinesWithResponse(ctx context.Context, body InstallPrepackedTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*InstallPrepackedTimelinesResponse, error) + + // ResolveTimelineWithResponse request + ResolveTimelineWithResponse(ctx context.Context, params *ResolveTimelineParams, reqEditors ...RequestEditorFn) (*ResolveTimelineResponse, error) + + // GetTimelinesWithResponse request + GetTimelinesWithResponse(ctx context.Context, params *GetTimelinesParams, reqEditors ...RequestEditorFn) (*GetTimelinesResponse, error) + + // GetUpgradeStatusWithResponse request + GetUpgradeStatusWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetUpgradeStatusResponse, error) + + // GetUptimeSettingsWithResponse request + GetUptimeSettingsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetUptimeSettingsResponse, error) + + // PutUptimeSettingsWithBodyWithResponse request with any body + PutUptimeSettingsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutUptimeSettingsResponse, error) + + PutUptimeSettingsWithResponse(ctx context.Context, body PutUptimeSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*PutUptimeSettingsResponse, error) + + // DeleteActionsConnectorIdWithResponse request + DeleteActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteActionsConnectorIdResponse, error) + + // GetActionsConnectorIdWithResponse request + GetActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetActionsConnectorIdResponse, error) + + // PostActionsConnectorIdWithBodyWithResponse request with any body + PostActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) + + PostActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) + + // PutActionsConnectorIdWithBodyWithResponse request with any body + PutActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) + + PutActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) + + // GetActionsConnectorsWithResponse request + GetActionsConnectorsWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetActionsConnectorsResponse, error) + + // GetAllDataViewsDefaultWithResponse request + GetAllDataViewsDefaultWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetAllDataViewsDefaultResponse, error) + + // CreateDataViewDefaultwWithBodyWithResponse request with any body + CreateDataViewDefaultwWithBodyWithResponse(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateDataViewDefaultwResponse, error) + + CreateDataViewDefaultwWithResponse(ctx context.Context, spaceId SpaceId, body 
CreateDataViewDefaultwJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateDataViewDefaultwResponse, error) + + // DeleteDataViewDefaultWithResponse request + DeleteDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*DeleteDataViewDefaultResponse, error) + + // GetDataViewDefaultWithResponse request + GetDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*GetDataViewDefaultResponse, error) + + // UpdateDataViewDefaultWithBodyWithResponse request with any body + UpdateDataViewDefaultWithBodyWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateDataViewDefaultResponse, error) + + UpdateDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateDataViewDefaultResponse, error) + + // PostMaintenanceWindowWithBodyWithResponse request with any body + PostMaintenanceWindowWithBodyWithResponse(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) + + PostMaintenanceWindowWithResponse(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) + + // DeleteMaintenanceWindowIdWithResponse request + DeleteMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteMaintenanceWindowIdResponse, error) + + // GetMaintenanceWindowIdWithResponse request + GetMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetMaintenanceWindowIdResponse, error) + + // PatchMaintenanceWindowIdWithBodyWithResponse request with any body + PatchMaintenanceWindowIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) + + PatchMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) + + // FindSlosOpWithResponse request + FindSlosOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, params *FindSlosOpParams, reqEditors ...RequestEditorFn) (*FindSlosOpResponse, error) + + // CreateSloOpWithBodyWithResponse request with any body + CreateSloOpWithBodyWithResponse(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateSloOpResponse, error) + + CreateSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, body CreateSloOpJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateSloOpResponse, error) + + // BulkDeleteOpWithBodyWithResponse request with any body + BulkDeleteOpWithBodyWithResponse(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkDeleteOpResponse, error) + + BulkDeleteOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, body BulkDeleteOpJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkDeleteOpResponse, error) + + // BulkDeleteStatusOpWithResponse request + BulkDeleteStatusOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, taskId string, reqEditors 
...RequestEditorFn) (*BulkDeleteStatusOpResponse, error) + + // DeleteRollupDataOpWithBodyWithResponse request with any body + DeleteRollupDataOpWithBodyWithResponse(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteRollupDataOpResponse, error) + + DeleteRollupDataOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, body DeleteRollupDataOpJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteRollupDataOpResponse, error) + + // DeleteSloInstancesOpWithBodyWithResponse request with any body + DeleteSloInstancesOpWithBodyWithResponse(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteSloInstancesOpResponse, error) + + DeleteSloInstancesOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, body DeleteSloInstancesOpJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteSloInstancesOpResponse, error) + + // DeleteSloOpWithResponse request + DeleteSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*DeleteSloOpResponse, error) + + // GetSloOpWithResponse request + GetSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, params *GetSloOpParams, reqEditors ...RequestEditorFn) (*GetSloOpResponse, error) + + // UpdateSloOpWithBodyWithResponse request with any body + UpdateSloOpWithBodyWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateSloOpResponse, error) + + UpdateSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, body UpdateSloOpJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateSloOpResponse, error) + + // ResetSloOpWithResponse request + ResetSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*ResetSloOpResponse, error) + + // DisableSloOpWithResponse request + DisableSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*DisableSloOpResponse, error) + + // EnableSloOpWithResponse request + EnableSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*EnableSloOpResponse, error) + + // GetDefinitionsOpWithResponse request + GetDefinitionsOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, params *GetDefinitionsOpParams, reqEditors ...RequestEditorFn) (*GetDefinitionsOpResponse, error) +} + +type PostActionsConnectorIdExecuteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Config *map[string]interface{} `json:"config,omitempty"` + + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` + + // Id The identifier for the connector. + Id string `json:"id"` + + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` + + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. + IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. 
+ Name string `json:"name"` + } +} + +// Status returns HTTPResponse.Status +func (r PostActionsConnectorIdExecuteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostActionsConnectorIdExecuteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetActionsConnectorTypesResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r GetActionsConnectorTypesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetActionsConnectorTypesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetAlertingHealthResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // AlertingFrameworkHealth Three substates identify the health of the alerting framework: `decryption_health`, `execution_health`, and `read_health`. + AlertingFrameworkHealth *struct { + // DecryptionHealth The timestamp and status of the rule decryption. + DecryptionHealth *struct { + Status *GetAlertingHealth200AlertingFrameworkHealthDecryptionHealthStatus `json:"status,omitempty"` + Timestamp *time.Time `json:"timestamp,omitempty"` + } `json:"decryption_health,omitempty"` + + // ExecutionHealth The timestamp and status of the rule run. + ExecutionHealth *struct { + Status *GetAlertingHealth200AlertingFrameworkHealthExecutionHealthStatus `json:"status,omitempty"` + Timestamp *time.Time `json:"timestamp,omitempty"` + } `json:"execution_health,omitempty"` + + // ReadHealth The timestamp and status of the rule reading events. + ReadHealth *struct { + Status *GetAlertingHealth200AlertingFrameworkHealthReadHealthStatus `json:"status,omitempty"` + Timestamp *time.Time `json:"timestamp,omitempty"` + } `json:"read_health,omitempty"` + } `json:"alerting_framework_health,omitempty"` + + // HasPermanentEncryptionKey If `false`, the encrypted saved object plugin does not have a permanent encryption key. + HasPermanentEncryptionKey *bool `json:"has_permanent_encryption_key,omitempty"` + + // IsSufficientlySecure If `false`, security is enabled but TLS is not. 
+ IsSufficientlySecure *bool `json:"is_sufficiently_secure,omitempty"` + } + JSON401 *Alerting401Response +} +type GetAlertingHealth200AlertingFrameworkHealthDecryptionHealthStatus string +type GetAlertingHealth200AlertingFrameworkHealthExecutionHealthStatus string +type GetAlertingHealth200AlertingFrameworkHealthReadHealthStatus string + +// Status returns HTTPResponse.Status +func (r GetAlertingHealthResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAlertingHealthResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteAlertingRuleIdResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r DeleteAlertingRuleIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteAlertingRuleIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetAlertingRuleIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Actions []struct { + // AlertsFilter Defines a period that limits whether the action runs. + AlertsFilter *struct { + Query *struct { + // Dsl A filter written in Elasticsearch Query Domain Specific Language (DSL). + Dsl *string `json:"dsl,omitempty"` + + // Filters A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package. + Filters []struct { + State *struct { + // Store A filter can be either specific to an application context or applied globally. + Store GetAlertingRuleId200ActionsAlertsFilterQueryFiltersStateStore `json:"store"` + } `json:"$state,omitempty"` + Meta map[string]interface{} `json:"meta"` + Query *map[string]interface{} `json:"query,omitempty"` + } `json:"filters"` + + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query,omitempty"` + Timeframe *struct { + // Days Defines the days of the week that the action can run, represented as an array of numbers. For example, `1` represents Monday. An empty array is equivalent to specifying all the days of the week. + Days []GetAlertingRuleId200ActionsAlertsFilterTimeframeDays `json:"days"` + Hours struct { + // End The end of the time frame in 24-hour notation (`hh:mm`). + End string `json:"end"` + + // Start The start of the time frame in 24-hour notation (`hh:mm`). + Start string `json:"start"` + } `json:"hours"` + + // Timezone The ISO time zone for the `hours` values. Values such as `UTC` and `UTC+1` also work but lack built-in daylight savings time support and are not recommended. + Timezone string `json:"timezone"` + } `json:"timeframe,omitempty"` + } `json:"alerts_filter,omitempty"` + + // ConnectorTypeId The type of connector. This property appears in responses but cannot be set in requests. + ConnectorTypeId string `json:"connector_type_id"` + Frequency *struct { + // NotifyWhen Indicates how often alerts generate actions. 
Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen GetAlertingRuleId200ActionsFrequencyNotifyWhen `json:"notify_when"` + + // Summary Indicates whether the action is a summary. + Summary bool `json:"summary"` + + // Throttle The throttle interval, which defines how often an alert generates repeated actions. It is specified in seconds, minutes, hours, or days and is applicable only if 'notify_when' is set to 'onThrottleInterval'. NOTE: You cannot specify the throttle interval at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + Throttle *string `json:"throttle"` + } `json:"frequency,omitempty"` + + // Group The group name, which affects when the action runs (for example, when the threshold is met or when the alert is recovered). Each rule type has a list of valid action group names. If you don't need to group actions, set to `default`. + Group *string `json:"group,omitempty"` + + // Id The identifier for the connector saved object. + Id string `json:"id"` + + // Params The parameters for the action, which are sent to the connector. The `params` are handled as Mustache templates and passed a default set of context. + Params map[string]interface{} `json:"params"` + + // UseAlertDataForTemplate Indicates whether to use alert data as a template. + UseAlertDataForTemplate *bool `json:"use_alert_data_for_template,omitempty"` + + // Uuid A universally unique identifier (UUID) for the action. + Uuid *string `json:"uuid,omitempty"` + } `json:"actions"` + ActiveSnoozes *[]string `json:"active_snoozes,omitempty"` + + // AlertDelay Indicates that an alert occurs only when the specified number of consecutive runs met the rule conditions. + AlertDelay *struct { + // Active The number of consecutive runs that must meet the rule conditions. + Active float32 `json:"active"` + } `json:"alert_delay,omitempty"` + + // ApiKeyCreatedByUser Indicates whether the API key that is associated with the rule was created by the user. + ApiKeyCreatedByUser *bool `json:"api_key_created_by_user"` + + // ApiKeyOwner The owner of the API key that is associated with the rule and used to run background tasks. + ApiKeyOwner *string `json:"api_key_owner"` + Artifacts *struct { + Dashboards *[]struct { + Id string `json:"id"` + } `json:"dashboards,omitempty"` + InvestigationGuide *struct { + // Blob User-created content that describes alert causes and remediation. + Blob string `json:"blob"` + } `json:"investigation_guide,omitempty"` + } `json:"artifacts,omitempty"` + + // Consumer The name of the application or feature that owns the rule. For example: `alerts`, `apm`, `discover`, `infrastructure`, `logs`, `metrics`, `ml`, `monitoring`, `securitySolution`, `siem`, `stackAlerts`, or `uptime`. + Consumer string `json:"consumer"` + + // CreatedAt The date and time that the rule was created. 
+ CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the rule. + CreatedBy *string `json:"created_by"` + + // Enabled Indicates whether you want to run the rule on an interval basis after it is created. + Enabled bool `json:"enabled"` + ExecutionStatus struct { + Error *struct { + // Message Error message. + Message string `json:"message"` + + // Reason Reason for error. + Reason GetAlertingRuleId200ExecutionStatusErrorReason `json:"reason"` + } `json:"error,omitempty"` + + // LastDuration Duration of last execution of the rule. + LastDuration *float32 `json:"last_duration,omitempty"` + + // LastExecutionDate The date and time when rule was executed last. + LastExecutionDate string `json:"last_execution_date"` + + // Status Status of rule execution. + Status GetAlertingRuleId200ExecutionStatusStatus `json:"status"` + Warning *struct { + // Message Warning message. + Message string `json:"message"` + + // Reason Reason for warning. + Reason GetAlertingRuleId200ExecutionStatusWarningReason `json:"reason"` + } `json:"warning,omitempty"` + } `json:"execution_status"` + + // Flapping When flapping detection is turned on, alerts that switch quickly between active and recovered states are identified as “flapping” and notifications are reduced. + Flapping *struct { + // LookBackWindow The minimum number of runs in which the threshold must be met. + LookBackWindow float32 `json:"look_back_window"` + + // StatusChangeThreshold The minimum number of times an alert must switch states in the look back window. + StatusChangeThreshold float32 `json:"status_change_threshold"` + } `json:"flapping"` + + // Id The identifier for the rule. + Id string `json:"id"` + + // IsSnoozedUntil The date when the rule will no longer be snoozed. + IsSnoozedUntil *string `json:"is_snoozed_until"` + LastRun *struct { + AlertsCount struct { + // Active Number of active alerts during last run. + Active *float32 `json:"active"` + + // Ignored Number of ignored alerts during last run. + Ignored *float32 `json:"ignored"` + + // New Number of new alerts during last run. + New *float32 `json:"new"` + + // Recovered Number of recovered alerts during last run. + Recovered *float32 `json:"recovered"` + } `json:"alerts_count"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome GetAlertingRuleId200LastRunOutcome `json:"outcome"` + OutcomeMsg *[]string `json:"outcome_msg"` + + // OutcomeOrder Order of the outcome. + OutcomeOrder *float32 `json:"outcome_order,omitempty"` + + // Warning Warning of last rule execution. + Warning *GetAlertingRuleId200LastRunWarning `json:"warning"` + } `json:"last_run"` + MappedParams *map[string]interface{} `json:"mapped_params,omitempty"` + + // Monitoring Monitoring details of the rule. + Monitoring *struct { + // Run Rule run details. + Run struct { + // CalculatedMetrics Calculation of different percentiles and success ratio. + CalculatedMetrics struct { + P50 *float32 `json:"p50,omitempty"` + P95 *float32 `json:"p95,omitempty"` + P99 *float32 `json:"p99,omitempty"` + SuccessRatio float32 `json:"success_ratio"` + } `json:"calculated_metrics"` + + // History History of the rule run. + History []struct { + // Duration Duration of the rule run. + Duration *float32 `json:"duration,omitempty"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. 
+ Outcome *GetAlertingRuleId200MonitoringRunHistoryOutcome `json:"outcome,omitempty"` + + // Success Indicates whether the rule run was successful. + Success bool `json:"success"` + + // Timestamp Time of rule run. + Timestamp float32 `json:"timestamp"` + } `json:"history"` + LastRun struct { + Metrics struct { + // Duration Duration of most recent rule run. + Duration *float32 `json:"duration,omitempty"` + + // GapDurationS Duration in seconds of rule run gap. + GapDurationS *float32 `json:"gap_duration_s"` + GapRange *struct { + // Gte End of the gap range. + Gte string `json:"gte"` + + // Lte Start of the gap range. + Lte string `json:"lte"` + } `json:"gap_range"` + + // TotalAlertsCreated Total number of alerts created during last rule run. + TotalAlertsCreated *float32 `json:"total_alerts_created"` + + // TotalAlertsDetected Total number of alerts detected during last rule run. + TotalAlertsDetected *float32 `json:"total_alerts_detected"` + + // TotalIndexingDurationMs Total time spent indexing documents during last rule run in milliseconds. + TotalIndexingDurationMs *float32 `json:"total_indexing_duration_ms"` + + // TotalSearchDurationMs Total time spent performing Elasticsearch searches as measured by Kibana; includes network latency and time spent serializing or deserializing the request and response. + TotalSearchDurationMs *float32 `json:"total_search_duration_ms"` + } `json:"metrics"` + + // Timestamp Time of the most recent rule run. + Timestamp string `json:"timestamp"` + } `json:"last_run"` + } `json:"run"` + } `json:"monitoring,omitempty"` + + // MuteAll Indicates whether all alerts are muted. + MuteAll bool `json:"mute_all"` + MutedAlertIds []string `json:"muted_alert_ids"` + + // Name The name of the rule. + Name string `json:"name"` + + // NextRun Date and time of the next run of the rule. + NextRun *string `json:"next_run"` + + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen *GetAlertingRuleId200NotifyWhen `json:"notify_when"` + + // Params The parameters for the rule. + Params map[string]interface{} `json:"params"` + + // Revision The rule revision number. + Revision float32 `json:"revision"` + + // RuleTypeId The rule type identifier. + RuleTypeId string `json:"rule_type_id"` + + // Running Indicates whether the rule is running. + Running *bool `json:"running"` + Schedule struct { + // Interval The interval is specified in seconds, minutes, hours, or days. + Interval string `json:"interval"` + } `json:"schedule"` + + // ScheduledTaskId Identifier of the scheduled task. + ScheduledTaskId *string `json:"scheduled_task_id,omitempty"` + SnoozeSchedule *[]struct { + // Duration Duration of the rule snooze schedule. + Duration float32 `json:"duration"` + + // Id Identifier of the rule snooze schedule. 
+ Id *string `json:"id,omitempty"` + RRule struct { + Byhour *[]float32 `json:"byhour"` + Byminute *[]float32 `json:"byminute"` + Bymonth *[]float32 `json:"bymonth"` + Bymonthday *[]float32 `json:"bymonthday"` + Bysecond *[]float32 `json:"bysecond"` + Bysetpos *[]float32 `json:"bysetpos"` + Byweekday *[]GetAlertingRuleId_200_SnoozeSchedule_RRule_Byweekday_Item `json:"byweekday"` + Byweekno *[]float32 `json:"byweekno"` + Byyearday *[]float32 `json:"byyearday"` + + // Count Number of times the rule should recur until it stops. + Count *float32 `json:"count,omitempty"` + + // Dtstart Rule start date in Coordinated Universal Time (UTC). + Dtstart string `json:"dtstart"` + + // Freq Indicates frequency of the rule. Options are YEARLY, MONTHLY, WEEKLY, DAILY. + Freq *GetAlertingRuleId200SnoozeScheduleRRuleFreq `json:"freq,omitempty"` + + // Interval Indicates the interval of frequency. For example, 1 and YEARLY is every 1 year, 2 and WEEKLY is every 2 weeks. + Interval *float32 `json:"interval,omitempty"` + + // Tzid Indicates timezone abbreviation. + Tzid string `json:"tzid"` + + // Until Recur the rule until this date. + Until *string `json:"until,omitempty"` + + // Wkst Indicates the start of week, defaults to Monday. + Wkst *GetAlertingRuleId200SnoozeScheduleRRuleWkst `json:"wkst,omitempty"` + } `json:"rRule"` + SkipRecurrences *[]string `json:"skipRecurrences,omitempty"` + } `json:"snooze_schedule,omitempty"` + Tags []string `json:"tags"` + + // Throttle Deprecated in 8.13.0. Use the `throttle` property in the action `frequency` object instead. The throttle interval, which defines how often an alert generates repeated actions. NOTE: You cannot specify the throttle interval at both the rule and action level. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Throttle *string `json:"throttle"` + + // UpdatedAt The date and time that the rule was updated most recently. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that updated this rule most recently. + UpdatedBy *string `json:"updated_by"` + + // ViewInAppRelativeUrl Relative URL to view rule in the app. 
+ ViewInAppRelativeUrl *string `json:"view_in_app_relative_url"` + } +} +type GetAlertingRuleId200ActionsAlertsFilterQueryFiltersStateStore string +type GetAlertingRuleId200ActionsAlertsFilterTimeframeDays int +type GetAlertingRuleId200ActionsFrequencyNotifyWhen string +type GetAlertingRuleId200ExecutionStatusErrorReason string +type GetAlertingRuleId200ExecutionStatusStatus string +type GetAlertingRuleId200ExecutionStatusWarningReason string +type GetAlertingRuleId200LastRunOutcome string +type GetAlertingRuleId200LastRunWarning string +type GetAlertingRuleId200MonitoringRunHistoryOutcome string +type GetAlertingRuleId200NotifyWhen string +type GetAlertingRuleId200SnoozeScheduleRRuleByweekday0 = string +type GetAlertingRuleId200SnoozeScheduleRRuleByweekday1 = float32 +type GetAlertingRuleId_200_SnoozeSchedule_RRule_Byweekday_Item struct { + union json.RawMessage +} +type GetAlertingRuleId200SnoozeScheduleRRuleFreq int +type GetAlertingRuleId200SnoozeScheduleRRuleWkst string + +// Status returns HTTPResponse.Status +func (r GetAlertingRuleIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAlertingRuleIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAlertingRuleIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Actions []struct { + // AlertsFilter Defines a period that limits whether the action runs. + AlertsFilter *struct { + Query *struct { + // Dsl A filter written in Elasticsearch Query Domain Specific Language (DSL). + Dsl *string `json:"dsl,omitempty"` + + // Filters A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package. + Filters []struct { + State *struct { + // Store A filter can be either specific to an application context or applied globally. + Store PostAlertingRuleId200ActionsAlertsFilterQueryFiltersStateStore `json:"store"` + } `json:"$state,omitempty"` + Meta map[string]interface{} `json:"meta"` + Query *map[string]interface{} `json:"query,omitempty"` + } `json:"filters"` + + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query,omitempty"` + Timeframe *struct { + // Days Defines the days of the week that the action can run, represented as an array of numbers. For example, `1` represents Monday. An empty array is equivalent to specifying all the days of the week. + Days []PostAlertingRuleId200ActionsAlertsFilterTimeframeDays `json:"days"` + Hours struct { + // End The end of the time frame in 24-hour notation (`hh:mm`). + End string `json:"end"` + + // Start The start of the time frame in 24-hour notation (`hh:mm`). + Start string `json:"start"` + } `json:"hours"` + + // Timezone The ISO time zone for the `hours` values. Values such as `UTC` and `UTC+1` also work but lack built-in daylight savings time support and are not recommended. + Timezone string `json:"timezone"` + } `json:"timeframe,omitempty"` + } `json:"alerts_filter,omitempty"` + + // ConnectorTypeId The type of connector. This property appears in responses but cannot be set in requests. + ConnectorTypeId string `json:"connector_type_id"` + Frequency *struct { + // NotifyWhen Indicates how often alerts generate actions. 
Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen PostAlertingRuleId200ActionsFrequencyNotifyWhen `json:"notify_when"` + + // Summary Indicates whether the action is a summary. + Summary bool `json:"summary"` + + // Throttle The throttle interval, which defines how often an alert generates repeated actions. It is specified in seconds, minutes, hours, or days and is applicable only if 'notify_when' is set to 'onThrottleInterval'. NOTE: You cannot specify the throttle interval at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + Throttle *string `json:"throttle"` + } `json:"frequency,omitempty"` + + // Group The group name, which affects when the action runs (for example, when the threshold is met or when the alert is recovered). Each rule type has a list of valid action group names. If you don't need to group actions, set to `default`. + Group *string `json:"group,omitempty"` + + // Id The identifier for the connector saved object. + Id string `json:"id"` + + // Params The parameters for the action, which are sent to the connector. The `params` are handled as Mustache templates and passed a default set of context. + Params map[string]interface{} `json:"params"` + + // UseAlertDataForTemplate Indicates whether to use alert data as a template. + UseAlertDataForTemplate *bool `json:"use_alert_data_for_template,omitempty"` + + // Uuid A universally unique identifier (UUID) for the action. + Uuid *string `json:"uuid,omitempty"` + } `json:"actions"` + ActiveSnoozes *[]string `json:"active_snoozes,omitempty"` + + // AlertDelay Indicates that an alert occurs only when the specified number of consecutive runs met the rule conditions. + AlertDelay *struct { + // Active The number of consecutive runs that must meet the rule conditions. + Active float32 `json:"active"` + } `json:"alert_delay,omitempty"` + + // ApiKeyCreatedByUser Indicates whether the API key that is associated with the rule was created by the user. + ApiKeyCreatedByUser *bool `json:"api_key_created_by_user"` + + // ApiKeyOwner The owner of the API key that is associated with the rule and used to run background tasks. + ApiKeyOwner *string `json:"api_key_owner"` + Artifacts *struct { + Dashboards *[]struct { + Id string `json:"id"` + } `json:"dashboards,omitempty"` + InvestigationGuide *struct { + // Blob User-created content that describes alert causes and remediation. + Blob string `json:"blob"` + } `json:"investigation_guide,omitempty"` + } `json:"artifacts,omitempty"` + + // Consumer The name of the application or feature that owns the rule. For example: `alerts`, `apm`, `discover`, `infrastructure`, `logs`, `metrics`, `ml`, `monitoring`, `securitySolution`, `siem`, `stackAlerts`, or `uptime`. + Consumer string `json:"consumer"` + + // CreatedAt The date and time that the rule was created. 
+ CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the rule. + CreatedBy *string `json:"created_by"` + + // Enabled Indicates whether you want to run the rule on an interval basis after it is created. + Enabled bool `json:"enabled"` + ExecutionStatus struct { + Error *struct { + // Message Error message. + Message string `json:"message"` + + // Reason Reason for error. + Reason PostAlertingRuleId200ExecutionStatusErrorReason `json:"reason"` + } `json:"error,omitempty"` + + // LastDuration Duration of last execution of the rule. + LastDuration *float32 `json:"last_duration,omitempty"` + + // LastExecutionDate The date and time when rule was executed last. + LastExecutionDate string `json:"last_execution_date"` + + // Status Status of rule execution. + Status PostAlertingRuleId200ExecutionStatusStatus `json:"status"` + Warning *struct { + // Message Warning message. + Message string `json:"message"` + + // Reason Reason for warning. + Reason PostAlertingRuleId200ExecutionStatusWarningReason `json:"reason"` + } `json:"warning,omitempty"` + } `json:"execution_status"` + + // Flapping When flapping detection is turned on, alerts that switch quickly between active and recovered states are identified as “flapping” and notifications are reduced. + Flapping *struct { + // LookBackWindow The minimum number of runs in which the threshold must be met. + LookBackWindow float32 `json:"look_back_window"` + + // StatusChangeThreshold The minimum number of times an alert must switch states in the look back window. + StatusChangeThreshold float32 `json:"status_change_threshold"` + } `json:"flapping"` + + // Id The identifier for the rule. + Id string `json:"id"` + + // IsSnoozedUntil The date when the rule will no longer be snoozed. + IsSnoozedUntil *string `json:"is_snoozed_until"` + LastRun *struct { + AlertsCount struct { + // Active Number of active alerts during last run. + Active *float32 `json:"active"` + + // Ignored Number of ignored alerts during last run. + Ignored *float32 `json:"ignored"` + + // New Number of new alerts during last run. + New *float32 `json:"new"` + + // Recovered Number of recovered alerts during last run. + Recovered *float32 `json:"recovered"` + } `json:"alerts_count"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome PostAlertingRuleId200LastRunOutcome `json:"outcome"` + OutcomeMsg *[]string `json:"outcome_msg"` + + // OutcomeOrder Order of the outcome. + OutcomeOrder *float32 `json:"outcome_order,omitempty"` + + // Warning Warning of last rule execution. + Warning *PostAlertingRuleId200LastRunWarning `json:"warning"` + } `json:"last_run"` + MappedParams *map[string]interface{} `json:"mapped_params,omitempty"` + + // Monitoring Monitoring details of the rule. + Monitoring *struct { + // Run Rule run details. + Run struct { + // CalculatedMetrics Calculation of different percentiles and success ratio. + CalculatedMetrics struct { + P50 *float32 `json:"p50,omitempty"` + P95 *float32 `json:"p95,omitempty"` + P99 *float32 `json:"p99,omitempty"` + SuccessRatio float32 `json:"success_ratio"` + } `json:"calculated_metrics"` + + // History History of the rule run. + History []struct { + // Duration Duration of the rule run. + Duration *float32 `json:"duration,omitempty"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. 
+ Outcome *PostAlertingRuleId200MonitoringRunHistoryOutcome `json:"outcome,omitempty"` + + // Success Indicates whether the rule run was successful. + Success bool `json:"success"` + + // Timestamp Time of rule run. + Timestamp float32 `json:"timestamp"` + } `json:"history"` + LastRun struct { + Metrics struct { + // Duration Duration of most recent rule run. + Duration *float32 `json:"duration,omitempty"` + + // GapDurationS Duration in seconds of rule run gap. + GapDurationS *float32 `json:"gap_duration_s"` + GapRange *struct { + // Gte End of the gap range. + Gte string `json:"gte"` + + // Lte Start of the gap range. + Lte string `json:"lte"` + } `json:"gap_range"` + + // TotalAlertsCreated Total number of alerts created during last rule run. + TotalAlertsCreated *float32 `json:"total_alerts_created"` + + // TotalAlertsDetected Total number of alerts detected during last rule run. + TotalAlertsDetected *float32 `json:"total_alerts_detected"` + + // TotalIndexingDurationMs Total time spent indexing documents during last rule run in milliseconds. + TotalIndexingDurationMs *float32 `json:"total_indexing_duration_ms"` + + // TotalSearchDurationMs Total time spent performing Elasticsearch searches as measured by Kibana; includes network latency and time spent serializing or deserializing the request and response. + TotalSearchDurationMs *float32 `json:"total_search_duration_ms"` + } `json:"metrics"` + + // Timestamp Time of the most recent rule run. + Timestamp string `json:"timestamp"` + } `json:"last_run"` + } `json:"run"` + } `json:"monitoring,omitempty"` + + // MuteAll Indicates whether all alerts are muted. + MuteAll bool `json:"mute_all"` + MutedAlertIds []string `json:"muted_alert_ids"` + + // Name The name of the rule. + Name string `json:"name"` + + // NextRun Date and time of the next run of the rule. + NextRun *string `json:"next_run"` + + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen *PostAlertingRuleId200NotifyWhen `json:"notify_when"` + + // Params The parameters for the rule. + Params map[string]interface{} `json:"params"` + + // Revision The rule revision number. + Revision float32 `json:"revision"` + + // RuleTypeId The rule type identifier. + RuleTypeId string `json:"rule_type_id"` + + // Running Indicates whether the rule is running. + Running *bool `json:"running"` + Schedule struct { + // Interval The interval is specified in seconds, minutes, hours, or days. + Interval string `json:"interval"` + } `json:"schedule"` + + // ScheduledTaskId Identifier of the scheduled task. + ScheduledTaskId *string `json:"scheduled_task_id,omitempty"` + SnoozeSchedule *[]struct { + // Duration Duration of the rule snooze schedule. + Duration float32 `json:"duration"` + + // Id Identifier of the rule snooze schedule. 
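In the same spirit, a hedged sketch for the optional monitoring block declared above: every pointer is nil-checked before the calculated success ratio and last-run metrics are reported (same assumed kbapi package and non-nil JSON200 as in the previous sketch):

package kbapi

import "fmt"

// reportRunMetrics prints the calculated success ratio and the metrics of the
// most recent run. The monitoring block and most of its metrics are optional,
// so each pointer is checked before use.
func reportRunMetrics(resp *PostAlertingRuleIdResponse) {
	if resp.JSON200 == nil || resp.JSON200.Monitoring == nil {
		return
	}
	run := resp.JSON200.Monitoring.Run
	fmt.Printf("success ratio %.2f over %d recorded runs\n",
		run.CalculatedMetrics.SuccessRatio, len(run.History))
	m := run.LastRun.Metrics
	if m.Duration != nil {
		fmt.Printf("last run duration: %v\n", *m.Duration)
	}
	if m.TotalSearchDurationMs != nil {
		fmt.Printf("search time: %.0f ms\n", *m.TotalSearchDurationMs)
	}
}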
+ Id *string `json:"id,omitempty"` + RRule struct { + Byhour *[]float32 `json:"byhour"` + Byminute *[]float32 `json:"byminute"` + Bymonth *[]float32 `json:"bymonth"` + Bymonthday *[]float32 `json:"bymonthday"` + Bysecond *[]float32 `json:"bysecond"` + Bysetpos *[]float32 `json:"bysetpos"` + Byweekday *[]PostAlertingRuleId_200_SnoozeSchedule_RRule_Byweekday_Item `json:"byweekday"` + Byweekno *[]float32 `json:"byweekno"` + Byyearday *[]float32 `json:"byyearday"` + + // Count Number of times the rule should recur until it stops. + Count *float32 `json:"count,omitempty"` + + // Dtstart Rule start date in Coordinated Universal Time (UTC). + Dtstart string `json:"dtstart"` + + // Freq Indicates frequency of the rule. Options are YEARLY, MONTHLY, WEEKLY, DAILY. + Freq *PostAlertingRuleId200SnoozeScheduleRRuleFreq `json:"freq,omitempty"` + + // Interval Indicates the interval of frequency. For example, 1 and YEARLY is every 1 year, 2 and WEEKLY is every 2 weeks. + Interval *float32 `json:"interval,omitempty"` + + // Tzid Indicates timezone abbreviation. + Tzid string `json:"tzid"` + + // Until Recur the rule until this date. + Until *string `json:"until,omitempty"` + + // Wkst Indicates the start of week, defaults to Monday. + Wkst *PostAlertingRuleId200SnoozeScheduleRRuleWkst `json:"wkst,omitempty"` + } `json:"rRule"` + SkipRecurrences *[]string `json:"skipRecurrences,omitempty"` + } `json:"snooze_schedule,omitempty"` + Tags []string `json:"tags"` + + // Throttle Deprecated in 8.13.0. Use the `throttle` property in the action `frequency` object instead. The throttle interval, which defines how often an alert generates repeated actions. NOTE: You cannot specify the throttle interval at both the rule and action level. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Throttle *string `json:"throttle"` + + // UpdatedAt The date and time that the rule was updated most recently. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that updated this rule most recently. + UpdatedBy *string `json:"updated_by"` + + // ViewInAppRelativeUrl Relative URL to view rule in the app. 
+ ViewInAppRelativeUrl *string `json:"view_in_app_relative_url"` + } +} +type PostAlertingRuleId200ActionsAlertsFilterQueryFiltersStateStore string +type PostAlertingRuleId200ActionsAlertsFilterTimeframeDays int +type PostAlertingRuleId200ActionsFrequencyNotifyWhen string +type PostAlertingRuleId200ExecutionStatusErrorReason string +type PostAlertingRuleId200ExecutionStatusStatus string +type PostAlertingRuleId200ExecutionStatusWarningReason string +type PostAlertingRuleId200LastRunOutcome string +type PostAlertingRuleId200LastRunWarning string +type PostAlertingRuleId200MonitoringRunHistoryOutcome string +type PostAlertingRuleId200NotifyWhen string +type PostAlertingRuleId200SnoozeScheduleRRuleByweekday0 = string +type PostAlertingRuleId200SnoozeScheduleRRuleByweekday1 = float32 +type PostAlertingRuleId_200_SnoozeSchedule_RRule_Byweekday_Item struct { + union json.RawMessage +} +type PostAlertingRuleId200SnoozeScheduleRRuleFreq int +type PostAlertingRuleId200SnoozeScheduleRRuleWkst string + +// Status returns HTTPResponse.Status +func (r PostAlertingRuleIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAlertingRuleIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutAlertingRuleIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Actions []struct { + // AlertsFilter Defines a period that limits whether the action runs. + AlertsFilter *struct { + Query *struct { + // Dsl A filter written in Elasticsearch Query Domain Specific Language (DSL). + Dsl *string `json:"dsl,omitempty"` + + // Filters A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package. + Filters []struct { + State *struct { + // Store A filter can be either specific to an application context or applied globally. + Store PutAlertingRuleId200ActionsAlertsFilterQueryFiltersStateStore `json:"store"` + } `json:"$state,omitempty"` + Meta map[string]interface{} `json:"meta"` + Query *map[string]interface{} `json:"query,omitempty"` + } `json:"filters"` + + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query,omitempty"` + Timeframe *struct { + // Days Defines the days of the week that the action can run, represented as an array of numbers. For example, `1` represents Monday. An empty array is equivalent to specifying all the days of the week. + Days []PutAlertingRuleId200ActionsAlertsFilterTimeframeDays `json:"days"` + Hours struct { + // End The end of the time frame in 24-hour notation (`hh:mm`). + End string `json:"end"` + + // Start The start of the time frame in 24-hour notation (`hh:mm`). + Start string `json:"start"` + } `json:"hours"` + + // Timezone The ISO time zone for the `hours` values. Values such as `UTC` and `UTC+1` also work but lack built-in daylight savings time support and are not recommended. + Timezone string `json:"timezone"` + } `json:"timeframe,omitempty"` + } `json:"alerts_filter,omitempty"` + + // ConnectorTypeId The type of connector. This property appears in responses but cannot be set in requests. + ConnectorTypeId string `json:"connector_type_id"` + Frequency *struct { + // NotifyWhen Indicates how often alerts generate actions. 
Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen PutAlertingRuleId200ActionsFrequencyNotifyWhen `json:"notify_when"` + + // Summary Indicates whether the action is a summary. + Summary bool `json:"summary"` + + // Throttle The throttle interval, which defines how often an alert generates repeated actions. It is specified in seconds, minutes, hours, or days and is applicable only if 'notify_when' is set to 'onThrottleInterval'. NOTE: You cannot specify the throttle interval at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + Throttle *string `json:"throttle"` + } `json:"frequency,omitempty"` + + // Group The group name, which affects when the action runs (for example, when the threshold is met or when the alert is recovered). Each rule type has a list of valid action group names. If you don't need to group actions, set to `default`. + Group *string `json:"group,omitempty"` + + // Id The identifier for the connector saved object. + Id string `json:"id"` + + // Params The parameters for the action, which are sent to the connector. The `params` are handled as Mustache templates and passed a default set of context. + Params map[string]interface{} `json:"params"` + + // UseAlertDataForTemplate Indicates whether to use alert data as a template. + UseAlertDataForTemplate *bool `json:"use_alert_data_for_template,omitempty"` + + // Uuid A universally unique identifier (UUID) for the action. + Uuid *string `json:"uuid,omitempty"` + } `json:"actions"` + ActiveSnoozes *[]string `json:"active_snoozes,omitempty"` + + // AlertDelay Indicates that an alert occurs only when the specified number of consecutive runs met the rule conditions. + AlertDelay *struct { + // Active The number of consecutive runs that must meet the rule conditions. + Active float32 `json:"active"` + } `json:"alert_delay,omitempty"` + + // ApiKeyCreatedByUser Indicates whether the API key that is associated with the rule was created by the user. + ApiKeyCreatedByUser *bool `json:"api_key_created_by_user"` + + // ApiKeyOwner The owner of the API key that is associated with the rule and used to run background tasks. + ApiKeyOwner *string `json:"api_key_owner"` + Artifacts *struct { + Dashboards *[]struct { + Id string `json:"id"` + } `json:"dashboards,omitempty"` + InvestigationGuide *struct { + // Blob User-created content that describes alert causes and remdiation. + Blob string `json:"blob"` + } `json:"investigation_guide,omitempty"` + } `json:"artifacts,omitempty"` + + // Consumer The name of the application or feature that owns the rule. For example: `alerts`, `apm`, `discover`, `infrastructure`, `logs`, `metrics`, `ml`, `monitoring`, `securitySolution`, `siem`, `stackAlerts`, or `uptime`. + Consumer string `json:"consumer"` + + // CreatedAt The date and time that the rule was created. 
+ CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the rule. + CreatedBy *string `json:"created_by"` + + // Enabled Indicates whether you want to run the rule on an interval basis after it is created. + Enabled bool `json:"enabled"` + ExecutionStatus struct { + Error *struct { + // Message Error message. + Message string `json:"message"` + + // Reason Reason for error. + Reason PutAlertingRuleId200ExecutionStatusErrorReason `json:"reason"` + } `json:"error,omitempty"` + + // LastDuration Duration of last execution of the rule. + LastDuration *float32 `json:"last_duration,omitempty"` + + // LastExecutionDate The date and time when rule was executed last. + LastExecutionDate string `json:"last_execution_date"` + + // Status Status of rule execution. + Status PutAlertingRuleId200ExecutionStatusStatus `json:"status"` + Warning *struct { + // Message Warning message. + Message string `json:"message"` + + // Reason Reason for warning. + Reason PutAlertingRuleId200ExecutionStatusWarningReason `json:"reason"` + } `json:"warning,omitempty"` + } `json:"execution_status"` + + // Flapping When flapping detection is turned on, alerts that switch quickly between active and recovered states are identified as “flapping” and notifications are reduced. + Flapping *struct { + // LookBackWindow The minimum number of runs in which the threshold must be met. + LookBackWindow float32 `json:"look_back_window"` + + // StatusChangeThreshold The minimum number of times an alert must switch states in the look back window. + StatusChangeThreshold float32 `json:"status_change_threshold"` + } `json:"flapping"` + + // Id The identifier for the rule. + Id string `json:"id"` + + // IsSnoozedUntil The date when the rule will no longer be snoozed. + IsSnoozedUntil *string `json:"is_snoozed_until"` + LastRun *struct { + AlertsCount struct { + // Active Number of active alerts during last run. + Active *float32 `json:"active"` + + // Ignored Number of ignored alerts during last run. + Ignored *float32 `json:"ignored"` + + // New Number of new alerts during last run. + New *float32 `json:"new"` + + // Recovered Number of recovered alerts during last run. + Recovered *float32 `json:"recovered"` + } `json:"alerts_count"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome PutAlertingRuleId200LastRunOutcome `json:"outcome"` + OutcomeMsg *[]string `json:"outcome_msg"` + + // OutcomeOrder Order of the outcome. + OutcomeOrder *float32 `json:"outcome_order,omitempty"` + + // Warning Warning of last rule execution. + Warning *PutAlertingRuleId200LastRunWarning `json:"warning"` + } `json:"last_run"` + MappedParams *map[string]interface{} `json:"mapped_params,omitempty"` + + // Monitoring Monitoring details of the rule. + Monitoring *struct { + // Run Rule run details. + Run struct { + // CalculatedMetrics Calculation of different percentiles and success ratio. + CalculatedMetrics struct { + P50 *float32 `json:"p50,omitempty"` + P95 *float32 `json:"p95,omitempty"` + P99 *float32 `json:"p99,omitempty"` + SuccessRatio float32 `json:"success_ratio"` + } `json:"calculated_metrics"` + + // History History of the rule run. + History []struct { + // Duration Duration of the rule run. + Duration *float32 `json:"duration,omitempty"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. 
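Because the execution_status block carries optional error and warning sub-objects, a short sketch of condensing it into a single line may be useful; it only touches fields declared above and again assumes the kbapi package name and a non-nil JSON200:

package kbapi

import "fmt"

// execStatusSummary condenses the execution status of a parsed update
// response, appending the error or warning reason when one is present.
func execStatusSummary(resp *PutAlertingRuleIdResponse) string {
	es := resp.JSON200.ExecutionStatus
	s := fmt.Sprintf("rule %s: %s (last run %s)", resp.JSON200.Id, es.Status, es.LastExecutionDate)
	if es.Error != nil {
		s += fmt.Sprintf(", error %s: %s", es.Error.Reason, es.Error.Message)
	}
	if es.Warning != nil {
		s += fmt.Sprintf(", warning %s: %s", es.Warning.Reason, es.Warning.Message)
	}
	return s
}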
+ Outcome *PutAlertingRuleId200MonitoringRunHistoryOutcome `json:"outcome,omitempty"` + + // Success Indicates whether the rule run was successful. + Success bool `json:"success"` + + // Timestamp Time of rule run. + Timestamp float32 `json:"timestamp"` + } `json:"history"` + LastRun struct { + Metrics struct { + // Duration Duration of most recent rule run. + Duration *float32 `json:"duration,omitempty"` + + // GapDurationS Duration in seconds of rule run gap. + GapDurationS *float32 `json:"gap_duration_s"` + GapRange *struct { + // Gte End of the gap range. + Gte string `json:"gte"` + + // Lte Start of the gap range. + Lte string `json:"lte"` + } `json:"gap_range"` + + // TotalAlertsCreated Total number of alerts created during last rule run. + TotalAlertsCreated *float32 `json:"total_alerts_created"` + + // TotalAlertsDetected Total number of alerts detected during last rule run. + TotalAlertsDetected *float32 `json:"total_alerts_detected"` + + // TotalIndexingDurationMs Total time spent indexing documents during last rule run in milliseconds. + TotalIndexingDurationMs *float32 `json:"total_indexing_duration_ms"` + + // TotalSearchDurationMs Total time spent performing Elasticsearch searches as measured by Kibana; includes network latency and time spent serializing or deserializing the request and response. + TotalSearchDurationMs *float32 `json:"total_search_duration_ms"` + } `json:"metrics"` + + // Timestamp Time of the most recent rule run. + Timestamp string `json:"timestamp"` + } `json:"last_run"` + } `json:"run"` + } `json:"monitoring,omitempty"` + + // MuteAll Indicates whether all alerts are muted. + MuteAll bool `json:"mute_all"` + MutedAlertIds []string `json:"muted_alert_ids"` + + // Name The name of the rule. + Name string `json:"name"` + + // NextRun Date and time of the next run of the rule. + NextRun *string `json:"next_run"` + + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen *PutAlertingRuleId200NotifyWhen `json:"notify_when"` + + // Params The parameters for the rule. + Params map[string]interface{} `json:"params"` + + // Revision The rule revision number. + Revision float32 `json:"revision"` + + // RuleTypeId The rule type identifier. + RuleTypeId string `json:"rule_type_id"` + + // Running Indicates whether the rule is running. + Running *bool `json:"running"` + Schedule struct { + // Interval The interval is specified in seconds, minutes, hours, or days. + Interval string `json:"interval"` + } `json:"schedule"` + + // ScheduledTaskId Identifier of the scheduled task. + ScheduledTaskId *string `json:"scheduled_task_id,omitempty"` + SnoozeSchedule *[]struct { + // Duration Duration of the rule snooze schedule. + Duration float32 `json:"duration"` + + // Id Identifier of the rule snooze schedule. 
+ Id *string `json:"id,omitempty"` + RRule struct { + Byhour *[]float32 `json:"byhour"` + Byminute *[]float32 `json:"byminute"` + Bymonth *[]float32 `json:"bymonth"` + Bymonthday *[]float32 `json:"bymonthday"` + Bysecond *[]float32 `json:"bysecond"` + Bysetpos *[]float32 `json:"bysetpos"` + Byweekday *[]PutAlertingRuleId_200_SnoozeSchedule_RRule_Byweekday_Item `json:"byweekday"` + Byweekno *[]float32 `json:"byweekno"` + Byyearday *[]float32 `json:"byyearday"` + + // Count Number of times the rule should recur until it stops. + Count *float32 `json:"count,omitempty"` + + // Dtstart Rule start date in Coordinated Universal Time (UTC). + Dtstart string `json:"dtstart"` + + // Freq Indicates frequency of the rule. Options are YEARLY, MONTHLY, WEEKLY, DAILY. + Freq *PutAlertingRuleId200SnoozeScheduleRRuleFreq `json:"freq,omitempty"` + + // Interval Indicates the interval of frequency. For example, 1 and YEARLY is every 1 year, 2 and WEEKLY is every 2 weeks. + Interval *float32 `json:"interval,omitempty"` + + // Tzid Indicates timezone abbreviation. + Tzid string `json:"tzid"` + + // Until Recur the rule until this date. + Until *string `json:"until,omitempty"` + + // Wkst Indicates the start of week, defaults to Monday. + Wkst *PutAlertingRuleId200SnoozeScheduleRRuleWkst `json:"wkst,omitempty"` + } `json:"rRule"` + SkipRecurrences *[]string `json:"skipRecurrences,omitempty"` + } `json:"snooze_schedule,omitempty"` + Tags []string `json:"tags"` + + // Throttle Deprecated in 8.13.0. Use the `throttle` property in the action `frequency` object instead. The throttle interval, which defines how often an alert generates repeated actions. NOTE: You cannot specify the throttle interval at both the rule and action level. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Throttle *string `json:"throttle"` + + // UpdatedAt The date and time that the rule was updated most recently. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that updated this rule most recently. + UpdatedBy *string `json:"updated_by"` + + // ViewInAppRelativeUrl Relative URL to view rule in the app. 
+ ViewInAppRelativeUrl *string `json:"view_in_app_relative_url"` + } +} +type PutAlertingRuleId200ActionsAlertsFilterQueryFiltersStateStore string +type PutAlertingRuleId200ActionsAlertsFilterTimeframeDays int +type PutAlertingRuleId200ActionsFrequencyNotifyWhen string +type PutAlertingRuleId200ExecutionStatusErrorReason string +type PutAlertingRuleId200ExecutionStatusStatus string +type PutAlertingRuleId200ExecutionStatusWarningReason string +type PutAlertingRuleId200LastRunOutcome string +type PutAlertingRuleId200LastRunWarning string +type PutAlertingRuleId200MonitoringRunHistoryOutcome string +type PutAlertingRuleId200NotifyWhen string +type PutAlertingRuleId200SnoozeScheduleRRuleByweekday0 = string +type PutAlertingRuleId200SnoozeScheduleRRuleByweekday1 = float32 +type PutAlertingRuleId_200_SnoozeSchedule_RRule_Byweekday_Item struct { + union json.RawMessage +} +type PutAlertingRuleId200SnoozeScheduleRRuleFreq int +type PutAlertingRuleId200SnoozeScheduleRRuleWkst string + +// Status returns HTTPResponse.Status +func (r PutAlertingRuleIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutAlertingRuleIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAlertingRuleIdDisableResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r PostAlertingRuleIdDisableResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAlertingRuleIdDisableResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAlertingRuleIdEnableResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r PostAlertingRuleIdEnableResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAlertingRuleIdEnableResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAlertingRuleIdMuteAllResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r PostAlertingRuleIdMuteAllResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAlertingRuleIdMuteAllResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAlertingRuleIdUnmuteAllResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r PostAlertingRuleIdUnmuteAllResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAlertingRuleIdUnmuteAllResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAlertingRuleIdUpdateApiKeyResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r PostAlertingRuleIdUpdateApiKeyResponse) Status() string { + if 
r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAlertingRuleIdUpdateApiKeyResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAlertingRuleIdSnoozeScheduleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Body struct { + Schedule struct { + Custom *struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom,omitempty"` + + // Id Identifier of the snooze schedule. 
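The snooze-schedule response nests its custom window a few levels deep (JSON200 → Body → Schedule → Custom), so a sketch of reading it with the necessary nil checks follows; the package name is assumed and JSON200 is taken to be non-nil:

package kbapi

import "fmt"

// describeSnooze prints the custom snooze window returned when a snooze
// schedule is created. Custom and Recurring are optional and nil-checked.
func describeSnooze(resp *PostAlertingRuleIdSnoozeScheduleResponse) {
	c := resp.JSON200.Body.Schedule.Custom
	if c == nil {
		return
	}
	fmt.Printf("snooze starts %s and lasts %s\n", c.Start, c.Duration)
	if c.Recurring != nil && c.Recurring.Every != nil {
		fmt.Printf("repeats every %s\n", *c.Recurring.Every)
	}
}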
+ Id string `json:"id"` + } `json:"schedule"` + } `json:"body"` + } +} + +// Status returns HTTPResponse.Status +func (r PostAlertingRuleIdSnoozeScheduleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAlertingRuleIdSnoozeScheduleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r DeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAlertingRuleRuleIdAlertAlertIdMuteResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r PostAlertingRuleRuleIdAlertAlertIdMuteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAlertingRuleRuleIdAlertAlertIdMuteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostAlertingRuleRuleIdAlertAlertIdUnmuteResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r PostAlertingRuleRuleIdAlertAlertIdUnmuteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostAlertingRuleRuleIdAlertAlertIdUnmuteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetRuleTypesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + // ActionGroups An explicit list of groups for which the rule type can schedule actions, each with the action group's unique ID and human readable name. Rule actions validation uses this configuration to ensure that groups are valid. + ActionGroups *[]struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"action_groups,omitempty"` + + // ActionVariables A list of action variables that the rule type makes available via context and state in action parameter templates, and a short human readable description. When you create a rule in Kibana, it uses this information to prompt you for these variables in action parameter editors. + ActionVariables *struct { + Context *[]struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + UseWithTripleBracesInTemplates *bool `json:"useWithTripleBracesInTemplates,omitempty"` + } `json:"context,omitempty"` + Params *[]struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"params,omitempty"` + State *[]struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"state,omitempty"` + } `json:"action_variables,omitempty"` + + // Alerts Details for writing alerts as data documents for this rule type. 
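Every generated *Response type above repeats the same Status/StatusCode pair, so callers can treat them uniformly behind a small interface. The following sketch is one way to do that; the 2xx check is a caller-side convention, not something the generated code enforces, and the package name is assumed as before:

package kbapi

import (
	"fmt"
	"net/http"
)

// statusCoder is satisfied by every generated *Response type in this file,
// since each carries value-receiver Status and StatusCode helpers.
type statusCoder interface {
	Status() string
	StatusCode() int
}

// ensureOK turns a missing or non-2xx HTTP response into an error; a
// StatusCode of 0 means no HTTP response was captured at all.
func ensureOK(r statusCoder) error {
	if c := r.StatusCode(); c < http.StatusOK || c >= http.StatusMultipleChoices {
		return fmt.Errorf("unexpected response: %d %s", c, r.Status())
	}
	return nil
}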
+ Alerts *struct { + // Context The namespace for this rule type. + Context *GetRuleTypes200AlertsContext `json:"context,omitempty"` + + // Dynamic Indicates whether new fields are added dynamically. + Dynamic *GetRuleTypes200AlertsDynamic `json:"dynamic,omitempty"` + + // IsSpaceAware Indicates whether the alerts are space-aware. If true, space-specific alert indices are used. + IsSpaceAware *bool `json:"isSpaceAware,omitempty"` + Mappings *struct { + // FieldMap Mapping information for each field supported in alerts as data documents for this rule type. For more information about mapping parameters, refer to the Elasticsearch documentation. + FieldMap *map[string]AlertingFieldmapProperties `json:"fieldMap,omitempty"` + } `json:"mappings,omitempty"` + + // SecondaryAlias A secondary alias. It is typically used to support the signals alias for detection rules. + SecondaryAlias *string `json:"secondaryAlias,omitempty"` + + // ShouldWrite Indicates whether the rule should write out alerts as data. + ShouldWrite *bool `json:"shouldWrite,omitempty"` + + // UseEcs Indicates whether to include the ECS component template for the alerts. + UseEcs *bool `json:"useEcs,omitempty"` + + // UseLegacyAlerts Indicates whether to include the legacy component template for the alerts. + UseLegacyAlerts *bool `json:"useLegacyAlerts,omitempty"` + } `json:"alerts,omitempty"` + + // AuthorizedConsumers The list of the plugins IDs that have access to the rule type. + AuthorizedConsumers *struct { + Alerts *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"alerts,omitempty"` + Apm *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"apm,omitempty"` + Discover *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"discover,omitempty"` + Infrastructure *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"infrastructure,omitempty"` + Logs *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"logs,omitempty"` + Ml *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"ml,omitempty"` + Monitoring *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"monitoring,omitempty"` + Siem *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"siem,omitempty"` + Slo *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"slo,omitempty"` + StackAlerts *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"stackAlerts,omitempty"` + Uptime *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"uptime,omitempty"` + } `json:"authorized_consumers,omitempty"` + + // Category The rule category, which is used by features such as category-specific maintenance windows. + Category *GetRuleTypes200Category `json:"category,omitempty"` + + // DefaultActionGroupId The default identifier for the rule type group. + DefaultActionGroupId *string `json:"default_action_group_id,omitempty"` + + // DoesSetRecoveryContext Indicates whether the rule passes context variables to its recovery action. + DoesSetRecoveryContext *bool `json:"does_set_recovery_context,omitempty"` + + // EnabledInLicense Indicates whether the rule type is enabled or disabled based on the subscription. 
+ EnabledInLicense *bool `json:"enabled_in_license,omitempty"` + + // HasAlertsMappings Indicates whether the rule type has custom mappings for the alert data. + HasAlertsMappings *bool `json:"has_alerts_mappings,omitempty"` + HasFieldsForAAD *bool `json:"has_fields_for_a_a_d,omitempty"` + + // Id The unique identifier for the rule type. + Id *string `json:"id,omitempty"` + + // IsExportable Indicates whether the rule type is exportable in **Stack Management > Saved Objects**. + IsExportable *bool `json:"is_exportable,omitempty"` + + // MinimumLicenseRequired The subscriptions required to use the rule type. + MinimumLicenseRequired *string `json:"minimum_license_required,omitempty"` + + // Name The descriptive name of the rule type. + Name *string `json:"name,omitempty"` + + // Producer An identifier for the application that produces this rule type. + Producer *string `json:"producer,omitempty"` + + // RecoveryActionGroup An action group to use when an alert goes from an active state to an inactive one. + RecoveryActionGroup *struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"recovery_action_group,omitempty"` + RuleTaskTimeout *string `json:"rule_task_timeout,omitempty"` + } + JSON401 *Alerting401Response +} +type GetRuleTypes200AlertsContext string +type GetRuleTypes200AlertsDynamic string +type GetRuleTypes200Category string + +// Status returns HTTPResponse.Status +func (r GetRuleTypesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetRuleTypesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetAlertingRulesFindResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Actions []struct { + // AlertsFilter Defines a period that limits whether the action runs. + AlertsFilter *struct { + Query *struct { + // Dsl A filter written in Elasticsearch Query Domain Specific Language (DSL). + Dsl *string `json:"dsl,omitempty"` + + // Filters A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package. + Filters []struct { + State *struct { + // Store A filter can be either specific to an application context or applied globally. + Store GetAlertingRulesFind200ActionsAlertsFilterQueryFiltersStateStore `json:"store"` + } `json:"$state,omitempty"` + Meta map[string]interface{} `json:"meta"` + Query *map[string]interface{} `json:"query,omitempty"` + } `json:"filters"` + + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query,omitempty"` + Timeframe *struct { + // Days Defines the days of the week that the action can run, represented as an array of numbers. For example, `1` represents Monday. An empty array is equivalent to specifying all the days of the week. + Days []GetAlertingRulesFind200ActionsAlertsFilterTimeframeDays `json:"days"` + Hours struct { + // End The end of the time frame in 24-hour notation (`hh:mm`). + End string `json:"end"` + + // Start The start of the time frame in 24-hour notation (`hh:mm`). + Start string `json:"start"` + } `json:"hours"` + + // Timezone The ISO time zone for the `hours` values. Values such as `UTC` and `UTC+1` also work but lack built-in daylight savings time support and are not recommended. 
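For the rule-types listing, the JSON200 payload is a slice in which every field is optional, so a hedged sketch of filtering it down to the rule types enabled under the current license looks like this (assumed package name as above):

package kbapi

import "fmt"

// listLicensedRuleTypes prints the identifier, name, and minimum license of
// every rule type the current subscription enables, skipping entries that do
// not carry the fields needed for the report.
func listLicensedRuleTypes(resp *GetRuleTypesResponse) {
	if resp.JSON200 == nil {
		return
	}
	for _, rt := range *resp.JSON200 {
		if rt.EnabledInLicense == nil || !*rt.EnabledInLicense || rt.Id == nil || rt.Name == nil {
			continue
		}
		license := "unknown"
		if rt.MinimumLicenseRequired != nil {
			license = *rt.MinimumLicenseRequired
		}
		fmt.Printf("%s (%s), minimum license: %s\n", *rt.Name, *rt.Id, license)
	}
}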
+ Timezone string `json:"timezone"` + } `json:"timeframe,omitempty"` + } `json:"alerts_filter,omitempty"` + + // ConnectorTypeId The type of connector. This property appears in responses but cannot be set in requests. + ConnectorTypeId string `json:"connector_type_id"` + Frequency *struct { + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen GetAlertingRulesFind200ActionsFrequencyNotifyWhen `json:"notify_when"` + + // Summary Indicates whether the action is a summary. + Summary bool `json:"summary"` + + // Throttle The throttle interval, which defines how often an alert generates repeated actions. It is specified in seconds, minutes, hours, or days and is applicable only if 'notify_when' is set to 'onThrottleInterval'. NOTE: You cannot specify the throttle interval at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + Throttle *string `json:"throttle"` + } `json:"frequency,omitempty"` + + // Group The group name, which affects when the action runs (for example, when the threshold is met or when the alert is recovered). Each rule type has a list of valid action group names. If you don't need to group actions, set to `default`. + Group *string `json:"group,omitempty"` + + // Id The identifier for the connector saved object. + Id string `json:"id"` + + // Params The parameters for the action, which are sent to the connector. The `params` are handled as Mustache templates and passed a default set of context. + Params map[string]interface{} `json:"params"` + + // UseAlertDataForTemplate Indicates whether to use alert data as a template. + UseAlertDataForTemplate *bool `json:"use_alert_data_for_template,omitempty"` + + // Uuid A universally unique identifier (UUID) for the action. + Uuid *string `json:"uuid,omitempty"` + } `json:"actions"` + ActiveSnoozes *[]string `json:"active_snoozes,omitempty"` + + // AlertDelay Indicates that an alert occurs only when the specified number of consecutive runs met the rule conditions. + AlertDelay *struct { + // Active The number of consecutive runs that must meet the rule conditions. + Active float32 `json:"active"` + } `json:"alert_delay,omitempty"` + + // ApiKeyCreatedByUser Indicates whether the API key that is associated with the rule was created by the user. + ApiKeyCreatedByUser *bool `json:"api_key_created_by_user"` + + // ApiKeyOwner The owner of the API key that is associated with the rule and used to run background tasks. + ApiKeyOwner *string `json:"api_key_owner"` + Artifacts *struct { + Dashboards *[]struct { + Id string `json:"id"` + } `json:"dashboards,omitempty"` + InvestigationGuide *struct { + // Blob User-created content that describes alert causes and remdiation. 
+ Blob string `json:"blob"` + } `json:"investigation_guide,omitempty"` + } `json:"artifacts,omitempty"` + + // Consumer The name of the application or feature that owns the rule. For example: `alerts`, `apm`, `discover`, `infrastructure`, `logs`, `metrics`, `ml`, `monitoring`, `securitySolution`, `siem`, `stackAlerts`, or `uptime`. + Consumer string `json:"consumer"` + + // CreatedAt The date and time that the rule was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the rule. + CreatedBy *string `json:"created_by"` + + // Enabled Indicates whether you want to run the rule on an interval basis after it is created. + Enabled bool `json:"enabled"` + ExecutionStatus struct { + Error *struct { + // Message Error message. + Message string `json:"message"` + + // Reason Reason for error. + Reason GetAlertingRulesFind200ExecutionStatusErrorReason `json:"reason"` + } `json:"error,omitempty"` + + // LastDuration Duration of last execution of the rule. + LastDuration *float32 `json:"last_duration,omitempty"` + + // LastExecutionDate The date and time when rule was executed last. + LastExecutionDate string `json:"last_execution_date"` + + // Status Status of rule execution. + Status GetAlertingRulesFind200ExecutionStatusStatus `json:"status"` + Warning *struct { + // Message Warning message. + Message string `json:"message"` + + // Reason Reason for warning. + Reason GetAlertingRulesFind200ExecutionStatusWarningReason `json:"reason"` + } `json:"warning,omitempty"` + } `json:"execution_status"` + + // Flapping When flapping detection is turned on, alerts that switch quickly between active and recovered states are identified as “flapping” and notifications are reduced. + Flapping *struct { + // LookBackWindow The minimum number of runs in which the threshold must be met. + LookBackWindow float32 `json:"look_back_window"` + + // StatusChangeThreshold The minimum number of times an alert must switch states in the look back window. + StatusChangeThreshold float32 `json:"status_change_threshold"` + } `json:"flapping"` + + // Id The identifier for the rule. + Id string `json:"id"` + + // IsSnoozedUntil The date when the rule will no longer be snoozed. + IsSnoozedUntil *string `json:"is_snoozed_until"` + LastRun *struct { + AlertsCount struct { + // Active Number of active alerts during last run. + Active *float32 `json:"active"` + + // Ignored Number of ignored alerts during last run. + Ignored *float32 `json:"ignored"` + + // New Number of new alerts during last run. + New *float32 `json:"new"` + + // Recovered Number of recovered alerts during last run. + Recovered *float32 `json:"recovered"` + } `json:"alerts_count"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome GetAlertingRulesFind200LastRunOutcome `json:"outcome"` + OutcomeMsg *[]string `json:"outcome_msg"` + + // OutcomeOrder Order of the outcome. + OutcomeOrder *float32 `json:"outcome_order,omitempty"` + + // Warning Warning of last rule execution. + Warning *GetAlertingRulesFind200LastRunWarning `json:"warning"` + } `json:"last_run"` + MappedParams *map[string]interface{} `json:"mapped_params,omitempty"` + + // Monitoring Monitoring details of the rule. + Monitoring *struct { + // Run Rule run details. + Run struct { + // CalculatedMetrics Calculation of different percentiles and success ratio. 
+ CalculatedMetrics struct { + P50 *float32 `json:"p50,omitempty"` + P95 *float32 `json:"p95,omitempty"` + P99 *float32 `json:"p99,omitempty"` + SuccessRatio float32 `json:"success_ratio"` + } `json:"calculated_metrics"` + + // History History of the rule run. + History []struct { + // Duration Duration of the rule run. + Duration *float32 `json:"duration,omitempty"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome *GetAlertingRulesFind200MonitoringRunHistoryOutcome `json:"outcome,omitempty"` + + // Success Indicates whether the rule run was successful. + Success bool `json:"success"` + + // Timestamp Time of rule run. + Timestamp float32 `json:"timestamp"` + } `json:"history"` + LastRun struct { + Metrics struct { + // Duration Duration of most recent rule run. + Duration *float32 `json:"duration,omitempty"` + + // GapDurationS Duration in seconds of rule run gap. + GapDurationS *float32 `json:"gap_duration_s"` + GapRange *struct { + // Gte End of the gap range. + Gte string `json:"gte"` + + // Lte Start of the gap range. + Lte string `json:"lte"` + } `json:"gap_range"` + + // TotalAlertsCreated Total number of alerts created during last rule run. + TotalAlertsCreated *float32 `json:"total_alerts_created"` + + // TotalAlertsDetected Total number of alerts detected during last rule run. + TotalAlertsDetected *float32 `json:"total_alerts_detected"` + + // TotalIndexingDurationMs Total time spent indexing documents during last rule run in milliseconds. + TotalIndexingDurationMs *float32 `json:"total_indexing_duration_ms"` + + // TotalSearchDurationMs Total time spent performing Elasticsearch searches as measured by Kibana; includes network latency and time spent serializing or deserializing the request and response. + TotalSearchDurationMs *float32 `json:"total_search_duration_ms"` + } `json:"metrics"` + + // Timestamp Time of the most recent rule run. + Timestamp string `json:"timestamp"` + } `json:"last_run"` + } `json:"run"` + } `json:"monitoring,omitempty"` + + // MuteAll Indicates whether all alerts are muted. + MuteAll bool `json:"mute_all"` + MutedAlertIds []string `json:"muted_alert_ids"` + + // Name The name of the rule. + Name string `json:"name"` + + // NextRun Date and time of the next run of the rule. + NextRun *string `json:"next_run"` + + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen *GetAlertingRulesFind200NotifyWhen `json:"notify_when"` + + // Params The parameters for the rule. + Params map[string]interface{} `json:"params"` + + // Revision The rule revision number. + Revision float32 `json:"revision"` + + // RuleTypeId The rule type identifier. + RuleTypeId string `json:"rule_type_id"` + + // Running Indicates whether the rule is running. + Running *bool `json:"running"` + Schedule struct { + // Interval The interval is specified in seconds, minutes, hours, or days. 
+ Interval string `json:"interval"` + } `json:"schedule"` + + // ScheduledTaskId Identifier of the scheduled task. + ScheduledTaskId *string `json:"scheduled_task_id,omitempty"` + SnoozeSchedule *[]struct { + // Duration Duration of the rule snooze schedule. + Duration float32 `json:"duration"` + + // Id Identifier of the rule snooze schedule. + Id *string `json:"id,omitempty"` + RRule struct { + Byhour *[]float32 `json:"byhour"` + Byminute *[]float32 `json:"byminute"` + Bymonth *[]float32 `json:"bymonth"` + Bymonthday *[]float32 `json:"bymonthday"` + Bysecond *[]float32 `json:"bysecond"` + Bysetpos *[]float32 `json:"bysetpos"` + Byweekday *[]GetAlertingRulesFind_200_SnoozeSchedule_RRule_Byweekday_Item `json:"byweekday"` + Byweekno *[]float32 `json:"byweekno"` + Byyearday *[]float32 `json:"byyearday"` + + // Count Number of times the rule should recur until it stops. + Count *float32 `json:"count,omitempty"` + + // Dtstart Rule start date in Coordinated Universal Time (UTC). + Dtstart string `json:"dtstart"` + + // Freq Indicates frequency of the rule. Options are YEARLY, MONTHLY, WEEKLY, DAILY. + Freq *GetAlertingRulesFind200SnoozeScheduleRRuleFreq `json:"freq,omitempty"` + + // Interval Indicates the interval of frequency. For example, 1 and YEARLY is every 1 year, 2 and WEEKLY is every 2 weeks. + Interval *float32 `json:"interval,omitempty"` + + // Tzid Indicates timezone abbreviation. + Tzid string `json:"tzid"` + + // Until Recur the rule until this date. + Until *string `json:"until,omitempty"` + + // Wkst Indicates the start of week, defaults to Monday. + Wkst *GetAlertingRulesFind200SnoozeScheduleRRuleWkst `json:"wkst,omitempty"` + } `json:"rRule"` + SkipRecurrences *[]string `json:"skipRecurrences,omitempty"` + } `json:"snooze_schedule,omitempty"` + Tags []string `json:"tags"` + + // Throttle Deprecated in 8.13.0. Use the `throttle` property in the action `frequency` object instead. The throttle interval, which defines how often an alert generates repeated actions. NOTE: You cannot specify the throttle interval at both the rule and action level. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Throttle *string `json:"throttle"` + + // UpdatedAt The date and time that the rule was updated most recently. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that updated this rule most recently. + UpdatedBy *string `json:"updated_by"` + + // ViewInAppRelativeUrl Relative URL to view rule in the app. 
+ ViewInAppRelativeUrl *string `json:"view_in_app_relative_url"` + } +} +type GetAlertingRulesFind200ActionsAlertsFilterQueryFiltersStateStore string +type GetAlertingRulesFind200ActionsAlertsFilterTimeframeDays int +type GetAlertingRulesFind200ActionsFrequencyNotifyWhen string +type GetAlertingRulesFind200ExecutionStatusErrorReason string +type GetAlertingRulesFind200ExecutionStatusStatus string +type GetAlertingRulesFind200ExecutionStatusWarningReason string +type GetAlertingRulesFind200LastRunOutcome string +type GetAlertingRulesFind200LastRunWarning string +type GetAlertingRulesFind200MonitoringRunHistoryOutcome string +type GetAlertingRulesFind200NotifyWhen string +type GetAlertingRulesFind200SnoozeScheduleRRuleByweekday0 = string +type GetAlertingRulesFind200SnoozeScheduleRRuleByweekday1 = float32 +type GetAlertingRulesFind_200_SnoozeSchedule_RRule_Byweekday_Item struct { + union json.RawMessage +} +type GetAlertingRulesFind200SnoozeScheduleRRuleFreq int +type GetAlertingRulesFind200SnoozeScheduleRRuleWkst string + +// Status returns HTTPResponse.Status +func (r GetAlertingRulesFindResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAlertingRulesFindResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateAgentKeyResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *APMUIAgentKeysResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON403 *APMUI403Response + JSON500 *APMUI500Response +} + +// Status returns HTTPResponse.Status +func (r CreateAgentKeyResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateAgentKeyResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type SaveApmServerSchemaResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON403 *APMUI403Response + JSON404 *APMUI404Response +} + +// Status returns HTTPResponse.Status +func (r SaveApmServerSchemaResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r SaveApmServerSchemaResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateAnnotationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *APMUICreateAnnotationResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON403 *APMUI403Response + JSON404 *APMUI404Response +} + +// Status returns HTTPResponse.Status +func (r CreateAnnotationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateAnnotationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetAnnotationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *APMUIAnnotationSearchResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON500 *APMUI500Response +} + +// Status returns HTTPResponse.Status +func (r GetAnnotationResponse) Status() string 
{ + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAnnotationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteAgentConfigurationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *APMUIDeleteAgentConfigurationsResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON403 *APMUI403Response + JSON404 *APMUI404Response +} + +// Status returns HTTPResponse.Status +func (r DeleteAgentConfigurationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteAgentConfigurationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetAgentConfigurationsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *APMUIAgentConfigurationsResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON404 *APMUI404Response +} + +// Status returns HTTPResponse.Status +func (r GetAgentConfigurationsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAgentConfigurationsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateUpdateAgentConfigurationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON403 *APMUI403Response + JSON404 *APMUI404Response +} + +// Status returns HTTPResponse.Status +func (r CreateUpdateAgentConfigurationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateUpdateAgentConfigurationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetAgentNameForServiceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *APMUIServiceAgentNameResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON404 *APMUI404Response +} + +// Status returns HTTPResponse.Status +func (r GetAgentNameForServiceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAgentNameForServiceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetEnvironmentsForServiceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *APMUIServiceEnvironmentsResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON404 *APMUI404Response +} + +// Status returns HTTPResponse.Status +func (r GetEnvironmentsForServiceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetEnvironmentsForServiceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type SearchSingleConfigurationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 
*APMUISearchAgentConfigurationResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON404 *APMUI404Response +} + +// Status returns HTTPResponse.Status +func (r SearchSingleConfigurationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r SearchSingleConfigurationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetSingleAgentConfigurationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *APMUISingleAgentConfigurationResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON404 *APMUI404Response +} + +// Status returns HTTPResponse.Status +func (r GetSingleAgentConfigurationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetSingleAgentConfigurationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetSourceMapsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *APMUISourceMapsResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON500 *APMUI500Response + JSON501 *APMUI501Response +} + +// Status returns HTTPResponse.Status +func (r GetSourceMapsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetSourceMapsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UploadSourceMapResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *APMUIUploadSourceMapsResponse + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON403 *APMUI403Response + JSON500 *APMUI500Response + JSON501 *APMUI501Response +} + +// Status returns HTTPResponse.Status +func (r UploadSourceMapResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UploadSourceMapResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteSourceMapResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *APMUI400Response + JSON401 *APMUI401Response + JSON403 *APMUI403Response + JSON500 *APMUI500Response + JSON501 *APMUI501Response +} + +// Status returns HTTPResponse.Status +func (r DeleteSourceMapResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteSourceMapResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteAssetCriticalityRecordResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Deleted True if the record was deleted or false if the record did not exist. 
+ Deleted bool `json:"deleted"` + Record *SecurityEntityAnalyticsAPIAssetCriticalityRecord `json:"record,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteAssetCriticalityRecordResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteAssetCriticalityRecordResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetAssetCriticalityRecordResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEntityAnalyticsAPIAssetCriticalityRecord +} + +// Status returns HTTPResponse.Status +func (r GetAssetCriticalityRecordResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetAssetCriticalityRecordResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateAssetCriticalityRecordResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEntityAnalyticsAPIAssetCriticalityRecord +} + +// Status returns HTTPResponse.Status +func (r CreateAssetCriticalityRecordResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateAssetCriticalityRecordResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type BulkUpsertAssetCriticalityRecordsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Errors []SecurityEntityAnalyticsAPIAssetCriticalityBulkUploadErrorItem `json:"errors"` + Stats SecurityEntityAnalyticsAPIAssetCriticalityBulkUploadStats `json:"stats"` + } +} + +// Status returns HTTPResponse.Status +func (r BulkUpsertAssetCriticalityRecordsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r BulkUpsertAssetCriticalityRecordsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FindAssetCriticalityRecordsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Page int `json:"page"` + PerPage int `json:"per_page"` + Records []SecurityEntityAnalyticsAPIAssetCriticalityRecord `json:"records"` + Total int `json:"total"` + } +} + +// Status returns HTTPResponse.Status +func (r FindAssetCriticalityRecordsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FindAssetCriticalityRecordsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteCaseDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r DeleteCaseDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteCaseDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type 
UpdateCaseDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]CasesCaseResponseProperties + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r UpdateCaseDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateCaseDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateCaseDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CasesCaseResponseProperties + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r CreateCaseDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateCaseDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FindCasesDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Cases *[]CasesCaseResponseProperties `json:"cases,omitempty"` + CountClosedCases *int `json:"count_closed_cases,omitempty"` + CountInProgressCases *int `json:"count_in_progress_cases,omitempty"` + CountOpenCases *int `json:"count_open_cases,omitempty"` + Page *int `json:"page,omitempty"` + PerPage *int `json:"per_page,omitempty"` + Total *int `json:"total,omitempty"` + } + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r FindCasesDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FindCasesDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetCasesByAlertDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + // Id The case identifier. + Id *string `json:"id,omitempty"` + + // Title The case title. + Title *string `json:"title,omitempty"` + } + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r GetCasesByAlertDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetCasesByAlertDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetCaseConfigurationDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + // ClosureType Indicates whether a case is automatically closed when it is pushed to external systems (`close-by-pushing`) or not automatically closed (`close-by-user`). + ClosureType *CasesClosureTypes `json:"closure_type,omitempty"` + Connector *struct { + // Fields The fields specified in the case configuration are not used and are not propagated to individual cases, therefore it is recommended to set it to `null`. + Fields *map[string]interface{} `json:"fields"` + + // Id The identifier for the connector. If you do not want a default connector, use `none`. To retrieve connector IDs, use the find connectors API. + Id *string `json:"id,omitempty"` + + // Name The name of the connector. If you do not want a default connector, use `none`. 
To retrieve connector names, use the find connectors API. + Name *string `json:"name,omitempty"` + + // Type The type of connector. + Type *CasesConnectorTypes `json:"type,omitempty"` + } `json:"connector,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + CreatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"created_by,omitempty"` + + // CustomFields Custom fields configuration details. + CustomFields *[]struct { + // DefaultValue A default value for the custom field. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. + DefaultValue *GetCaseConfigurationDefaultSpace_200_CustomFields_DefaultValue `json:"defaultValue,omitempty"` + + // Key A unique key for the custom field. Must be lower case and composed only of a-z, 0-9, '_', and '-' characters. It is used in API calls to refer to a specific custom field. + Key *string `json:"key,omitempty"` + + // Label The custom field label that is displayed in the case. + Label *string `json:"label,omitempty"` + + // Required Indicates whether the field is required. If `false`, the custom field can be set to null or omitted when a case is created or updated. + Required *bool `json:"required,omitempty"` + + // Type The type of the custom field. + Type *GetCaseConfigurationDefaultSpace200CustomFieldsType `json:"type,omitempty"` + } `json:"customFields,omitempty"` + Error *string `json:"error"` + Id *string `json:"id,omitempty"` + Mappings *[]struct { + ActionType *string `json:"action_type,omitempty"` + Source *string `json:"source,omitempty"` + Target *string `json:"target,omitempty"` + } `json:"mappings,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner *CasesOwner `json:"owner,omitempty"` + Templates *CasesTemplates `json:"templates,omitempty"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"updated_by"` + Version *string `json:"version,omitempty"` + } + JSON401 *Cases4xxResponse +} +type GetCaseConfigurationDefaultSpace200CustomFieldsDefaultValue0 = string +type GetCaseConfigurationDefaultSpace200CustomFieldsDefaultValue1 = bool +type GetCaseConfigurationDefaultSpace_200_CustomFields_DefaultValue struct { + union json.RawMessage +} +type GetCaseConfigurationDefaultSpace200CustomFieldsType string + +// Status returns HTTPResponse.Status +func (r GetCaseConfigurationDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetCaseConfigurationDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type SetCaseConfigurationDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // ClosureType Indicates whether a case is automatically closed when it is pushed to external systems (`close-by-pushing`) or not automatically closed (`close-by-user`). 
+ ClosureType *CasesClosureTypes `json:"closure_type,omitempty"` + Connector *struct { + // Fields The fields specified in the case configuration are not used and are not propagated to individual cases, therefore it is recommended to set it to `null`. + Fields *map[string]interface{} `json:"fields"` + + // Id The identifier for the connector. If you do not want a default connector, use `none`. To retrieve connector IDs, use the find connectors API. + Id *string `json:"id,omitempty"` + + // Name The name of the connector. If you do not want a default connector, use `none`. To retrieve connector names, use the find connectors API. + Name *string `json:"name,omitempty"` + + // Type The type of connector. + Type *CasesConnectorTypes `json:"type,omitempty"` + } `json:"connector,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + CreatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"created_by,omitempty"` + + // CustomFields Custom fields configuration details. + CustomFields *[]struct { + // DefaultValue A default value for the custom field. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. + DefaultValue *SetCaseConfigurationDefaultSpace_200_CustomFields_DefaultValue `json:"defaultValue,omitempty"` + + // Key A unique key for the custom field. Must be lower case and composed only of a-z, 0-9, '_', and '-' characters. It is used in API calls to refer to a specific custom field. + Key *string `json:"key,omitempty"` + + // Label The custom field label that is displayed in the case. + Label *string `json:"label,omitempty"` + + // Required Indicates whether the field is required. If `false`, the custom field can be set to null or omitted when a case is created or updated. + Required *bool `json:"required,omitempty"` + + // Type The type of the custom field. + Type *SetCaseConfigurationDefaultSpace200CustomFieldsType `json:"type,omitempty"` + } `json:"customFields,omitempty"` + Error *string `json:"error"` + Id *string `json:"id,omitempty"` + Mappings *[]struct { + ActionType *string `json:"action_type,omitempty"` + Source *string `json:"source,omitempty"` + Target *string `json:"target,omitempty"` + } `json:"mappings,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. 
+ Owner *CasesOwner `json:"owner,omitempty"` + Templates *CasesTemplates `json:"templates,omitempty"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"updated_by"` + Version *string `json:"version,omitempty"` + } + JSON401 *Cases4xxResponse +} +type SetCaseConfigurationDefaultSpace200CustomFieldsDefaultValue0 = string +type SetCaseConfigurationDefaultSpace200CustomFieldsDefaultValue1 = bool +type SetCaseConfigurationDefaultSpace_200_CustomFields_DefaultValue struct { + union json.RawMessage +} +type SetCaseConfigurationDefaultSpace200CustomFieldsType string + +// Status returns HTTPResponse.Status +func (r SetCaseConfigurationDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r SetCaseConfigurationDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FindCaseConnectorsDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + // ActionTypeId The type of connector. + ActionTypeId *CasesConnectorTypes `json:"actionTypeId,omitempty"` + Config *FindCaseConnectorsDefaultSpace_200_Config `json:"config,omitempty"` + Id *string `json:"id,omitempty"` + IsDeprecated *bool `json:"isDeprecated,omitempty"` + IsMissingSecrets *bool `json:"isMissingSecrets,omitempty"` + IsPreconfigured *bool `json:"isPreconfigured,omitempty"` + Name *string `json:"name,omitempty"` + ReferencedByCount *int `json:"referencedByCount,omitempty"` + } + JSON401 *Cases4xxResponse +} +type FindCaseConnectorsDefaultSpace_200_Config struct { + ApiUrl *string `json:"apiUrl,omitempty"` + ProjectKey *string `json:"projectKey,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// Status returns HTTPResponse.Status +func (r FindCaseConnectorsDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FindCaseConnectorsDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateCaseConfigurationDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // ClosureType Indicates whether a case is automatically closed when it is pushed to external systems (`close-by-pushing`) or not automatically closed (`close-by-user`). + ClosureType *CasesClosureTypes `json:"closure_type,omitempty"` + Connector *struct { + // Fields The fields specified in the case configuration are not used and are not propagated to individual cases, therefore it is recommended to set it to `null`. + Fields *map[string]interface{} `json:"fields"` + + // Id The identifier for the connector. If you do not want a default connector, use `none`. To retrieve connector IDs, use the find connectors API. + Id *string `json:"id,omitempty"` + + // Name The name of the connector. If you do not want a default connector, use `none`. To retrieve connector names, use the find connectors API. + Name *string `json:"name,omitempty"` + + // Type The type of connector. 
+ Type *CasesConnectorTypes `json:"type,omitempty"` + } `json:"connector,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + CreatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"created_by,omitempty"` + + // CustomFields Custom fields configuration details. + CustomFields *[]struct { + // DefaultValue A default value for the custom field. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. + DefaultValue *UpdateCaseConfigurationDefaultSpace_200_CustomFields_DefaultValue `json:"defaultValue,omitempty"` + + // Key A unique key for the custom field. Must be lower case and composed only of a-z, 0-9, '_', and '-' characters. It is used in API calls to refer to a specific custom field. + Key *string `json:"key,omitempty"` + + // Label The custom field label that is displayed in the case. + Label *string `json:"label,omitempty"` + + // Required Indicates whether the field is required. If `false`, the custom field can be set to null or omitted when a case is created or updated. + Required *bool `json:"required,omitempty"` + + // Type The type of the custom field. + Type *UpdateCaseConfigurationDefaultSpace200CustomFieldsType `json:"type,omitempty"` + } `json:"customFields,omitempty"` + Error *string `json:"error"` + Id *string `json:"id,omitempty"` + Mappings *[]struct { + ActionType *string `json:"action_type,omitempty"` + Source *string `json:"source,omitempty"` + Target *string `json:"target,omitempty"` + } `json:"mappings,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. 
+ Owner *CasesOwner `json:"owner,omitempty"` + Templates *CasesTemplates `json:"templates,omitempty"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"updated_by"` + Version *string `json:"version,omitempty"` + } + JSON401 *Cases4xxResponse +} +type UpdateCaseConfigurationDefaultSpace200CustomFieldsDefaultValue0 = string +type UpdateCaseConfigurationDefaultSpace200CustomFieldsDefaultValue1 = bool +type UpdateCaseConfigurationDefaultSpace_200_CustomFields_DefaultValue struct { + union json.RawMessage +} +type UpdateCaseConfigurationDefaultSpace200CustomFieldsType string + +// Status returns HTTPResponse.Status +func (r UpdateCaseConfigurationDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateCaseConfigurationDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetCaseReportersDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r GetCaseReportersDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetCaseReportersDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetCaseTagsDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]string + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r GetCaseTagsDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetCaseTagsDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetCaseDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CasesCaseResponseProperties + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r GetCaseDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetCaseDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetCaseAlertsDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]CasesAlertResponseProperties + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r GetCaseAlertsDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetCaseAlertsDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type 
DeleteCaseCommentsDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r DeleteCaseCommentsDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteCaseCommentsDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateCaseCommentDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CasesCaseResponseProperties + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r UpdateCaseCommentDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateCaseCommentDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type AddCaseCommentDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CasesCaseResponseProperties + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r AddCaseCommentDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r AddCaseCommentDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FindCaseCommentsDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CasesCaseResponseProperties + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r FindCaseCommentsDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FindCaseCommentsDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteCaseCommentDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r DeleteCaseCommentDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteCaseCommentDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetCaseCommentDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + union json.RawMessage + } + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r GetCaseCommentDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetCaseCommentDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PushCaseDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CasesCaseResponseProperties + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r 
PushCaseDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PushCaseDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type AddCaseFileDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CasesCaseResponseProperties + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r AddCaseFileDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r AddCaseFileDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FindCaseActivityDefaultSpaceResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Page *int `json:"page,omitempty"` + PerPage *int `json:"perPage,omitempty"` + Total *int `json:"total,omitempty"` + UserActions *[]CasesUserActionsFindResponseProperties `json:"userActions,omitempty"` + } + JSON401 *Cases4xxResponse +} + +// Status returns HTTPResponse.Status +func (r FindCaseActivityDefaultSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FindCaseActivityDefaultSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateFieldsMetadataDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Acknowledged *bool `json:"acknowledged,omitempty"` + } + JSON400 *DataViews400Response +} + +// Status returns HTTPResponse.Status +func (r UpdateFieldsMetadataDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateFieldsMetadataDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateRuntimeFieldDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} +} + +// Status returns HTTPResponse.Status +func (r CreateRuntimeFieldDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateRuntimeFieldDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateUpdateRuntimeFieldDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + DataView *map[string]interface{} `json:"data_view,omitempty"` + Fields *[]map[string]interface{} `json:"fields,omitempty"` + } + JSON400 *DataViews400Response +} + +// Status returns HTTPResponse.Status +func (r CreateUpdateRuntimeFieldDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateUpdateRuntimeFieldDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteRuntimeFieldDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + 
JSON404 *DataViews404Response +} + +// Status returns HTTPResponse.Status +func (r DeleteRuntimeFieldDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteRuntimeFieldDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetRuntimeFieldDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + DataView *map[string]interface{} `json:"data_view,omitempty"` + Fields *[]map[string]interface{} `json:"fields,omitempty"` + } + JSON404 *DataViews404Response +} + +// Status returns HTTPResponse.Status +func (r GetRuntimeFieldDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetRuntimeFieldDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateRuntimeFieldDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *DataViews400Response +} + +// Status returns HTTPResponse.Status +func (r UpdateRuntimeFieldDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateRuntimeFieldDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetDefaultDataViewDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + DataViewId *string `json:"data_view_id,omitempty"` + } + JSON400 *DataViews400Response +} + +// Status returns HTTPResponse.Status +func (r GetDefaultDataViewDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetDefaultDataViewDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type SetDefaultDatailViewDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Acknowledged *bool `json:"acknowledged,omitempty"` + } + JSON400 *DataViews400Response +} + +// Status returns HTTPResponse.Status +func (r SetDefaultDatailViewDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r SetDefaultDatailViewDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type SwapDataViewsDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + DeleteStatus *struct { + DeletePerformed *bool `json:"deletePerformed,omitempty"` + RemainingRefs *int `json:"remainingRefs,omitempty"` + } `json:"deleteStatus,omitempty"` + Result *[]struct { + // Id A saved object identifier. + Id *string `json:"id,omitempty"` + + // Type The saved object type. 
+ Type *string `json:"type,omitempty"` + } `json:"result,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r SwapDataViewsDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r SwapDataViewsDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PreviewSwapDataViewsDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Result *[]struct { + // Id A saved object identifier. + Id *string `json:"id,omitempty"` + + // Type The saved object type. + Type *string `json:"type,omitempty"` + } `json:"result,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PreviewSwapDataViewsDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PreviewSwapDataViewsDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteAlertsIndexResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Acknowledged bool `json:"acknowledged"` + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON403 *SecurityDetectionsAPISiemErrorResponse + JSON404 *string + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r DeleteAlertsIndexResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteAlertsIndexResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadAlertsIndexResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + IndexMappingOutdated *bool `json:"index_mapping_outdated"` + Name string `json:"name"` + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON403 *SecurityDetectionsAPISiemErrorResponse + JSON404 *SecurityDetectionsAPISiemErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r ReadAlertsIndexResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ReadAlertsIndexResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateAlertsIndexResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Acknowledged bool `json:"acknowledged"` + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON403 *SecurityDetectionsAPISiemErrorResponse + JSON404 *SecurityDetectionsAPISiemErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r CreateAlertsIndexResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateAlertsIndexResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadPrivilegesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + HasEncryptionKey bool `json:"has_encryption_key"` + 
IsAuthenticated bool `json:"is_authenticated"` + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r ReadPrivilegesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ReadPrivilegesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteRuleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityDetectionsAPIRuleResponse +} + +// Status returns HTTPResponse.Status +func (r DeleteRuleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteRuleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadRuleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityDetectionsAPIRuleResponse +} + +// Status returns HTTPResponse.Status +func (r ReadRuleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ReadRuleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PatchRuleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityDetectionsAPIRuleResponse +} + +// Status returns HTTPResponse.Status +func (r PatchRuleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PatchRuleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateRuleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityDetectionsAPIRuleResponse +} + +// Status returns HTTPResponse.Status +func (r CreateRuleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateRuleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateRuleResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityDetectionsAPIRuleResponse +} + +// Status returns HTTPResponse.Status +func (r UpdateRuleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateRuleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PerformRulesBulkActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + union json.RawMessage + } +} + +// Status returns HTTPResponse.Status +func (r PerformRulesBulkActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PerformRulesBulkActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ExportRulesResponse struct { + Body []byte + 
HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r ExportRulesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ExportRulesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FindRulesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Data []SecurityDetectionsAPIRuleResponse `json:"data"` + Page int `json:"page"` + PerPage int `json:"perPage"` + Total int `json:"total"` + } +} + +// Status returns HTTPResponse.Status +func (r FindRulesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FindRulesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ImportRulesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + ActionConnectorsErrors []SecurityDetectionsAPIErrorSchema `json:"action_connectors_errors"` + ActionConnectorsSuccess bool `json:"action_connectors_success"` + ActionConnectorsSuccessCount int `json:"action_connectors_success_count"` + ActionConnectorsWarnings []SecurityDetectionsAPIWarningSchema `json:"action_connectors_warnings"` + Errors []SecurityDetectionsAPIErrorSchema `json:"errors"` + ExceptionsErrors []SecurityDetectionsAPIErrorSchema `json:"exceptions_errors"` + ExceptionsSuccess bool `json:"exceptions_success"` + ExceptionsSuccessCount int `json:"exceptions_success_count"` + RulesCount int `json:"rules_count"` + Success bool `json:"success"` + SuccessCount int `json:"success_count"` + } +} + +// Status returns HTTPResponse.Status +func (r ImportRulesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ImportRulesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type InstallPrebuiltRulesAndTimelinesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // RulesInstalled The number of rules installed + RulesInstalled int `json:"rules_installed"` + + // RulesUpdated The number of rules updated + RulesUpdated int `json:"rules_updated"` + + // TimelinesInstalled The number of timelines installed + TimelinesInstalled int `json:"timelines_installed"` + + // TimelinesUpdated The number of timelines updated + TimelinesUpdated int `json:"timelines_updated"` + } +} + +// Status returns HTTPResponse.Status +func (r InstallPrebuiltRulesAndTimelinesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r InstallPrebuiltRulesAndTimelinesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadPrebuiltRulesAndTimelinesStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // RulesCustomInstalled The total number of custom rules + RulesCustomInstalled int `json:"rules_custom_installed"` + + // RulesInstalled The total number of installed prebuilt rules + RulesInstalled int `json:"rules_installed"` + + // RulesNotInstalled The total number of available prebuilt rules that are 
not installed + RulesNotInstalled int `json:"rules_not_installed"` + + // RulesNotUpdated The total number of outdated prebuilt rules + RulesNotUpdated int `json:"rules_not_updated"` + + // TimelinesInstalled The total number of installed prebuilt timelines + TimelinesInstalled int `json:"timelines_installed"` + + // TimelinesNotInstalled The total number of available prebuilt timelines that are not installed + TimelinesNotInstalled int `json:"timelines_not_installed"` + + // TimelinesNotUpdated The total number of outdated prebuilt timelines + TimelinesNotUpdated int `json:"timelines_not_updated"` + } +} + +// Status returns HTTPResponse.Status +func (r ReadPrebuiltRulesAndTimelinesStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ReadPrebuiltRulesAndTimelinesStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type RulePreviewResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + IsAborted *bool `json:"isAborted,omitempty"` + Logs []SecurityDetectionsAPIRulePreviewLogs `json:"logs"` + + // PreviewId A string that does not contain only whitespace characters + PreviewId *SecurityDetectionsAPINonEmptyString `json:"previewId,omitempty"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r RulePreviewResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r RulePreviewResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateRuleExceptionListItemsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]SecurityExceptionsAPIExceptionListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r CreateRuleExceptionListItemsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateRuleExceptionListItemsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type SetAlertAssigneesResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r SetAlertAssigneesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r SetAlertAssigneesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FinalizeAlertsMigrationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]SecurityDetectionsAPIMigrationFinalizationResult + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r FinalizeAlertsMigrationResponse) Status() string { + if 
r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FinalizeAlertsMigrationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type AlertsMigrationCleanupResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]SecurityDetectionsAPIMigrationCleanupResult + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r AlertsMigrationCleanupResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r AlertsMigrationCleanupResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateAlertsMigrationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Indices []CreateAlertsMigration_200_Indices_Item `json:"indices"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} +type CreateAlertsMigration_200_Indices_Item struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r CreateAlertsMigrationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateAlertsMigrationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadAlertsMigrationStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Indices []SecurityDetectionsAPIIndexMigrationStatus `json:"indices"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r ReadAlertsMigrationStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ReadAlertsMigrationStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type SearchAlertsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r SearchAlertsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r SearchAlertsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type SetAlertsStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r SetAlertsStatusResponse) Status() string { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r SetAlertsStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type SetAlertTagsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityDetectionsAPIPlatformErrorResponse + JSON500 *SecurityDetectionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r SetAlertTagsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r SetAlertTagsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadTagsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityDetectionsAPIRuleTagArray +} + +// Status returns HTTPResponse.Status +func (r ReadTagsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ReadTagsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type RotateEncryptionKeyResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Failed Indicates the number of the saved objects that were still encrypted with one of the old encryption keys that Kibana failed to re-encrypt with the primary key. + Failed *float32 `json:"failed,omitempty"` + + // Successful Indicates the total number of all encrypted saved objects (optionally filtered by the requested `type`), regardless of the key Kibana used for encryption. + // + // NOTE: In most cases, `total` will be greater than `successful` even if `failed` is zero. The reason is that Kibana may not need or may not be able to rotate encryption keys for all encrypted saved objects. + Successful *float32 `json:"successful,omitempty"` + + // Total Indicates the total number of all encrypted saved objects (optionally filtered by the requested `type`), regardless of the key Kibana used for encryption. 
+ Total *float32 `json:"total,omitempty"` + } + JSON400 *SavedObjects400Response + JSON429 *map[string]interface{} +} + +// Status returns HTTPResponse.Status +func (r RotateEncryptionKeyResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r RotateEncryptionKeyResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointGetActionsListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIGetEndpointActionListResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointGetActionsListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointGetActionsListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointExecuteActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIExecuteRouteResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointExecuteActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointExecuteActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointGetFileActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIGetFileRouteResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointGetFileActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointGetFileActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointIsolateActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIIsolateRouteResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointIsolateActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointIsolateActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointKillProcessActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIKillProcessRouteResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointKillProcessActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointKillProcessActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointGetProcessesActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIGetProcessesRouteResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointGetProcessesActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } 
+ return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointGetProcessesActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type RunScriptActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIResponseActionCreateSuccessResponse +} + +// Status returns HTTPResponse.Status +func (r RunScriptActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r RunScriptActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointScanActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIScanRouteResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointScanActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointScanActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointGetActionsStateResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIActionStateSuccessResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointGetActionsStateResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointGetActionsStateResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointSuspendProcessActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPISuspendProcessRouteResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointSuspendProcessActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointSuspendProcessActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointUnisolateActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIUnisolateRouteResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointUnisolateActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointUnisolateActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointUploadActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIUploadRouteResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointUploadActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointUploadActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointGetActionsDetailsResponse struct { + Body []byte 
+ HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIGetEndpointActionResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointGetActionsDetailsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointGetActionsDetailsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointFileInfoResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPISuccessResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointFileInfoResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointFileInfoResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointFileDownloadResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPISuccessResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointFileDownloadResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointFileDownloadResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type EndpointGetActionsStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIActionStatusSuccessResponse +} + +// Status returns HTTPResponse.Status +func (r EndpointGetActionsStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r EndpointGetActionsStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetEndpointMetadataListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIMetadataListResponse +} + +// Status returns HTTPResponse.Status +func (r GetEndpointMetadataListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetEndpointMetadataListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetEndpointMetadataResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIEndpointMetadataResponse +} + +// Status returns HTTPResponse.Status +func (r GetEndpointMetadataResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetEndpointMetadataResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetPolicyResponseResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPISuccessResponse +} + +// Status returns HTTPResponse.Status +func (r GetPolicyResponseResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns 
HTTPResponse.StatusCode +func (r GetPolicyResponseResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetProtectionUpdatesNoteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIProtectionUpdatesNoteResponse +} + +// Status returns HTTPResponse.Status +func (r GetProtectionUpdatesNoteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetProtectionUpdatesNoteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateUpdateProtectionUpdatesNoteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointManagementAPIProtectionUpdatesNoteResponse +} + +// Status returns HTTPResponse.Status +func (r CreateUpdateProtectionUpdatesNoteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateUpdateProtectionUpdatesNoteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateEndpointListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointExceptionsAPIEndpointList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON403 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON500 *SecurityEndpointExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r CreateEndpointListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateEndpointListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteEndpointListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointExceptionsAPIEndpointListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON403 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON404 *SecurityEndpointExceptionsAPISiemErrorResponse + JSON500 *SecurityEndpointExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r DeleteEndpointListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteEndpointListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadEndpointListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]SecurityEndpointExceptionsAPIEndpointListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON403 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON404 *SecurityEndpointExceptionsAPISiemErrorResponse + JSON500 *SecurityEndpointExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r ReadEndpointListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns 
HTTPResponse.StatusCode +func (r ReadEndpointListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateEndpointListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointExceptionsAPIEndpointListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON403 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON409 *SecurityEndpointExceptionsAPISiemErrorResponse + JSON500 *SecurityEndpointExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r CreateEndpointListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateEndpointListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateEndpointListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEndpointExceptionsAPIEndpointListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON403 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON404 *SecurityEndpointExceptionsAPISiemErrorResponse + JSON500 *SecurityEndpointExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r UpdateEndpointListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateEndpointListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FindEndpointListItemsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Data []SecurityEndpointExceptionsAPIEndpointListItem `json:"data"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Pit *string `json:"pit,omitempty"` + Total int `json:"total"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON403 *SecurityEndpointExceptionsAPIPlatformErrorResponse + JSON404 *SecurityEndpointExceptionsAPISiemErrorResponse + JSON500 *SecurityEndpointExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r FindEndpointListItemsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FindEndpointListItemsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteMonitoringEngineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Deleted bool `json:"deleted"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteMonitoringEngineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteMonitoringEngineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DisableMonitoringEngineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEntityAnalyticsAPIMonitoringEngineDescriptor +} + +// Status returns HTTPResponse.Status +func (r 
DisableMonitoringEngineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DisableMonitoringEngineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type InitMonitoringEngineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEntityAnalyticsAPIMonitoringEngineDescriptor +} + +// Status returns HTTPResponse.Status +func (r InitMonitoringEngineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r InitMonitoringEngineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ScheduleMonitoringEngineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Success Indicates the scheduling was successful + Success *bool `json:"success,omitempty"` + } + JSON409 *struct { + // Message Error message indicating the engine is already running + Message *string `json:"message,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r ScheduleMonitoringEngineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ScheduleMonitoringEngineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PrivMonHealthResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Error *struct { + Message *string `json:"message,omitempty"` + } `json:"error,omitempty"` + Status SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatus `json:"status"` + } +} + +// Status returns HTTPResponse.Status +func (r PrivMonHealthResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PrivMonHealthResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PrivMonPrivilegesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEntityAnalyticsAPIEntityAnalyticsPrivileges +} + +// Status returns HTTPResponse.Status +func (r PrivMonPrivilegesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PrivMonPrivilegesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreatePrivMonUserResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEntityAnalyticsAPIMonitoredUserDoc +} + +// Status returns HTTPResponse.Status +func (r CreatePrivMonUserResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreatePrivMonUserResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PrivmonBulkUploadUsersCSVResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Errors []SecurityEntityAnalyticsAPIPrivmonUserCsvUploadErrorItem `json:"errors"` + Stats 
SecurityEntityAnalyticsAPIPrivmonUserCsvUploadStats `json:"stats"` + } +} + +// Status returns HTTPResponse.Status +func (r PrivmonBulkUploadUsersCSVResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PrivmonBulkUploadUsersCSVResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ListPrivMonUsersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]SecurityEntityAnalyticsAPIMonitoredUserDoc +} + +// Status returns HTTPResponse.Status +func (r ListPrivMonUsersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ListPrivMonUsersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeletePrivMonUserResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Acknowledged Indicates if the deletion was successful + Acknowledged *bool `json:"acknowledged,omitempty"` + + // Message A message providing additional information about the deletion status + Message *string `json:"message,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeletePrivMonUserResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeletePrivMonUserResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdatePrivMonUserResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEntityAnalyticsAPIMonitoredUserDoc +} + +// Status returns HTTPResponse.Status +func (r UpdatePrivMonUserResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdatePrivMonUserResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type InstallPrivilegedAccessDetectionPackageResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Message string `json:"message"` + } +} + +// Status returns HTTPResponse.Status +func (r InstallPrivilegedAccessDetectionPackageResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r InstallPrivilegedAccessDetectionPackageResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetPrivilegedAccessDetectionPackageStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Jobs []struct { + Description *string `json:"description,omitempty"` + JobId string `json:"job_id"` + State GetPrivilegedAccessDetectionPackageStatus200JobsState `json:"state"` + } `json:"jobs"` + MlModuleSetupStatus GetPrivilegedAccessDetectionPackageStatus200MlModuleSetupStatus `json:"ml_module_setup_status"` + PackageInstallationStatus GetPrivilegedAccessDetectionPackageStatus200PackageInstallationStatus `json:"package_installation_status"` + } +} +type GetPrivilegedAccessDetectionPackageStatus200JobsState string +type GetPrivilegedAccessDetectionPackageStatus200MlModuleSetupStatus 
string +type GetPrivilegedAccessDetectionPackageStatus200PackageInstallationStatus string + +// Status returns HTTPResponse.Status +func (r GetPrivilegedAccessDetectionPackageStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetPrivilegedAccessDetectionPackageStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type InitEntityStoreResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Engines *[]SecurityEntityAnalyticsAPIEngineDescriptor `json:"engines,omitempty"` + Succeeded *bool `json:"succeeded,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r InitEntityStoreResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r InitEntityStoreResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ListEntityEnginesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Count *int `json:"count,omitempty"` + Engines *[]SecurityEntityAnalyticsAPIEngineDescriptor `json:"engines,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r ListEntityEnginesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ListEntityEnginesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ApplyEntityEngineDataviewIndicesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Result *[]SecurityEntityAnalyticsAPIEngineDataviewUpdateResult `json:"result,omitempty"` + Success *bool `json:"success,omitempty"` + } + JSON207 *struct { + Errors *[]string `json:"errors,omitempty"` + Result *[]SecurityEntityAnalyticsAPIEngineDataviewUpdateResult `json:"result,omitempty"` + Success *bool `json:"success,omitempty"` + } + JSON500 *struct { + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r ApplyEntityEngineDataviewIndicesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ApplyEntityEngineDataviewIndicesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteEntityEngineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Deleted *bool `json:"deleted,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteEntityEngineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteEntityEngineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetEntityEngineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEntityAnalyticsAPIEngineDescriptor +} + +// Status returns HTTPResponse.Status +func (r GetEntityEngineResponse) Status() string { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetEntityEngineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type InitEntityEngineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEntityAnalyticsAPIEngineDescriptor +} + +// Status returns HTTPResponse.Status +func (r InitEntityEngineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r InitEntityEngineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type StartEntityEngineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Started *bool `json:"started,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r StartEntityEngineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r StartEntityEngineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type StopEntityEngineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Stopped *bool `json:"stopped,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r StopEntityEngineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r StopEntityEngineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ListEntitiesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Inspect *SecurityEntityAnalyticsAPIInspectQuery `json:"inspect,omitempty"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Records []SecurityEntityAnalyticsAPIEntity `json:"records"` + Total int `json:"total"` + } +} + +// Status returns HTTPResponse.Status +func (r ListEntitiesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ListEntitiesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetEntityStoreStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Engines []struct { + Components *[]SecurityEntityAnalyticsAPIEngineComponentStatus `json:"components,omitempty"` + Delay *string `json:"delay,omitempty"` + DocsPerSecond *int `json:"docsPerSecond,omitempty"` + Error *struct { + Action GetEntityStoreStatus200EnginesErrorAction `json:"action"` + Message string `json:"message"` + } `json:"error,omitempty"` + FieldHistoryLength int `json:"fieldHistoryLength"` + Filter *string `json:"filter,omitempty"` + Frequency *string `json:"frequency,omitempty"` + IndexPattern SecurityEntityAnalyticsAPIIndexPattern `json:"indexPattern"` + LookbackPeriod *string `json:"lookbackPeriod,omitempty"` + Status SecurityEntityAnalyticsAPIEngineStatus `json:"status"` + Timeout *string `json:"timeout,omitempty"` + TimestampField *string `json:"timestampField,omitempty"` + Type SecurityEntityAnalyticsAPIEntityType `json:"type"` + } `json:"engines"` + Status 
SecurityEntityAnalyticsAPIStoreStatus `json:"status"` + } +} +type GetEntityStoreStatus200EnginesErrorAction string + +// Status returns HTTPResponse.Status +func (r GetEntityStoreStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetEntityStoreStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteExceptionListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityExceptionsAPIExceptionList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON404 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r DeleteExceptionListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteExceptionListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadExceptionListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityExceptionsAPIExceptionList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON404 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r ReadExceptionListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ReadExceptionListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateExceptionListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityExceptionsAPIExceptionList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON409 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r CreateExceptionListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateExceptionListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateExceptionListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityExceptionsAPIExceptionList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON404 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r UpdateExceptionListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateExceptionListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.StatusCode + } + return 0 +} + +type DuplicateExceptionListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityExceptionsAPIExceptionList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON404 *SecurityExceptionsAPIPlatformErrorResponse + JSON405 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r DuplicateExceptionListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DuplicateExceptionListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ExportExceptionListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON404 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r ExportExceptionListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ExportExceptionListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FindExceptionListsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Data []SecurityExceptionsAPIExceptionList `json:"data"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Total int `json:"total"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r FindExceptionListsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FindExceptionListsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ImportExceptionListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Errors SecurityExceptionsAPIExceptionListsImportBulkErrorArray `json:"errors"` + Success bool `json:"success"` + SuccessCount int `json:"success_count"` + SuccessCountExceptionListItems int `json:"success_count_exception_list_items"` + SuccessCountExceptionLists int `json:"success_count_exception_lists"` + SuccessExceptionListItems bool `json:"success_exception_list_items"` + SuccessExceptionLists bool `json:"success_exception_lists"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r ImportExceptionListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ImportExceptionListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteExceptionListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityExceptionsAPIExceptionListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON404 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r DeleteExceptionListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteExceptionListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadExceptionListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityExceptionsAPIExceptionListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON404 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r ReadExceptionListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ReadExceptionListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateExceptionListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityExceptionsAPIExceptionListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON409 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r CreateExceptionListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateExceptionListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type UpdateExceptionListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityExceptionsAPIExceptionListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON404 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r UpdateExceptionListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateExceptionListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type FindExceptionListItemsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Data []SecurityExceptionsAPIExceptionListItem `json:"data"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Pit *string `json:"pit,omitempty"` + Total int `json:"total"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 
*SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON404 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r FindExceptionListItemsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FindExceptionListItemsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type ReadExceptionListSummaryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Linux *int `json:"linux,omitempty"` + Macos *int `json:"macos,omitempty"` + Total *int `json:"total,omitempty"` + Windows *int `json:"windows,omitempty"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON404 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r ReadExceptionListSummaryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r ReadExceptionListSummaryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type CreateSharedExceptionListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityExceptionsAPIExceptionList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityExceptionsAPIPlatformErrorResponse + JSON403 *SecurityExceptionsAPIPlatformErrorResponse + JSON409 *SecurityExceptionsAPISiemErrorResponse + JSON500 *SecurityExceptionsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r CreateSharedExceptionListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r CreateSharedExceptionListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFeaturesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} +} + +// Status returns HTTPResponse.Status +func (r GetFeaturesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFeaturesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentDownloadSourcesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + Host string `json:"host"` + Id string `json:"id"` + IsDefault *bool `json:"is_default,omitempty"` + Name string `json:"name"` + + // ProxyId The ID of the proxy to use for this download source. See the proxies API for more information. 
+ ProxyId *string `json:"proxy_id"` + Secrets *struct { + Ssl *struct { + Key *GetFleetAgentDownloadSources_200_Items_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetAgentDownloadSources200ItemsSecretsSslKey0 struct { + Id string `json:"id"` +} +type GetFleetAgentDownloadSources200ItemsSecretsSslKey1 = string +type GetFleetAgentDownloadSources_200_Items_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentDownloadSourcesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentDownloadSourcesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentDownloadSourcesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + Host string `json:"host"` + Id string `json:"id"` + IsDefault *bool `json:"is_default,omitempty"` + Name string `json:"name"` + + // ProxyId The ID of the proxy to use for this download source. See the proxies API for more information. 
+ ProxyId *string `json:"proxy_id"` + Secrets *struct { + Ssl *struct { + Key *PostFleetAgentDownloadSources_200_Item_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PostFleetAgentDownloadSources200ItemSecretsSslKey0 struct { + Id string `json:"id"` +} +type PostFleetAgentDownloadSources200ItemSecretsSslKey1 = string +type PostFleetAgentDownloadSources_200_Item_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentDownloadSourcesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentDownloadSourcesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteFleetAgentDownloadSourcesSourceidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Id string `json:"id"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteFleetAgentDownloadSourcesSourceidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetAgentDownloadSourcesSourceidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentDownloadSourcesSourceidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + Host string `json:"host"` + Id string `json:"id"` + IsDefault *bool `json:"is_default,omitempty"` + Name string `json:"name"` + + // ProxyId The ID of the proxy to use for this download source. See the proxies API for more information. 
+ ProxyId *string `json:"proxy_id"` + Secrets *struct { + Ssl *struct { + Key *GetFleetAgentDownloadSourcesSourceid_200_Item_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetAgentDownloadSourcesSourceid200ItemSecretsSslKey0 struct { + Id string `json:"id"` +} +type GetFleetAgentDownloadSourcesSourceid200ItemSecretsSslKey1 = string +type GetFleetAgentDownloadSourcesSourceid_200_Item_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentDownloadSourcesSourceidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentDownloadSourcesSourceidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutFleetAgentDownloadSourcesSourceidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + Host string `json:"host"` + Id string `json:"id"` + IsDefault *bool `json:"is_default,omitempty"` + Name string `json:"name"` + + // ProxyId The ID of the proxy to use for this download source. See the proxies API for more information. + ProxyId *string `json:"proxy_id"` + Secrets *struct { + Ssl *struct { + Key *PutFleetAgentDownloadSourcesSourceid_200_Item_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PutFleetAgentDownloadSourcesSourceid200ItemSecretsSslKey0 struct { + Id string `json:"id"` +} +type PutFleetAgentDownloadSourcesSourceid200ItemSecretsSslKey1 = string +type PutFleetAgentDownloadSourcesSourceid_200_Item_Secrets_Ssl_Key struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r PutFleetAgentDownloadSourcesSourceidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetAgentDownloadSourcesSourceidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentPoliciesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []AgentPolicy `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string 
`json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentPoliciesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentPoliciesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentPoliciesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item AgentPolicy `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentPoliciesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentPoliciesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentPoliciesBulkGetResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + AdvancedSettings *struct { + AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory"` + AgentDownloadTimeout interface{} `json:"agent_download_timeout"` + AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs"` + AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval"` + AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles"` + AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes"` + AgentLoggingLevel interface{} `json:"agent_logging_level"` + AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period"` + AgentLoggingToFiles interface{} `json:"agent_logging_to_files"` + AgentMonitoringRuntimeExperimental interface{} `json:"agent_monitoring_runtime_experimental"` + } `json:"advanced_settings,omitempty"` + AgentFeatures *[]struct { + Enabled bool `json:"enabled"` + Name string `json:"name"` + } `json:"agent_features,omitempty"` + Agentless *struct { + CloudConnectors *struct { + Enabled bool `json:"enabled"` + TargetCsp *string `json:"target_csp,omitempty"` + } `json:"cloud_connectors,omitempty"` + Resources *struct { + Requests *struct { + Cpu *string `json:"cpu,omitempty"` + Memory *string `json:"memory,omitempty"` + } `json:"requests,omitempty"` + } `json:"resources,omitempty"` + } `json:"agentless,omitempty"` + Agents *float32 `json:"agents,omitempty"` + DataOutputId *string `json:"data_output_id"` + Description *string `json:"description,omitempty"` + DownloadSourceId *string `json:"download_source_id"` + FleetServerHostId *string `json:"fleet_server_host_id"` + + // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. 
+ GlobalDataTags *[]struct { + Name string `json:"name"` + Value PostFleetAgentPoliciesBulkGet_200_Items_GlobalDataTags_Value `json:"value"` + } `json:"global_data_tags,omitempty"` + HasFleetServer *bool `json:"has_fleet_server,omitempty"` + Id string `json:"id"` + InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` + IsManaged bool `json:"is_managed"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + + // IsProtected Indicates whether the agent policy has tamper protection enabled. Default false. + IsProtected bool `json:"is_protected"` + + // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled + KeepMonitoringAlive *bool `json:"keep_monitoring_alive"` + MonitoringDiagnostics *struct { + Limit *struct { + Burst *float32 `json:"burst,omitempty"` + Interval *string `json:"interval,omitempty"` + } `json:"limit,omitempty"` + Uploader *struct { + InitDur *string `json:"init_dur,omitempty"` + MaxDur *string `json:"max_dur,omitempty"` + MaxRetries *float32 `json:"max_retries,omitempty"` + } `json:"uploader,omitempty"` + } `json:"monitoring_diagnostics,omitempty"` + MonitoringEnabled *[]PostFleetAgentPoliciesBulkGet200ItemsMonitoringEnabled `json:"monitoring_enabled,omitempty"` + MonitoringHttp *struct { + Buffer *struct { + Enabled *bool `json:"enabled,omitempty"` + } `json:"buffer,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + Host *string `json:"host,omitempty"` + Port *float32 `json:"port,omitempty"` + } `json:"monitoring_http,omitempty"` + MonitoringOutputId *string `json:"monitoring_output_id"` + MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` + Name string `json:"name"` + Namespace string `json:"namespace"` + + // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *map[string]interface{} `json:"overrides"` + PackagePolicies *PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies `json:"package_policies,omitempty"` + RequiredVersions *[]struct { + // Percentage Target percentage of agents to auto upgrade + Percentage float32 `json:"percentage"` + + // Version Target version for automatic agent upgrade + Version string `json:"version"` + } `json:"required_versions"` + Revision float32 `json:"revision"` + SchemaVersion *string `json:"schema_version,omitempty"` + SpaceIds *[]string `json:"space_ids,omitempty"` + Status PostFleetAgentPoliciesBulkGet200ItemsStatus `json:"status"` + + // SupportsAgentless Indicates whether the agent policy supports agentless integrations. 
+ SupportsAgentless *bool `json:"supports_agentless"` + UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` + UnprivilegedAgents *float32 `json:"unprivileged_agents,omitempty"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Version *string `json:"version,omitempty"` + } `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PostFleetAgentPoliciesBulkGet200ItemsGlobalDataTagsValue0 = string +type PostFleetAgentPoliciesBulkGet200ItemsGlobalDataTagsValue1 = float32 +type PostFleetAgentPoliciesBulkGet_200_Items_GlobalDataTags_Value struct { + union json.RawMessage +} +type PostFleetAgentPoliciesBulkGet200ItemsMonitoringEnabled string +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies0 = []string +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1 = []struct { + // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. + AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions"` + Agents *float32 `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + CreatedBy string `json:"created_by"` + + // Description Package policy description + Description *string `json:"description,omitempty"` + Elasticsearch *PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Elasticsearch `json:"elasticsearch,omitempty"` + Enabled bool `json:"enabled"` + Id string `json:"id"` + Inputs PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Inputs `json:"inputs"` + IsManaged *bool `json:"is_managed,omitempty"` + + // Name Package policy name (should be unique) + Name string `json:"name"` + + // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id"` + + // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. 
+ Overrides *struct { + Inputs *map[string]interface{} `json:"inputs,omitempty"` + } `json:"overrides"` + Package *struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package,omitempty"` + + // PolicyId Agent policy ID where that package policy will be added + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Revision float32 `json:"revision"` + SecretReferences *[]struct { + Id string `json:"id"` + } `json:"secret_references,omitempty"` + SpaceIds *[]string `json:"spaceIds,omitempty"` + + // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. + SupportsAgentless *bool `json:"supports_agentless"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Vars *PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Vars `json:"vars,omitempty"` + Version *string `json:"version,omitempty"` +} +type PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Elasticsearch_Privileges struct { + Cluster *[]string `json:"cluster,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Elasticsearch struct { + Privileges *PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Elasticsearch_Privileges `json:"privileges,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs0 = []struct { + CompiledInput interface{} `json:"compiled_input"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + PolicyTemplate *string `json:"policy_template,omitempty"` + Streams []struct { + CompiledStream interface{} `json:"compiled_stream"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + DataStream struct { + Dataset string `json:"dataset"` + Elasticsearch *struct { + DynamicDataset *bool `json:"dynamic_dataset,omitempty"` + DynamicNamespace *bool `json:"dynamic_namespace,omitempty"` + Privileges *struct { + Indices *[]string `json:"indices,omitempty"` + } `json:"privileges,omitempty"` + } `json:"elasticsearch,omitempty"` + Type string `json:"type"` + } `json:"data_stream"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + Release 
*PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs0StreamsRelease `json:"release,omitempty"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + } `json:"streams"` + Type string `json:"type"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` +} +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs0StreamsRelease string +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1 map[string]struct { + // Enabled enable or disable that input, (default to true) + Enabled *bool `json:"enabled,omitempty"` + + // Streams Input streams (see integration documentation to know what streams are available) + Streams *map[string]struct { + // Enabled enable or disable that stream, (default to true) + Enabled *bool `json:"enabled,omitempty"` + + // Vars Input/stream level variable (see integration documentation for more information) + Vars *map[string]*PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties `json:"vars,omitempty"` + } `json:"streams,omitempty"` + + // Vars Input/stream level variable (see integration documentation for more information) + Vars *map[string]*PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties `json:"vars,omitempty"` +} +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1StreamsVars0 = bool +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1StreamsVars1 = string +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1StreamsVars2 = float32 +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1StreamsVars3 = []string +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1StreamsVars4 = []float32 +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1StreamsVars5 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1Vars0 = bool +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1Vars1 = string +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1Vars2 = float32 +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1Vars3 = []string +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1Vars4 = []float32 +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Inputs1Vars5 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Inputs struct { + union json.RawMessage +} +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Vars0 map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` +} +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Vars1 
map[string]*PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Vars_1_AdditionalProperties +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Vars10 = bool +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Vars11 = string +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Vars12 = float32 +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Vars13 = []string +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Vars14 = []float32 +type PostFleetAgentPoliciesBulkGet200ItemsPackagePolicies1Vars15 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Vars_1_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies_1_Vars struct { + union json.RawMessage +} +type PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies struct { + union json.RawMessage +} +type PostFleetAgentPoliciesBulkGet200ItemsStatus string + +// Status returns HTTPResponse.Status +func (r PostFleetAgentPoliciesBulkGetResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentPoliciesBulkGetResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentPoliciesDeleteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Id string `json:"id"` + Name string `json:"name"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentPoliciesDeleteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentPoliciesDeleteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentPoliciesOutputsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + AgentPolicyId *string `json:"agentPolicyId,omitempty"` + Data struct { + Integrations *[]struct { + Id *string `json:"id,omitempty"` + IntegrationPolicyName *string `json:"integrationPolicyName,omitempty"` + Name *string `json:"name,omitempty"` + PkgName *string `json:"pkgName,omitempty"` + } `json:"integrations,omitempty"` + Output struct { + Id string `json:"id"` + Name string `json:"name"` + } `json:"output"` + } `json:"data"` + Monitoring struct { + Output struct { + Id string `json:"id"` + Name string `json:"name"` + } `json:"output"` + } `json:"monitoring"` + } `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentPoliciesOutputsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentPoliciesOutputsResponse) StatusCode() int { + if r.HTTPResponse != nil { 
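For readers of this generated hunk: the *_GlobalDataTags_Value wrapper defined above is a oneOf (string or number) that keeps the raw JSON in its unexported union field. The sketch below is illustrative only and assumes it would sit in the same generated package (the package clause and imports such as encoding/json, fmt and strconv are outside this hunk); the helper name is invented, and the generator may also emit accessor methods elsewhere in the file that this sketch does not rely on.

// globalDataTagAsString is a hypothetical, same-package helper: it decodes the
// string-or-number oneOf value by trying each generated variant alias in turn.
func globalDataTagAsString(v PostFleetAgentPoliciesBulkGet_200_Items_GlobalDataTags_Value) (string, error) {
	var s PostFleetAgentPoliciesBulkGet200ItemsGlobalDataTagsValue0 // alias of string
	if err := json.Unmarshal(v.union, &s); err == nil {
		return s, nil
	}
	var n PostFleetAgentPoliciesBulkGet200ItemsGlobalDataTagsValue1 // alias of float32
	if err := json.Unmarshal(v.union, &n); err != nil {
		return "", fmt.Errorf("global_data_tags value is neither string nor number: %w", err)
	}
	return strconv.FormatFloat(float64(n), 'f', -1, 32), nil
}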
+ return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentPoliciesAgentpolicyidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item AgentPolicy `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentPoliciesAgentpolicyidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentPoliciesAgentpolicyidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutFleetAgentPoliciesAgentpolicyidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item AgentPolicy `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PutFleetAgentPoliciesAgentpolicyidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetAgentPoliciesAgentpolicyidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + CurrentVersions []struct { + Agents float32 `json:"agents"` + FailedUpgradeAgents float32 `json:"failedUpgradeAgents"` + Version string `json:"version"` + } `json:"currentVersions"` + TotalAgents float32 `json:"totalAgents"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentPoliciesAgentpolicyidCopyResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + AdvancedSettings *struct { + AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory"` + AgentDownloadTimeout interface{} `json:"agent_download_timeout"` + AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs"` + AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval"` + AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles"` + AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes"` + AgentLoggingLevel interface{} `json:"agent_logging_level"` + AgentLoggingMetricsPeriod interface{} 
`json:"agent_logging_metrics_period"` + AgentLoggingToFiles interface{} `json:"agent_logging_to_files"` + AgentMonitoringRuntimeExperimental interface{} `json:"agent_monitoring_runtime_experimental"` + } `json:"advanced_settings,omitempty"` + AgentFeatures *[]struct { + Enabled bool `json:"enabled"` + Name string `json:"name"` + } `json:"agent_features,omitempty"` + Agentless *struct { + CloudConnectors *struct { + Enabled bool `json:"enabled"` + TargetCsp *string `json:"target_csp,omitempty"` + } `json:"cloud_connectors,omitempty"` + Resources *struct { + Requests *struct { + Cpu *string `json:"cpu,omitempty"` + Memory *string `json:"memory,omitempty"` + } `json:"requests,omitempty"` + } `json:"resources,omitempty"` + } `json:"agentless,omitempty"` + Agents *float32 `json:"agents,omitempty"` + DataOutputId *string `json:"data_output_id"` + Description *string `json:"description,omitempty"` + DownloadSourceId *string `json:"download_source_id"` + FleetServerHostId *string `json:"fleet_server_host_id"` + + // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. + GlobalDataTags *[]struct { + Name string `json:"name"` + Value PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_GlobalDataTags_Value `json:"value"` + } `json:"global_data_tags,omitempty"` + HasFleetServer *bool `json:"has_fleet_server,omitempty"` + Id string `json:"id"` + InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` + IsManaged bool `json:"is_managed"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + + // IsProtected Indicates whether the agent policy has tamper protection enabled. Default false. + IsProtected bool `json:"is_protected"` + + // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled + KeepMonitoringAlive *bool `json:"keep_monitoring_alive"` + MonitoringDiagnostics *struct { + Limit *struct { + Burst *float32 `json:"burst,omitempty"` + Interval *string `json:"interval,omitempty"` + } `json:"limit,omitempty"` + Uploader *struct { + InitDur *string `json:"init_dur,omitempty"` + MaxDur *string `json:"max_dur,omitempty"` + MaxRetries *float32 `json:"max_retries,omitempty"` + } `json:"uploader,omitempty"` + } `json:"monitoring_diagnostics,omitempty"` + MonitoringEnabled *[]PostFleetAgentPoliciesAgentpolicyidCopy200ItemMonitoringEnabled `json:"monitoring_enabled,omitempty"` + MonitoringHttp *struct { + Buffer *struct { + Enabled *bool `json:"enabled,omitempty"` + } `json:"buffer,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + Host *string `json:"host,omitempty"` + Port *float32 `json:"port,omitempty"` + } `json:"monitoring_http,omitempty"` + MonitoringOutputId *string `json:"monitoring_output_id"` + MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` + Name string `json:"name"` + Namespace string `json:"namespace"` + + // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. 
+ Overrides *map[string]interface{} `json:"overrides"` + PackagePolicies *PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies `json:"package_policies,omitempty"` + RequiredVersions *[]struct { + // Percentage Target percentage of agents to auto upgrade + Percentage float32 `json:"percentage"` + + // Version Target version for automatic agent upgrade + Version string `json:"version"` + } `json:"required_versions"` + Revision float32 `json:"revision"` + SchemaVersion *string `json:"schema_version,omitempty"` + SpaceIds *[]string `json:"space_ids,omitempty"` + Status PostFleetAgentPoliciesAgentpolicyidCopy200ItemStatus `json:"status"` + + // SupportsAgentless Indicates whether the agent policy supports agentless integrations. + SupportsAgentless *bool `json:"supports_agentless"` + UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` + UnprivilegedAgents *float32 `json:"unprivileged_agents,omitempty"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Version *string `json:"version,omitempty"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemGlobalDataTagsValue0 = string +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemGlobalDataTagsValue1 = float32 +type PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_GlobalDataTags_Value struct { + union json.RawMessage +} +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemMonitoringEnabled string +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies0 = []string +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1 = []struct { + // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. + AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions"` + Agents *float32 `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + CreatedBy string `json:"created_by"` + + // Description Package policy description + Description *string `json:"description,omitempty"` + Elasticsearch *PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Elasticsearch `json:"elasticsearch,omitempty"` + Enabled bool `json:"enabled"` + Id string `json:"id"` + Inputs PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Inputs `json:"inputs"` + IsManaged *bool `json:"is_managed,omitempty"` + + // Name Package policy name (should be unique) + Name string `json:"name"` + + // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id"` + + // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. 
+ Overrides *struct { + Inputs *map[string]interface{} `json:"inputs,omitempty"` + } `json:"overrides"` + Package *struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package,omitempty"` + + // PolicyId Agent policy ID where that package policy will be added + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Revision float32 `json:"revision"` + SecretReferences *[]struct { + Id string `json:"id"` + } `json:"secret_references,omitempty"` + SpaceIds *[]string `json:"spaceIds,omitempty"` + + // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. + SupportsAgentless *bool `json:"supports_agentless"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Vars *PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Vars `json:"vars,omitempty"` + Version *string `json:"version,omitempty"` +} +type PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Elasticsearch_Privileges struct { + Cluster *[]string `json:"cluster,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Elasticsearch struct { + Privileges *PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Elasticsearch_Privileges `json:"privileges,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs0 = []struct { + CompiledInput interface{} `json:"compiled_input"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + PolicyTemplate *string `json:"policy_template,omitempty"` + Streams []struct { + CompiledStream interface{} `json:"compiled_stream"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + DataStream struct { + Dataset string `json:"dataset"` + Elasticsearch *struct { + DynamicDataset *bool `json:"dynamic_dataset,omitempty"` + DynamicNamespace *bool `json:"dynamic_namespace,omitempty"` + Privileges *struct { + Indices *[]string `json:"indices,omitempty"` + } `json:"privileges,omitempty"` + } `json:"elasticsearch,omitempty"` + Type string `json:"type"` + } `json:"data_stream"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool 
`json:"keep_enabled,omitempty"` + Release *PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs0StreamsRelease `json:"release,omitempty"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + } `json:"streams"` + Type string `json:"type"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` +} +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs0StreamsRelease string +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1 map[string]struct { + // Enabled enable or disable that input, (default to true) + Enabled *bool `json:"enabled,omitempty"` + + // Streams Input streams (see integration documentation to know what streams are available) + Streams *map[string]struct { + // Enabled enable or disable that stream, (default to true) + Enabled *bool `json:"enabled,omitempty"` + + // Vars Input/stream level variable (see integration documentation for more information) + Vars *map[string]*PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties `json:"vars,omitempty"` + } `json:"streams,omitempty"` + + // Vars Input/stream level variable (see integration documentation for more information) + Vars *map[string]*PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties `json:"vars,omitempty"` +} +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1StreamsVars0 = bool +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1StreamsVars1 = string +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1StreamsVars2 = float32 +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1StreamsVars3 = []string +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1StreamsVars4 = []float32 +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1StreamsVars5 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1Vars0 = bool +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1Vars1 = string +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1Vars2 = float32 +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1Vars3 = []string +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1Vars4 = []float32 +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Inputs1Vars5 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Inputs struct { + union json.RawMessage +} +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Vars0 map[string]struct { + Frozen *bool 
`json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` +} +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Vars1 map[string]*PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Vars_1_AdditionalProperties +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Vars10 = bool +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Vars11 = string +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Vars12 = float32 +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Vars13 = []string +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Vars14 = []float32 +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemPackagePolicies1Vars15 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Vars_1_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies_1_Vars struct { + union json.RawMessage +} +type PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies struct { + union json.RawMessage +} +type PostFleetAgentPoliciesAgentpolicyidCopy200ItemStatus string + +// Status returns HTTPResponse.Status +func (r PostFleetAgentPoliciesAgentpolicyidCopyResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentPoliciesAgentpolicyidCopyResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentPoliciesAgentpolicyidDownloadResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *string + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentPoliciesAgentpolicyidDownloadResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentPoliciesAgentpolicyidDownloadResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentPoliciesAgentpolicyidFullResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item GetFleetAgentPoliciesAgentpolicyidFull_200_Item `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetAgentPoliciesAgentpolicyidFull200Item0 = string +type GetFleetAgentPoliciesAgentpolicyidFull200Item1 struct { + Agent *struct { + Download struct { + Secrets *GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Agent_Download_Secrets `json:"secrets,omitempty"` + SourceURI string `json:"sourceURI"` + Ssl *struct { + Certificate *string 
`json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + Renegotiation *string `json:"renegotiation,omitempty"` + VerificationMode *string `json:"verification_mode,omitempty"` + } `json:"ssl,omitempty"` + TargetDirectory *string `json:"target_directory,omitempty"` + Timeout *string `json:"timeout,omitempty"` + } `json:"download"` + Features map[string]struct { + Enabled bool `json:"enabled"` + } `json:"features"` + Limits *struct { + GoMaxProcs *float32 `json:"go_max_procs,omitempty"` + } `json:"limits,omitempty"` + Logging *struct { + Files *struct { + Interval *string `json:"interval,omitempty"` + Keepfiles *float32 `json:"keepfiles,omitempty"` + Rotateeverybytes *float32 `json:"rotateeverybytes,omitempty"` + } `json:"files,omitempty"` + Level *string `json:"level,omitempty"` + Metrics *struct { + Period *string `json:"period,omitempty"` + } `json:"metrics,omitempty"` + ToFiles *bool `json:"to_files,omitempty"` + } `json:"logging,omitempty"` + Monitoring struct { + UnderscoreRuntimeExperimental *string `json:"_runtime_experimental,omitempty"` + Apm interface{} `json:"apm"` + Enabled bool `json:"enabled"` + Logs bool `json:"logs"` + Metrics bool `json:"metrics"` + Namespace *string `json:"namespace,omitempty"` + Traces bool `json:"traces"` + UseOutput *string `json:"use_output,omitempty"` + } `json:"monitoring"` + Protection *struct { + Enabled bool `json:"enabled"` + SigningKey string `json:"signing_key"` + UninstallTokenHash string `json:"uninstall_token_hash"` + } `json:"protection,omitempty"` + } `json:"agent,omitempty"` + Connectors *map[string]interface{} `json:"connectors,omitempty"` + Exporters *map[string]interface{} `json:"exporters,omitempty"` + Extensions *map[string]interface{} `json:"extensions,omitempty"` + Fleet *GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Fleet `json:"fleet,omitempty"` + Id string `json:"id"` + Inputs []GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Item `json:"inputs"` + Namespaces *[]string `json:"namespaces,omitempty"` + OutputPermissions *map[string]map[string]interface{} `json:"output_permissions,omitempty"` + Outputs map[string]GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Outputs_AdditionalProperties `json:"outputs"` + Processors *map[string]interface{} `json:"processors,omitempty"` + Receivers *map[string]interface{} `json:"receivers,omitempty"` + Revision *float32 `json:"revision,omitempty"` + SecretReferences *[]struct { + Id string `json:"id"` + } `json:"secret_references,omitempty"` + Service *struct { + Extensions *[]string `json:"extensions,omitempty"` + Pipelines *map[string]struct { + Exporters *[]string `json:"exporters,omitempty"` + Processors *[]string `json:"processors,omitempty"` + Receivers *[]string `json:"receivers,omitempty"` + } `json:"pipelines,omitempty"` + } `json:"service,omitempty"` + Signed *struct { + Data string `json:"data"` + Signature string `json:"signature"` + } `json:"signed,omitempty"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Agent_Download_Secrets_Ssl_Key struct { + Id *string `json:"id,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Agent_Download_Secrets_Ssl struct { + Key GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Agent_Download_Secrets_Ssl_Key `json:"key"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Agent_Download_Secrets 
struct { + Ssl *GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Agent_Download_Secrets_Ssl `json:"ssl,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull200Item1Fleet0 struct { + Hosts []string `json:"hosts"` + ProxyHeaders interface{} `json:"proxy_headers"` + ProxyUrl *string `json:"proxy_url,omitempty"` + Secrets *GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Fleet_0_Secrets `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + Renegotiation *string `json:"renegotiation,omitempty"` + VerificationMode *string `json:"verification_mode,omitempty"` + } `json:"ssl,omitempty"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Fleet_0_Secrets_Ssl_Key struct { + Id *string `json:"id,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Fleet_0_Secrets_Ssl struct { + Key GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Fleet_0_Secrets_Ssl_Key `json:"key"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Fleet_0_Secrets struct { + Ssl *GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Fleet_0_Secrets_Ssl `json:"ssl,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull200Item1Fleet1 struct { + Kibana struct { + Hosts []string `json:"hosts"` + Path *string `json:"path,omitempty"` + Protocol string `json:"protocol"` + } `json:"kibana"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Fleet struct { + union json.RawMessage +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_DataStream struct { + Namespace string `json:"namespace"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Meta_Package struct { + Name string `json:"name"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Meta struct { + Package *GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Meta_Package `json:"package,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull200Item1InputsProcessorsAddFieldsFields0 = string +type GetFleetAgentPoliciesAgentpolicyidFull200Item1InputsProcessorsAddFieldsFields1 = float32 +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Processors_AddFields_Fields_AdditionalProperties struct { + union json.RawMessage +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Processors_AddFields struct { + Fields map[string]GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Processors_AddFields_Fields_AdditionalProperties `json:"fields"` + Target string `json:"target"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Processors_Item struct { + AddFields GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Processors_AddFields `json:"add_fields"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Streams_DataStream struct { + Dataset string `json:"dataset"` + Type *string `json:"type,omitempty"` + AdditionalProperties map[string]interface{} 
`json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Streams_Item struct { + DataStream GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Streams_DataStream `json:"data_stream"` + Id string `json:"id"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Item struct { + DataStream GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_DataStream `json:"data_stream"` + Id string `json:"id"` + Meta *GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Meta `json:"meta,omitempty"` + Name string `json:"name"` + PackagePolicyId string `json:"package_policy_id"` + Processors *[]GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Processors_Item `json:"processors,omitempty"` + Revision float32 `json:"revision"` + Streams *[]GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Inputs_Streams_Item `json:"streams,omitempty"` + Type string `json:"type"` + UseOutput string `json:"use_output"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item_1_Outputs_AdditionalProperties struct { + CaSha256 *string `json:"ca_sha256"` + Hosts *[]string `json:"hosts,omitempty"` + ProxyHeaders interface{} `json:"proxy_headers"` + ProxyUrl *string `json:"proxy_url,omitempty"` + Type string `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentPoliciesAgentpolicyidFull_200_Item struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentPoliciesAgentpolicyidFullResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentPoliciesAgentpolicyidFullResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentPoliciesAgentpolicyidOutputsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + AgentPolicyId *string `json:"agentPolicyId,omitempty"` + Data struct { + Integrations *[]struct { + Id *string `json:"id,omitempty"` + IntegrationPolicyName *string `json:"integrationPolicyName,omitempty"` + Name *string `json:"name,omitempty"` + PkgName *string `json:"pkgName,omitempty"` + } `json:"integrations,omitempty"` + Output struct { + Id string `json:"id"` + Name string `json:"name"` + } `json:"output"` + } `json:"data"` + Monitoring struct { + Output struct { + Id string `json:"id"` + Name string `json:"name"` + } `json:"output"` + } `json:"monitoring"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentPoliciesAgentpolicyidOutputsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentPoliciesAgentpolicyidOutputsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Results struct { + Active float32 `json:"active"` + All float32 `json:"all"` + Error float32 
`json:"error"` + Events float32 `json:"events"` + Inactive float32 `json:"inactive"` + Offline float32 `json:"offline"` + Online float32 `json:"online"` + Orphaned *float32 `json:"orphaned,omitempty"` + Other float32 `json:"other"` + Unenrolled float32 `json:"unenrolled"` + Uninstalled *float32 `json:"uninstalled,omitempty"` + Updating float32 `json:"updating"` + } `json:"results"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentStatusDataResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + DataPreview []interface{} `json:"dataPreview"` + Items []map[string]struct { + Data bool `json:"data"` + } `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentStatusDataResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentStatusDataResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + AccessApiKey *string `json:"access_api_key,omitempty"` + AccessApiKeyId *string `json:"access_api_key_id,omitempty"` + Active bool `json:"active"` + Agent *GetFleetAgents_200_Items_Agent `json:"agent,omitempty"` + AuditUnenrolledReason *string `json:"audit_unenrolled_reason,omitempty"` + Components *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Status GetFleetAgents200ItemsComponentsStatus `json:"status"` + Type string `json:"type"` + Units *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Payload *map[string]interface{} `json:"payload,omitempty"` + Status GetFleetAgents200ItemsComponentsUnitsStatus `json:"status"` + Type GetFleetAgents200ItemsComponentsUnitsType `json:"type"` + } `json:"units,omitempty"` + } `json:"components,omitempty"` + DefaultApiKey *string `json:"default_api_key,omitempty"` + DefaultApiKeyHistory *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"default_api_key_history,omitempty"` + DefaultApiKeyId *string `json:"default_api_key_id,omitempty"` + EnrolledAt string `json:"enrolled_at"` + Id string `json:"id"` + LastCheckin *string `json:"last_checkin,omitempty"` + LastCheckinMessage *string `json:"last_checkin_message,omitempty"` + LastCheckinStatus *GetFleetAgents200ItemsLastCheckinStatus `json:"last_checkin_status,omitempty"` + LastKnownStatus *GetFleetAgents200ItemsLastKnownStatus `json:"last_known_status,omitempty"` + LocalMetadata map[string]interface{} `json:"local_metadata"` + Metrics *struct { + 
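As an aside for readers: GetFleetAgentStatusResponse above follows the same JSON200/JSON400 shape as the other generated responses. Below is a minimal consumption sketch, assuming same-package code and only the standard fmt package; the function name and message wording are invented for illustration.

// summarizeAgentStatus is a hypothetical helper: it prefers the decoded 200 body,
// falls back to the decoded 400 body, and otherwise reports the raw HTTP status.
func summarizeAgentStatus(resp *GetFleetAgentStatusResponse) (string, error) {
	if resp == nil {
		return "", fmt.Errorf("nil response")
	}
	if resp.JSON200 == nil {
		if resp.JSON400 != nil {
			return "", fmt.Errorf("fleet API error (HTTP %d): %s", resp.StatusCode(), resp.JSON400.Message)
		}
		return "", fmt.Errorf("unexpected response: %s", resp.Status())
	}
	r := resp.JSON200.Results
	return fmt.Sprintf("agents: %.0f total, %.0f online, %.0f offline, %.0f updating, %.0f in error state",
		r.All, r.Online, r.Offline, r.Updating, r.Error), nil
}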
CpuAvg *float32 `json:"cpu_avg,omitempty"` + MemorySizeByteAvg *float32 `json:"memory_size_byte_avg,omitempty"` + } `json:"metrics,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + Outputs *map[string]struct { + ApiKeyId *string `json:"api_key_id,omitempty"` + ToRetireApiKeyIds *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"to_retire_api_key_ids,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"outputs,omitempty"` + Packages []string `json:"packages"` + PolicyId *string `json:"policy_id,omitempty"` + PolicyRevision *float32 `json:"policy_revision"` + Sort *[]interface{} `json:"sort,omitempty"` + Status *GetFleetAgents200ItemsStatus `json:"status,omitempty"` + Tags *[]string `json:"tags,omitempty"` + Type GetFleetAgents200ItemsType `json:"type"` + UnenrolledAt *string `json:"unenrolled_at,omitempty"` + UnenrollmentStartedAt *string `json:"unenrollment_started_at,omitempty"` + UnhealthyReason *[]GetFleetAgents200ItemsUnhealthyReason `json:"unhealthy_reason"` + UpgradeAttempts *[]string `json:"upgrade_attempts"` + UpgradeDetails *struct { + ActionId string `json:"action_id"` + Metadata *struct { + DownloadPercent *float32 `json:"download_percent,omitempty"` + DownloadRate *float32 `json:"download_rate,omitempty"` + ErrorMsg *string `json:"error_msg,omitempty"` + FailedState *GetFleetAgents200ItemsUpgradeDetailsMetadataFailedState `json:"failed_state,omitempty"` + RetryErrorMsg *string `json:"retry_error_msg,omitempty"` + RetryUntil *string `json:"retry_until,omitempty"` + ScheduledAt *string `json:"scheduled_at,omitempty"` + } `json:"metadata,omitempty"` + State GetFleetAgents200ItemsUpgradeDetailsState `json:"state"` + TargetVersion string `json:"target_version"` + } `json:"upgrade_details"` + UpgradeStartedAt *string `json:"upgrade_started_at"` + UpgradedAt *string `json:"upgraded_at"` + UserProvidedMetadata *map[string]interface{} `json:"user_provided_metadata,omitempty"` + } `json:"items"` + NextSearchAfter *string `json:"nextSearchAfter,omitempty"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Pit *string `json:"pit,omitempty"` + StatusSummary *map[string]float32 `json:"statusSummary,omitempty"` + Total float32 `json:"total"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetAgents_200_Items_Agent struct { + Id string `json:"id"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgents200ItemsComponentsStatus string +type GetFleetAgents200ItemsComponentsUnitsStatus string +type GetFleetAgents200ItemsComponentsUnitsType string +type GetFleetAgents200ItemsLastCheckinStatus string +type GetFleetAgents200ItemsLastKnownStatus string +type GetFleetAgents200ItemsStatus string +type GetFleetAgents200ItemsType string +type GetFleetAgents200ItemsUnhealthyReason string +type GetFleetAgents200ItemsUpgradeDetailsMetadataFailedState string +type GetFleetAgents200ItemsUpgradeDetailsState string + +// Status returns HTTPResponse.Status +func (r GetFleetAgentsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + 
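Similarly illustrative (hypothetical helper, same-package assumption, no extra imports): a sketch of walking the GetFleetAgentsResponse items defined above and collecting the IDs of agents in a given status.

// agentIDsWithStatus is a hypothetical helper over the generated list type; Status
// is optional in the schema, so the pointer is guarded before comparison.
func agentIDsWithStatus(resp *GetFleetAgentsResponse, want GetFleetAgents200ItemsStatus) []string {
	if resp == nil || resp.JSON200 == nil {
		return nil
	}
	var ids []string
	for _, item := range resp.JSON200.Items {
		if item.Status != nil && *item.Status == want {
			ids = append(ids, item.Id)
		}
	}
	return ids
}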
return 0 +} + +type PostFleetAgentsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []string `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentsActionStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + ActionId string `json:"actionId"` + CancellationTime *string `json:"cancellationTime,omitempty"` + CompletionTime *string `json:"completionTime,omitempty"` + + // CreationTime creation time of action + CreationTime string `json:"creationTime"` + Expiration *string `json:"expiration,omitempty"` + HasRolloutPeriod *bool `json:"hasRolloutPeriod,omitempty"` + IsAutomatic *bool `json:"is_automatic,omitempty"` + LatestErrors *[]struct { + AgentId string `json:"agentId"` + Error string `json:"error"` + Hostname *string `json:"hostname,omitempty"` + Timestamp string `json:"timestamp"` + } `json:"latestErrors,omitempty"` + + // NbAgentsAck number of agents that acknowledged the action + NbAgentsAck float32 `json:"nbAgentsAck"` + + // NbAgentsActionCreated number of agents included in action from kibana + NbAgentsActionCreated float32 `json:"nbAgentsActionCreated"` + + // NbAgentsActioned number of agents actioned + NbAgentsActioned float32 `json:"nbAgentsActioned"` + + // NbAgentsFailed number of agents that failed to execute the action + NbAgentsFailed float32 `json:"nbAgentsFailed"` + + // NewPolicyId new policy id (POLICY_REASSIGN action) + NewPolicyId *string `json:"newPolicyId,omitempty"` + + // PolicyId policy id (POLICY_CHANGE action) + PolicyId *string `json:"policyId,omitempty"` + + // Revision new policy revision (POLICY_CHANGE action) + Revision *float32 `json:"revision,omitempty"` + + // StartTime start time of action (scheduled actions) + StartTime *string `json:"startTime,omitempty"` + Status GetFleetAgentsActionStatus200ItemsStatus `json:"status"` + Type GetFleetAgentsActionStatus200ItemsType `json:"type"` + + // Version agent version number (UPGRADE action) + Version *string `json:"version,omitempty"` + } `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetAgentsActionStatus200ItemsStatus string +type GetFleetAgentsActionStatus200ItemsType string + +// Status returns HTTPResponse.Status +func (r GetFleetAgentsActionStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentsActionStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsActionsActionidCancelResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item 
struct { + AckData interface{} `json:"ack_data"` + Agents *[]string `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + Data interface{} `json:"data"` + Expiration *string `json:"expiration,omitempty"` + Id string `json:"id"` + MinimumExecutionDuration *float32 `json:"minimum_execution_duration,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + RolloutDurationSeconds *float32 `json:"rollout_duration_seconds,omitempty"` + SentAt *string `json:"sent_at,omitempty"` + SourceUri *string `json:"source_uri,omitempty"` + StartTime *string `json:"start_time,omitempty"` + Total *float32 `json:"total,omitempty"` + Type string `json:"type"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsActionsActionidCancelResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsActionsActionidCancelResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentsAvailableVersionsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []string `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentsAvailableVersionsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentsAvailableVersionsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsBulkReassignResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + ActionId string `json:"actionId"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsBulkReassignResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsBulkReassignResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsBulkRequestDiagnosticsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + ActionId string `json:"actionId"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsBulkRequestDiagnosticsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + 
} + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsBulkRequestDiagnosticsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsBulkUnenrollResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + ActionId string `json:"actionId"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsBulkUnenrollResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsBulkUnenrollResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsBulkUpdateAgentTagsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + ActionId string `json:"actionId"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsBulkUpdateAgentTagsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsBulkUpdateAgentTagsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsBulkUpgradeResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + ActionId string `json:"actionId"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsBulkUpgradeResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsBulkUpgradeResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteFleetAgentsFilesFileidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Deleted bool `json:"deleted"` + Id string `json:"id"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteFleetAgentsFilesFileidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetAgentsFilesFileidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentsFilesFileidFilenameResponse struct { + Body 
[]byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentsFilesFileidFilenameResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentsFilesFileidFilenameResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentsSetupResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + IsReady bool `json:"isReady"` + IsSecretsStorageEnabled *bool `json:"is_secrets_storage_enabled,omitempty"` + IsSpaceAwarenessEnabled *bool `json:"is_space_awareness_enabled,omitempty"` + MissingOptionalFeatures []GetFleetAgentsSetup200MissingOptionalFeatures `json:"missing_optional_features"` + MissingRequirements []GetFleetAgentsSetup200MissingRequirements `json:"missing_requirements"` + PackageVerificationKeyId *string `json:"package_verification_key_id,omitempty"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetAgentsSetup200MissingOptionalFeatures string +type GetFleetAgentsSetup200MissingRequirements string + +// Status returns HTTPResponse.Status +func (r GetFleetAgentsSetupResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentsSetupResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsSetupResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + IsInitialized bool `json:"isInitialized"` + NonFatalErrors []struct { + Message string `json:"message"` + Name string `json:"name"` + } `json:"nonFatalErrors"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsSetupResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsSetupResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentsTagsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []string `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetAgentsTagsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// 
StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentsTagsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteFleetAgentsAgentidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Action DeleteFleetAgentsAgentid200Action `json:"action"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type DeleteFleetAgentsAgentid200Action string + +// Status returns HTTPResponse.Status +func (r DeleteFleetAgentsAgentidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetAgentsAgentidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentsAgentidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + AccessApiKey *string `json:"access_api_key,omitempty"` + AccessApiKeyId *string `json:"access_api_key_id,omitempty"` + Active bool `json:"active"` + Agent *GetFleetAgentsAgentid_200_Item_Agent `json:"agent,omitempty"` + AuditUnenrolledReason *string `json:"audit_unenrolled_reason,omitempty"` + Components *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Status GetFleetAgentsAgentid200ItemComponentsStatus `json:"status"` + Type string `json:"type"` + Units *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Payload *map[string]interface{} `json:"payload,omitempty"` + Status GetFleetAgentsAgentid200ItemComponentsUnitsStatus `json:"status"` + Type GetFleetAgentsAgentid200ItemComponentsUnitsType `json:"type"` + } `json:"units,omitempty"` + } `json:"components,omitempty"` + DefaultApiKey *string `json:"default_api_key,omitempty"` + DefaultApiKeyHistory *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"default_api_key_history,omitempty"` + DefaultApiKeyId *string `json:"default_api_key_id,omitempty"` + EnrolledAt string `json:"enrolled_at"` + Id string `json:"id"` + LastCheckin *string `json:"last_checkin,omitempty"` + LastCheckinMessage *string `json:"last_checkin_message,omitempty"` + LastCheckinStatus *GetFleetAgentsAgentid200ItemLastCheckinStatus `json:"last_checkin_status,omitempty"` + LastKnownStatus *GetFleetAgentsAgentid200ItemLastKnownStatus `json:"last_known_status,omitempty"` + LocalMetadata map[string]interface{} `json:"local_metadata"` + Metrics *struct { + CpuAvg *float32 `json:"cpu_avg,omitempty"` + MemorySizeByteAvg *float32 `json:"memory_size_byte_avg,omitempty"` + } `json:"metrics,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + Outputs *map[string]struct { + ApiKeyId *string `json:"api_key_id,omitempty"` + ToRetireApiKeyIds *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"to_retire_api_key_ids,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"outputs,omitempty"` + Packages []string `json:"packages"` + PolicyId *string `json:"policy_id,omitempty"` + PolicyRevision *float32 `json:"policy_revision"` + Sort *[]interface{} `json:"sort,omitempty"` + Status *GetFleetAgentsAgentid200ItemStatus `json:"status,omitempty"` + Tags *[]string `json:"tags,omitempty"` + Type 
GetFleetAgentsAgentid200ItemType `json:"type"` + UnenrolledAt *string `json:"unenrolled_at,omitempty"` + UnenrollmentStartedAt *string `json:"unenrollment_started_at,omitempty"` + UnhealthyReason *[]GetFleetAgentsAgentid200ItemUnhealthyReason `json:"unhealthy_reason"` + UpgradeAttempts *[]string `json:"upgrade_attempts"` + UpgradeDetails *struct { + ActionId string `json:"action_id"` + Metadata *struct { + DownloadPercent *float32 `json:"download_percent,omitempty"` + DownloadRate *float32 `json:"download_rate,omitempty"` + ErrorMsg *string `json:"error_msg,omitempty"` + FailedState *GetFleetAgentsAgentid200ItemUpgradeDetailsMetadataFailedState `json:"failed_state,omitempty"` + RetryErrorMsg *string `json:"retry_error_msg,omitempty"` + RetryUntil *string `json:"retry_until,omitempty"` + ScheduledAt *string `json:"scheduled_at,omitempty"` + } `json:"metadata,omitempty"` + State GetFleetAgentsAgentid200ItemUpgradeDetailsState `json:"state"` + TargetVersion string `json:"target_version"` + } `json:"upgrade_details"` + UpgradeStartedAt *string `json:"upgrade_started_at"` + UpgradedAt *string `json:"upgraded_at"` + UserProvidedMetadata *map[string]interface{} `json:"user_provided_metadata,omitempty"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetAgentsAgentid_200_Item_Agent struct { + Id string `json:"id"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type GetFleetAgentsAgentid200ItemComponentsStatus string +type GetFleetAgentsAgentid200ItemComponentsUnitsStatus string +type GetFleetAgentsAgentid200ItemComponentsUnitsType string +type GetFleetAgentsAgentid200ItemLastCheckinStatus string +type GetFleetAgentsAgentid200ItemLastKnownStatus string +type GetFleetAgentsAgentid200ItemStatus string +type GetFleetAgentsAgentid200ItemType string +type GetFleetAgentsAgentid200ItemUnhealthyReason string +type GetFleetAgentsAgentid200ItemUpgradeDetailsMetadataFailedState string +type GetFleetAgentsAgentid200ItemUpgradeDetailsState string + +// Status returns HTTPResponse.Status +func (r GetFleetAgentsAgentidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentsAgentidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutFleetAgentsAgentidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + AccessApiKey *string `json:"access_api_key,omitempty"` + AccessApiKeyId *string `json:"access_api_key_id,omitempty"` + Active bool `json:"active"` + Agent *PutFleetAgentsAgentid_200_Item_Agent `json:"agent,omitempty"` + AuditUnenrolledReason *string `json:"audit_unenrolled_reason,omitempty"` + Components *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Status PutFleetAgentsAgentid200ItemComponentsStatus `json:"status"` + Type string `json:"type"` + Units *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Payload *map[string]interface{} `json:"payload,omitempty"` + Status PutFleetAgentsAgentid200ItemComponentsUnitsStatus `json:"status"` + Type PutFleetAgentsAgentid200ItemComponentsUnitsType `json:"type"` + } `json:"units,omitempty"` + } 
`json:"components,omitempty"` + DefaultApiKey *string `json:"default_api_key,omitempty"` + DefaultApiKeyHistory *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"default_api_key_history,omitempty"` + DefaultApiKeyId *string `json:"default_api_key_id,omitempty"` + EnrolledAt string `json:"enrolled_at"` + Id string `json:"id"` + LastCheckin *string `json:"last_checkin,omitempty"` + LastCheckinMessage *string `json:"last_checkin_message,omitempty"` + LastCheckinStatus *PutFleetAgentsAgentid200ItemLastCheckinStatus `json:"last_checkin_status,omitempty"` + LastKnownStatus *PutFleetAgentsAgentid200ItemLastKnownStatus `json:"last_known_status,omitempty"` + LocalMetadata map[string]interface{} `json:"local_metadata"` + Metrics *struct { + CpuAvg *float32 `json:"cpu_avg,omitempty"` + MemorySizeByteAvg *float32 `json:"memory_size_byte_avg,omitempty"` + } `json:"metrics,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + Outputs *map[string]struct { + ApiKeyId *string `json:"api_key_id,omitempty"` + ToRetireApiKeyIds *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"to_retire_api_key_ids,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"outputs,omitempty"` + Packages []string `json:"packages"` + PolicyId *string `json:"policy_id,omitempty"` + PolicyRevision *float32 `json:"policy_revision"` + Sort *[]interface{} `json:"sort,omitempty"` + Status *PutFleetAgentsAgentid200ItemStatus `json:"status,omitempty"` + Tags *[]string `json:"tags,omitempty"` + Type PutFleetAgentsAgentid200ItemType `json:"type"` + UnenrolledAt *string `json:"unenrolled_at,omitempty"` + UnenrollmentStartedAt *string `json:"unenrollment_started_at,omitempty"` + UnhealthyReason *[]PutFleetAgentsAgentid200ItemUnhealthyReason `json:"unhealthy_reason"` + UpgradeAttempts *[]string `json:"upgrade_attempts"` + UpgradeDetails *struct { + ActionId string `json:"action_id"` + Metadata *struct { + DownloadPercent *float32 `json:"download_percent,omitempty"` + DownloadRate *float32 `json:"download_rate,omitempty"` + ErrorMsg *string `json:"error_msg,omitempty"` + FailedState *PutFleetAgentsAgentid200ItemUpgradeDetailsMetadataFailedState `json:"failed_state,omitempty"` + RetryErrorMsg *string `json:"retry_error_msg,omitempty"` + RetryUntil *string `json:"retry_until,omitempty"` + ScheduledAt *string `json:"scheduled_at,omitempty"` + } `json:"metadata,omitempty"` + State PutFleetAgentsAgentid200ItemUpgradeDetailsState `json:"state"` + TargetVersion string `json:"target_version"` + } `json:"upgrade_details"` + UpgradeStartedAt *string `json:"upgrade_started_at"` + UpgradedAt *string `json:"upgraded_at"` + UserProvidedMetadata *map[string]interface{} `json:"user_provided_metadata,omitempty"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PutFleetAgentsAgentid_200_Item_Agent struct { + Id string `json:"id"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetAgentsAgentid200ItemComponentsStatus string +type PutFleetAgentsAgentid200ItemComponentsUnitsStatus string +type PutFleetAgentsAgentid200ItemComponentsUnitsType string +type PutFleetAgentsAgentid200ItemLastCheckinStatus string +type PutFleetAgentsAgentid200ItemLastKnownStatus string +type 
PutFleetAgentsAgentid200ItemStatus string +type PutFleetAgentsAgentid200ItemType string +type PutFleetAgentsAgentid200ItemUnhealthyReason string +type PutFleetAgentsAgentid200ItemUpgradeDetailsMetadataFailedState string +type PutFleetAgentsAgentid200ItemUpgradeDetailsState string + +// Status returns HTTPResponse.Status +func (r PutFleetAgentsAgentidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetAgentsAgentidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsAgentidActionsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + AckData interface{} `json:"ack_data"` + Agents *[]string `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + Data interface{} `json:"data"` + Expiration *string `json:"expiration,omitempty"` + Id string `json:"id"` + MinimumExecutionDuration *float32 `json:"minimum_execution_duration,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + RolloutDurationSeconds *float32 `json:"rollout_duration_seconds,omitempty"` + SentAt *string `json:"sent_at,omitempty"` + SourceUri *string `json:"source_uri,omitempty"` + StartTime *string `json:"start_time,omitempty"` + Total *float32 `json:"total,omitempty"` + Type string `json:"type"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsAgentidActionsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsAgentidActionsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsAgentidReassignResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsAgentidReassignResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsAgentidReassignResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsAgentidRequestDiagnosticsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + ActionId string `json:"actionId"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsAgentidRequestDiagnosticsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// 
StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsAgentidRequestDiagnosticsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsAgentidUnenrollResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsAgentidUnenrollResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsAgentidUnenrollResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetAgentsAgentidUpgradeResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetAgentsAgentidUpgradeResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetAgentsAgentidUpgradeResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetAgentsAgentidUploadsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + ActionId string `json:"actionId"` + CreateTime string `json:"createTime"` + Error *string `json:"error,omitempty"` + FilePath string `json:"filePath"` + Id string `json:"id"` + Name string `json:"name"` + Status GetFleetAgentsAgentidUploads200ItemsStatus `json:"status"` + } `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetAgentsAgentidUploads200ItemsStatus string + +// Status returns HTTPResponse.Status +func (r GetFleetAgentsAgentidUploadsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetAgentsAgentidUploadsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetCheckPermissionsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Error *GetFleetCheckPermissions200Error `json:"error,omitempty"` + Success bool `json:"success"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetCheckPermissions200Error string + +// Status returns HTTPResponse.Status +func (r GetFleetCheckPermissionsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetCheckPermissionsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type 
GetFleetDataStreamsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + DataStreams []struct { + Dashboards []struct { + Id string `json:"id"` + Title string `json:"title"` + } `json:"dashboards"` + Dataset string `json:"dataset"` + Index string `json:"index"` + LastActivityMs float32 `json:"last_activity_ms"` + Namespace string `json:"namespace"` + Package string `json:"package"` + PackageVersion string `json:"package_version"` + ServiceDetails *struct { + Environment string `json:"environment"` + ServiceName string `json:"serviceName"` + } `json:"serviceDetails"` + SizeInBytes float32 `json:"size_in_bytes"` + SizeInBytesFormatted GetFleetDataStreams_200_DataStreams_SizeInBytesFormatted `json:"size_in_bytes_formatted"` + Type string `json:"type"` + } `json:"data_streams"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetDataStreams200DataStreamsSizeInBytesFormatted0 = float32 +type GetFleetDataStreams200DataStreamsSizeInBytesFormatted1 = string +type GetFleetDataStreams_200_DataStreams_SizeInBytesFormatted struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r GetFleetDataStreamsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetDataStreamsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEnrollmentApiKeysResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []EnrollmentApiKey `json:"items"` + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + List []struct { + // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. + Active bool `json:"active"` + + // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. + ApiKey string `json:"api_key"` + + // ApiKeyId The ID of the API key in the Security API. + ApiKeyId string `json:"api_key_id"` + CreatedAt string `json:"created_at"` + Hidden *bool `json:"hidden,omitempty"` + Id string `json:"id"` + + // Name The name of the enrollment API key. + Name *string `json:"name,omitempty"` + + // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. 
+ PolicyId *string `json:"policy_id,omitempty"` + } `json:"list"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetEnrollmentApiKeysResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEnrollmentApiKeysResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetEnrollmentApiKeysResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Action PostFleetEnrollmentApiKeys200Action `json:"action"` + Item struct { + // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. + Active bool `json:"active"` + + // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. + ApiKey string `json:"api_key"` + + // ApiKeyId The ID of the API key in the Security API. + ApiKeyId string `json:"api_key_id"` + CreatedAt string `json:"created_at"` + Hidden *bool `json:"hidden,omitempty"` + Id string `json:"id"` + + // Name The name of the enrollment API key. + Name *string `json:"name,omitempty"` + + // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. + PolicyId *string `json:"policy_id,omitempty"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PostFleetEnrollmentApiKeys200Action string + +// Status returns HTTPResponse.Status +func (r PostFleetEnrollmentApiKeysResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetEnrollmentApiKeysResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteFleetEnrollmentApiKeysKeyidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Action DeleteFleetEnrollmentApiKeysKeyid200Action `json:"action"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type DeleteFleetEnrollmentApiKeysKeyid200Action string + +// Status returns HTTPResponse.Status +func (r DeleteFleetEnrollmentApiKeysKeyidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetEnrollmentApiKeysKeyidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEnrollmentApiKeysKeyidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. 
+ Active bool `json:"active"` + + // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. + ApiKey string `json:"api_key"` + + // ApiKeyId The ID of the API key in the Security API. + ApiKeyId string `json:"api_key_id"` + CreatedAt string `json:"created_at"` + Hidden *bool `json:"hidden,omitempty"` + Id string `json:"id"` + + // Name The name of the enrollment API key. + Name *string `json:"name,omitempty"` + + // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. + PolicyId *string `json:"policy_id,omitempty"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetEnrollmentApiKeysKeyidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEnrollmentApiKeysKeyidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetEpmBulkAssetsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + AppLink *string `json:"appLink,omitempty"` + Attributes struct { + Description *string `json:"description,omitempty"` + Service *string `json:"service,omitempty"` + Title *string `json:"title,omitempty"` + } `json:"attributes"` + Id string `json:"id"` + Type string `json:"type"` + UpdatedAt *string `json:"updatedAt,omitempty"` + } `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetEpmBulkAssetsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetEpmBulkAssetsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEpmCategoriesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + Count float32 `json:"count"` + Id string `json:"id"` + ParentId *string `json:"parent_id,omitempty"` + ParentTitle *string `json:"parent_title,omitempty"` + Title string `json:"title"` + } `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetEpmCategoriesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEpmCategoriesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetEpmCustomIntegrationsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + UnderscoreMeta struct { + InstallSource string `json:"install_source"` + Name string `json:"name"` + } 
`json:"_meta"` + Items []PostFleetEpmCustomIntegrations_200_Items_Item `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PostFleetEpmCustomIntegrations200Items0 struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PostFleetEpmCustomIntegrations_200_Items_0_Type `json:"type"` +} +type PostFleetEpmCustomIntegrations200Items0Type0 string +type PostFleetEpmCustomIntegrations200Items0Type1 = string +type PostFleetEpmCustomIntegrations_200_Items_0_Type struct { + union json.RawMessage +} +type PostFleetEpmCustomIntegrations200Items1 struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type PostFleetEpmCustomIntegrations200Items1Type `json:"type"` + Version *string `json:"version,omitempty"` +} +type PostFleetEpmCustomIntegrations200Items1Type string +type PostFleetEpmCustomIntegrations_200_Items_Item struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r PostFleetEpmCustomIntegrationsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetEpmCustomIntegrationsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutFleetEpmCustomIntegrationsPkgnameResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PutFleetEpmCustomIntegrationsPkgnameResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetEpmCustomIntegrationsPkgnameResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEpmDataStreamsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + Name string `json:"name"` + } `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetEpmDataStreamsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEpmDataStreamsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEpmPackagesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []PackageListItem `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status 
+func (r GetFleetEpmPackagesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEpmPackagesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetEpmPackagesResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r PostFleetEpmPackagesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetEpmPackagesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetEpmPackagesBulkResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []PostFleetEpmPackagesBulk_200_Items_Item `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PostFleetEpmPackagesBulk200Items0 struct { + Name string `json:"name"` + Result struct { + Assets *[]PostFleetEpmPackagesBulk_200_Items_0_Result_Assets_Item `json:"assets,omitempty"` + Error interface{} `json:"error"` + InstallSource *string `json:"installSource,omitempty"` + InstallType string `json:"installType"` + Status *PostFleetEpmPackagesBulk200Items0ResultStatus `json:"status,omitempty"` + } `json:"result"` + Version string `json:"version"` +} +type PostFleetEpmPackagesBulk200Items0ResultAssets0 struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PostFleetEpmPackagesBulk_200_Items_0_Result_Assets_0_Type `json:"type"` +} +type PostFleetEpmPackagesBulk200Items0ResultAssets0Type0 string +type PostFleetEpmPackagesBulk200Items0ResultAssets0Type1 = string +type PostFleetEpmPackagesBulk_200_Items_0_Result_Assets_0_Type struct { + union json.RawMessage +} +type PostFleetEpmPackagesBulk200Items0ResultAssets1 struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type PostFleetEpmPackagesBulk200Items0ResultAssets1Type `json:"type"` + Version *string `json:"version,omitempty"` +} +type PostFleetEpmPackagesBulk200Items0ResultAssets1Type string +type PostFleetEpmPackagesBulk_200_Items_0_Result_Assets_Item struct { + union json.RawMessage +} +type PostFleetEpmPackagesBulk200Items0ResultStatus string +type PostFleetEpmPackagesBulk200Items1 struct { + Error PostFleetEpmPackagesBulk_200_Items_1_Error `json:"error"` + Name string `json:"name"` + StatusCode float32 `json:"statusCode"` +} +type PostFleetEpmPackagesBulk200Items1Error0 = string +type PostFleetEpmPackagesBulk200Items1Error1 = interface{} +type PostFleetEpmPackagesBulk_200_Items_1_Error struct { + union json.RawMessage +} +type PostFleetEpmPackagesBulk_200_Items_Item struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r PostFleetEpmPackagesBulkResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetEpmPackagesBulkResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetEpmPackagesBulkUninstallResponse struct { + Body 
[]byte + HTTPResponse *http.Response + JSON200 *struct { + TaskId string `json:"taskId"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetEpmPackagesBulkUninstallResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetEpmPackagesBulkUninstallResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEpmPackagesBulkUninstallTaskidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Error *struct { + Message string `json:"message"` + } `json:"error,omitempty"` + Results *[]struct { + Error *struct { + Message string `json:"message"` + } `json:"error,omitempty"` + Name string `json:"name"` + Success bool `json:"success"` + } `json:"results,omitempty"` + Status string `json:"status"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetEpmPackagesBulkUninstallTaskidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEpmPackagesBulkUninstallTaskidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetEpmPackagesBulkUpgradeResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + TaskId string `json:"taskId"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetEpmPackagesBulkUpgradeResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetEpmPackagesBulkUpgradeResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEpmPackagesBulkUpgradeTaskidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Error *struct { + Message string `json:"message"` + } `json:"error,omitempty"` + Results *[]struct { + Error *struct { + Message string `json:"message"` + } `json:"error,omitempty"` + Name string `json:"name"` + Success bool `json:"success"` + } `json:"results,omitempty"` + Status string `json:"status"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetEpmPackagesBulkUpgradeTaskidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) 
+} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEpmPackagesBulkUpgradeTaskidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEpmPackagesInstalledResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + DataStreams []struct { + Name string `json:"name"` + Title string `json:"title"` + } `json:"dataStreams"` + Description *string `json:"description,omitempty"` + Icons *[]struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"icons,omitempty"` + Name string `json:"name"` + Status string `json:"status"` + Title *string `json:"title,omitempty"` + Version string `json:"version"` + } `json:"items"` + SearchAfter *[]GetFleetEpmPackagesInstalled_200_SearchAfter_Item `json:"searchAfter,omitempty"` + Total float32 `json:"total"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetEpmPackagesInstalled200SearchAfter0 = string +type GetFleetEpmPackagesInstalled200SearchAfter1 = float32 +type GetFleetEpmPackagesInstalled200SearchAfter2 = bool +type GetFleetEpmPackagesInstalled200SearchAfter3 = interface{} +type GetFleetEpmPackagesInstalled200SearchAfter4 = interface{} +type GetFleetEpmPackagesInstalled_200_SearchAfter_Item struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r GetFleetEpmPackagesInstalledResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEpmPackagesInstalledResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEpmPackagesLimitedResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []string `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetEpmPackagesLimitedResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEpmPackagesLimitedResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEpmPackagesPkgnameStatsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Response struct { + AgentPolicyCount float32 `json:"agent_policy_count"` + PackagePolicyCount float32 `json:"package_policy_count"` + } `json:"response"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetEpmPackagesPkgnameStatsResponse) Status() string { + if 
r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEpmPackagesPkgnameStatsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteFleetEpmPackagesPkgnamePkgversionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []DeleteFleetEpmPackagesPkgnamePkgversion_200_Items_Item `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type DeleteFleetEpmPackagesPkgnamePkgversion200Items0 struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type DeleteFleetEpmPackagesPkgnamePkgversion_200_Items_0_Type `json:"type"` +} +type DeleteFleetEpmPackagesPkgnamePkgversion200Items0Type0 string +type DeleteFleetEpmPackagesPkgnamePkgversion200Items0Type1 = string +type DeleteFleetEpmPackagesPkgnamePkgversion_200_Items_0_Type struct { + union json.RawMessage +} +type DeleteFleetEpmPackagesPkgnamePkgversion200Items1 struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type DeleteFleetEpmPackagesPkgnamePkgversion200Items1Type `json:"type"` + Version *string `json:"version,omitempty"` +} +type DeleteFleetEpmPackagesPkgnamePkgversion200Items1Type string +type DeleteFleetEpmPackagesPkgnamePkgversion_200_Items_Item struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r DeleteFleetEpmPackagesPkgnamePkgversionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetEpmPackagesPkgnamePkgversionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEpmPackagesPkgnamePkgversionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item PackageInfo `json:"item"` + Metadata *struct { + HasPolicies bool `json:"has_policies"` + } `json:"metadata,omitempty"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetEpmPackagesPkgnamePkgversionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEpmPackagesPkgnamePkgversionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetEpmPackagesPkgnamePkgversionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + UnderscoreMeta struct { + InstallSource string `json:"install_source"` + Name string `json:"name"` + } `json:"_meta"` + Items []PostFleetEpmPackagesPkgnamePkgversion_200_Items_Item `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type 
PostFleetEpmPackagesPkgnamePkgversion200Items0 struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PostFleetEpmPackagesPkgnamePkgversion_200_Items_0_Type `json:"type"` +} +type PostFleetEpmPackagesPkgnamePkgversion200Items0Type0 string +type PostFleetEpmPackagesPkgnamePkgversion200Items0Type1 = string +type PostFleetEpmPackagesPkgnamePkgversion_200_Items_0_Type struct { + union json.RawMessage +} +type PostFleetEpmPackagesPkgnamePkgversion200Items1 struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type PostFleetEpmPackagesPkgnamePkgversion200Items1Type `json:"type"` + Version *string `json:"version,omitempty"` +} +type PostFleetEpmPackagesPkgnamePkgversion200Items1Type string +type PostFleetEpmPackagesPkgnamePkgversion_200_Items_Item struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r PostFleetEpmPackagesPkgnamePkgversionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetEpmPackagesPkgnamePkgversionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutFleetEpmPackagesPkgnamePkgversionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item PutFleetEpmPackagesPkgnamePkgversion_200_Item `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_Conditions_Elastic struct { + Capabilities *[]string `json:"capabilities,omitempty"` + Subscription *string `json:"subscription,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_Conditions_Kibana struct { + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_Conditions struct { + Elastic *PutFleetEpmPackagesPkgnamePkgversion_200_Item_Conditions_Elastic `json:"elastic,omitempty"` + Kibana *PutFleetEpmPackagesPkgnamePkgversion_200_Item_Conditions_Kibana `json:"kibana,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_Discovery_Datasets_Item struct { + Name string `json:"name"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_Discovery_Fields_Item struct { + Name string `json:"name"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_Discovery struct { + Datasets *[]PutFleetEpmPackagesPkgnamePkgversion_200_Item_Discovery_Datasets_Item `json:"datasets,omitempty"` + Fields *[]PutFleetEpmPackagesPkgnamePkgversion_200_Item_Discovery_Fields_Item `json:"fields,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_Icons_Item struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type 
PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoAdditionalSpacesInstalledKibanaType0 string +type PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoAdditionalSpacesInstalledKibanaType1 = string +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_AdditionalSpacesInstalledKibana_Type struct { + union json.RawMessage +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_AdditionalSpacesInstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_AdditionalSpacesInstalledKibana_Type `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_ExperimentalDataStreamFeatures_Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_ExperimentalDataStreamFeatures_Item struct { + DataStream string `json:"data_stream"` + Features PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_ExperimentalDataStreamFeatures_Features `json:"features"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoInstallSource string +type PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoInstallStatus string +type PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoInstalledEsType string +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_InstalledEs_Item struct { + Deferred *bool `json:"deferred,omitempty"` + Id string `json:"id"` + Type PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoInstalledEsType `json:"type"` + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoInstalledKibanaType0 string +type PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoInstalledKibanaType1 = string +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_InstalledKibana_Type struct { + union json.RawMessage +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_InstalledKibana_Item struct { + Id string `json:"id"` + OriginId *string `json:"originId,omitempty"` + Type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_InstalledKibana_Type `json:"type"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_LatestExecutedState struct { + Error *string `json:"error,omitempty"` + Name *string `json:"name,omitempty"` + StartedAt *string `json:"started_at,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_LatestInstallFailedAttempts_Error struct { + Message string `json:"message"` + Name string `json:"name"` + Stack *string `json:"stack,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_LatestInstallFailedAttempts_Item struct { + CreatedAt string `json:"created_at"` + Error 
PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_LatestInstallFailedAttempts_Error `json:"error"` + TargetVersion string `json:"target_version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoVerificationStatus string +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo struct { + AdditionalSpacesInstalledKibana *map[string][]PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_AdditionalSpacesInstalledKibana_Item `json:"additional_spaces_installed_kibana,omitempty"` + CreatedAt *string `json:"created_at,omitempty"` + ExperimentalDataStreamFeatures *[]PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_ExperimentalDataStreamFeatures_Item `json:"experimental_data_stream_features,omitempty"` + InstallFormatSchemaVersion *string `json:"install_format_schema_version,omitempty"` + InstallSource PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoInstallSource `json:"install_source"` + InstallStatus PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoInstallStatus `json:"install_status"` + InstalledEs []PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_InstalledEs_Item `json:"installed_es"` + InstalledKibana []PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_InstalledKibana_Item `json:"installed_kibana"` + InstalledKibanaSpaceId *string `json:"installed_kibana_space_id,omitempty"` + LatestExecutedState *PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_LatestExecutedState `json:"latest_executed_state,omitempty"` + LatestInstallFailedAttempts *[]PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo_LatestInstallFailedAttempts_Item `json:"latest_install_failed_attempts,omitempty"` + Name string `json:"name"` + Namespaces *[]string `json:"namespaces,omitempty"` + PreviousVersion *string `json:"previous_version"` + Type string `json:"type"` + UpdatedAt *string `json:"updated_at,omitempty"` + VerificationKeyId *string `json:"verification_key_id"` + VerificationStatus PutFleetEpmPackagesPkgnamePkgversion200ItemInstallationInfoVerificationStatus `json:"verification_status"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion200ItemOwnerType string +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_Owner struct { + Github *string `json:"github,omitempty"` + Type *PutFleetEpmPackagesPkgnamePkgversion200ItemOwnerType `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion200ItemRelease string +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_Source struct { + License string `json:"license"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PutFleetEpmPackagesPkgnamePkgversion200ItemType0 string +type PutFleetEpmPackagesPkgnamePkgversion200ItemType1 string +type PutFleetEpmPackagesPkgnamePkgversion200ItemType2 string +type PutFleetEpmPackagesPkgnamePkgversion200ItemType3 = string +type PutFleetEpmPackagesPkgnamePkgversion_200_Item_Type struct { + union json.RawMessage +} +type PutFleetEpmPackagesPkgnamePkgversion_200_Item struct { + Agent *struct { + Privileges *struct { + Root *bool `json:"root,omitempty"` + } `json:"privileges,omitempty"` + } `json:"agent,omitempty"` + AssetTags *[]struct { + AssetIds *[]string `json:"asset_ids,omitempty"` + AssetTypes *[]string `json:"asset_types,omitempty"` + Text string `json:"text"` + } 
`json:"asset_tags,omitempty"` + Assets map[string]interface{} `json:"assets"` + Categories *[]string `json:"categories,omitempty"` + Conditions *PutFleetEpmPackagesPkgnamePkgversion_200_Item_Conditions `json:"conditions,omitempty"` + DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` + Description *string `json:"description,omitempty"` + Discovery *PutFleetEpmPackagesPkgnamePkgversion_200_Item_Discovery `json:"discovery,omitempty"` + Download *string `json:"download,omitempty"` + Elasticsearch *map[string]interface{} `json:"elasticsearch,omitempty"` + FormatVersion *string `json:"format_version,omitempty"` + Icons *[]PutFleetEpmPackagesPkgnamePkgversion_200_Item_Icons_Item `json:"icons,omitempty"` + InstallationInfo *PutFleetEpmPackagesPkgnamePkgversion_200_Item_InstallationInfo `json:"installationInfo,omitempty"` + Internal *bool `json:"internal,omitempty"` + KeepPoliciesUpToDate *bool `json:"keepPoliciesUpToDate,omitempty"` + LatestVersion *string `json:"latestVersion,omitempty"` + License *string `json:"license,omitempty"` + LicensePath *string `json:"licensePath,omitempty"` + Name string `json:"name"` + Notice *string `json:"notice,omitempty"` + Owner *PutFleetEpmPackagesPkgnamePkgversion_200_Item_Owner `json:"owner,omitempty"` + Path *string `json:"path,omitempty"` + PolicyTemplates *[]map[string]interface{} `json:"policy_templates,omitempty"` + Readme *string `json:"readme,omitempty"` + Release *PutFleetEpmPackagesPkgnamePkgversion200ItemRelease `json:"release,omitempty"` + Screenshots *[]struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"screenshots,omitempty"` + SignaturePath *string `json:"signature_path,omitempty"` + Source *PutFleetEpmPackagesPkgnamePkgversion_200_Item_Source `json:"source,omitempty"` + Status *string `json:"status,omitempty"` + Title string `json:"title"` + Type *PutFleetEpmPackagesPkgnamePkgversion_200_Item_Type `json:"type,omitempty"` + Vars *[]map[string]interface{} `json:"vars,omitempty"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} + +// Status returns HTTPResponse.Status +func (r PutFleetEpmPackagesPkgnamePkgversionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetEpmPackagesPkgnamePkgversionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Success bool `json:"success"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + 
return 0 +} + +type DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Success bool `json:"success"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Success bool `json:"success"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + Error interface{} `json:"error"` + Success bool `json:"success"` + TransformId string `json:"transformId"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetEpmPackagesPkgnamePkgversionFilepathResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *interface{} + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetEpmPackagesPkgnamePkgversionFilepathResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetEpmPackagesPkgnamePkgversionFilepathResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type 
GetFleetEpmTemplatesPkgnamePkgversionInputsResponse struct {
+	Body []byte
+	HTTPResponse *http.Response
+	JSON200 *struct {
+		union json.RawMessage
+	}
+	JSON400 *struct {
+		Attributes interface{} `json:"attributes"`
+		Error *string `json:"error,omitempty"`
+		ErrorType *string `json:"errorType,omitempty"`
+		Message string `json:"message"`
+		StatusCode *float32 `json:"statusCode,omitempty"`
+	}
+}
+type GetFleetEpmTemplatesPkgnamePkgversionInputs2000 = string
+type GetFleetEpmTemplatesPkgnamePkgversionInputs2001 struct {
+	Inputs []struct {
+		Id string `json:"id"`
+		Streams *[]GetFleetEpmTemplatesPkgnamePkgversionInputs_200_1_Inputs_Streams_Item `json:"streams,omitempty"`
+		Type string `json:"type"`
+	} `json:"inputs"`
+}
+type GetFleetEpmTemplatesPkgnamePkgversionInputs_200_1_Inputs_Streams_DataStream struct {
+	Dataset string `json:"dataset"`
+	Type *string `json:"type,omitempty"`
+	AdditionalProperties map[string]interface{} `json:"-"`
+}
+type GetFleetEpmTemplatesPkgnamePkgversionInputs_200_1_Inputs_Streams_Item struct {
+	DataStream GetFleetEpmTemplatesPkgnamePkgversionInputs_200_1_Inputs_Streams_DataStream `json:"data_stream"`
+	Id string `json:"id"`
+	AdditionalProperties map[string]interface{} `json:"-"`
+}
+
+// Status returns HTTPResponse.Status
+func (r GetFleetEpmTemplatesPkgnamePkgversionInputsResponse) Status() string {
+	if r.HTTPResponse != nil {
+		return r.HTTPResponse.Status
+	}
+	return http.StatusText(0)
+}
+
+// StatusCode returns HTTPResponse.StatusCode
+func (r GetFleetEpmTemplatesPkgnamePkgversionInputsResponse) StatusCode() int {
+	if r.HTTPResponse != nil {
+		return r.HTTPResponse.StatusCode
+	}
+	return 0
+}
+
+type GetFleetEpmVerificationKeyIdResponse struct {
+	Body []byte
+	HTTPResponse *http.Response
+	JSON200 *struct {
+		Id *string `json:"id"`
+	}
+	JSON400 *struct {
+		Attributes interface{} `json:"attributes"`
+		Error *string `json:"error,omitempty"`
+		ErrorType *string `json:"errorType,omitempty"`
+		Message string `json:"message"`
+		StatusCode *float32 `json:"statusCode,omitempty"`
+	}
+}
+
+// Status returns HTTPResponse.Status
+func (r GetFleetEpmVerificationKeyIdResponse) Status() string {
+	if r.HTTPResponse != nil {
+		return r.HTTPResponse.Status
+	}
+	return http.StatusText(0)
+}
+
+// StatusCode returns HTTPResponse.StatusCode
+func (r GetFleetEpmVerificationKeyIdResponse) StatusCode() int {
+	if r.HTTPResponse != nil {
+		return r.HTTPResponse.StatusCode
+	}
+	return 0
+}
+
+type GetFleetFleetServerHostsResponse struct {
+	Body []byte
+	HTTPResponse *http.Response
+	JSON200 *struct {
+		Items []ServerHost `json:"items"`
+		Page float32 `json:"page"`
+		PerPage float32 `json:"perPage"`
+		Total float32 `json:"total"`
+	}
+	JSON400 *struct {
+		Attributes interface{} `json:"attributes"`
+		Error *string `json:"error,omitempty"`
+		ErrorType *string `json:"errorType,omitempty"`
+		Message string `json:"message"`
+		StatusCode *float32 `json:"statusCode,omitempty"`
+	}
+}
+
+// Status returns HTTPResponse.Status
+func (r GetFleetFleetServerHostsResponse) Status() string {
+	if r.HTTPResponse != nil {
+		return r.HTTPResponse.Status
+	}
+	return http.StatusText(0)
+}
+
+// StatusCode returns HTTPResponse.StatusCode
+func (r GetFleetFleetServerHostsResponse) StatusCode() int {
+	if r.HTTPResponse != nil {
+		return r.HTTPResponse.StatusCode
+	}
+	return 0
+}
+
+type PostFleetFleetServerHostsResponse struct {
+	Body []byte
+	HTTPResponse *http.Response
+	JSON200 *struct {
+		Item ServerHost `json:"item"`
+	}
+	JSON400 *struct {
+		Attributes interface{} 
`json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetFleetServerHostsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetFleetServerHostsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteFleetFleetServerHostsItemidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Id string `json:"id"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteFleetFleetServerHostsItemidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetFleetServerHostsItemidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetFleetServerHostsItemidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item ServerHost `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetFleetServerHostsItemidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetFleetServerHostsItemidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutFleetFleetServerHostsItemidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item ServerHost `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PutFleetFleetServerHostsItemidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetFleetServerHostsItemidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetHealthCheckResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + HostId *string `json:"host_id,omitempty"` + Name *string `json:"name,omitempty"` + Status string `json:"status"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Attributes interface{} `json:"attributes"` + 
Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetHealthCheckResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetHealthCheckResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetKubernetesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item string `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetKubernetesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetKubernetesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetKubernetesDownloadResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *string + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetKubernetesDownloadResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetKubernetesDownloadResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetLogstashApiKeysResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + ApiKey string `json:"api_key"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetLogstashApiKeysResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetLogstashApiKeysResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetMessageSigningServiceRotateKeyPairResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Message string `json:"message"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON500 *struct { 
+ Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetMessageSigningServiceRotateKeyPairResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetMessageSigningServiceRotateKeyPairResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetOutputsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []OutputUnion `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetOutputsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetOutputsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetOutputsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item OutputUnion `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetOutputsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetOutputsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteFleetOutputsOutputidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Id string `json:"id"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteFleetOutputsOutputidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetOutputsOutputidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetOutputsOutputidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item OutputUnion `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string 
`json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetOutputsOutputidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetOutputsOutputidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PutFleetOutputsOutputidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item OutputUnion `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PutFleetOutputsOutputidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetOutputsOutputidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetOutputsOutputidHealthResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Message long message if unhealthy + Message string `json:"message"` + + // State state of output, HEALTHY or DEGRADED + State string `json:"state"` + + // Timestamp timestamp of reported state + Timestamp string `json:"timestamp"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetOutputsOutputidHealthResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetOutputsOutputidHealthResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetPackagePoliciesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []PackagePolicy `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r GetFleetPackagePoliciesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetPackagePoliciesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetPackagePoliciesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item PackagePolicy `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message 
string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON409 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetPackagePoliciesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetPackagePoliciesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetPackagePoliciesBulkGetResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. + AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions"` + Agents *float32 `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + CreatedBy string `json:"created_by"` + + // Description Package policy description + Description *string `json:"description,omitempty"` + Elasticsearch *PostFleetPackagePoliciesBulkGet_200_Items_Elasticsearch `json:"elasticsearch,omitempty"` + Enabled bool `json:"enabled"` + Id string `json:"id"` + Inputs PostFleetPackagePoliciesBulkGet_200_Items_Inputs `json:"inputs"` + IsManaged *bool `json:"is_managed,omitempty"` + + // Name Package policy name (should be unique) + Name string `json:"name"` + + // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id"` + + // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *struct { + Inputs *map[string]interface{} `json:"inputs,omitempty"` + } `json:"overrides"` + Package *struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package,omitempty"` + + // PolicyId Agent policy ID where that package policy will be added + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Revision float32 `json:"revision"` + SecretReferences *[]struct { + Id string `json:"id"` + } `json:"secret_references,omitempty"` + SpaceIds *[]string `json:"spaceIds,omitempty"` + + // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. 
+ SupportsAgentless *bool `json:"supports_agentless"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Vars *PostFleetPackagePoliciesBulkGet_200_Items_Vars `json:"vars,omitempty"` + Version *string `json:"version,omitempty"` + } `json:"items"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Message string `json:"message"` + } +} +type PostFleetPackagePoliciesBulkGet_200_Items_Elasticsearch_Privileges struct { + Cluster *[]string `json:"cluster,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesBulkGet_200_Items_Elasticsearch struct { + Privileges *PostFleetPackagePoliciesBulkGet_200_Items_Elasticsearch_Privileges `json:"privileges,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesBulkGet200ItemsInputs0 = []struct { + CompiledInput interface{} `json:"compiled_input"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + PolicyTemplate *string `json:"policy_template,omitempty"` + Streams []struct { + CompiledStream interface{} `json:"compiled_stream"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + DataStream struct { + Dataset string `json:"dataset"` + Elasticsearch *struct { + DynamicDataset *bool `json:"dynamic_dataset,omitempty"` + DynamicNamespace *bool `json:"dynamic_namespace,omitempty"` + Privileges *struct { + Indices *[]string `json:"indices,omitempty"` + } `json:"privileges,omitempty"` + } `json:"elasticsearch,omitempty"` + Type string `json:"type"` + } `json:"data_stream"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + Release *PostFleetPackagePoliciesBulkGet200ItemsInputs0StreamsRelease `json:"release,omitempty"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + } `json:"streams"` + Type string `json:"type"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` +} +type PostFleetPackagePoliciesBulkGet200ItemsInputs0StreamsRelease string +type PostFleetPackagePoliciesBulkGet200ItemsInputs1 map[string]struct { + // Enabled enable or disable that input, (default to true) + Enabled *bool `json:"enabled,omitempty"` + + // Streams Input streams (see integration documentation to know what streams are available) + Streams *map[string]struct { + // Enabled enable or disable that stream, (default to true) + Enabled *bool 
`json:"enabled,omitempty"` + + // Vars Input/stream level variable (see integration documentation for more information) + Vars *map[string]*PostFleetPackagePoliciesBulkGet_200_Items_Inputs_1_Streams_Vars_AdditionalProperties `json:"vars,omitempty"` + } `json:"streams,omitempty"` + + // Vars Input/stream level variable (see integration documentation for more information) + Vars *map[string]*PostFleetPackagePoliciesBulkGet_200_Items_Inputs_1_Vars_AdditionalProperties `json:"vars,omitempty"` +} +type PostFleetPackagePoliciesBulkGet200ItemsInputs1StreamsVars0 = bool +type PostFleetPackagePoliciesBulkGet200ItemsInputs1StreamsVars1 = string +type PostFleetPackagePoliciesBulkGet200ItemsInputs1StreamsVars2 = float32 +type PostFleetPackagePoliciesBulkGet200ItemsInputs1StreamsVars3 = []string +type PostFleetPackagePoliciesBulkGet200ItemsInputs1StreamsVars4 = []float32 +type PostFleetPackagePoliciesBulkGet200ItemsInputs1StreamsVars5 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetPackagePoliciesBulkGet_200_Items_Inputs_1_Streams_Vars_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetPackagePoliciesBulkGet200ItemsInputs1Vars0 = bool +type PostFleetPackagePoliciesBulkGet200ItemsInputs1Vars1 = string +type PostFleetPackagePoliciesBulkGet200ItemsInputs1Vars2 = float32 +type PostFleetPackagePoliciesBulkGet200ItemsInputs1Vars3 = []string +type PostFleetPackagePoliciesBulkGet200ItemsInputs1Vars4 = []float32 +type PostFleetPackagePoliciesBulkGet200ItemsInputs1Vars5 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetPackagePoliciesBulkGet_200_Items_Inputs_1_Vars_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetPackagePoliciesBulkGet_200_Items_Inputs struct { + union json.RawMessage +} +type PostFleetPackagePoliciesBulkGet200ItemsVars0 map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` +} +type PostFleetPackagePoliciesBulkGet200ItemsVars1 map[string]*PostFleetPackagePoliciesBulkGet_200_Items_Vars_1_AdditionalProperties +type PostFleetPackagePoliciesBulkGet200ItemsVars10 = bool +type PostFleetPackagePoliciesBulkGet200ItemsVars11 = string +type PostFleetPackagePoliciesBulkGet200ItemsVars12 = float32 +type PostFleetPackagePoliciesBulkGet200ItemsVars13 = []string +type PostFleetPackagePoliciesBulkGet200ItemsVars14 = []float32 +type PostFleetPackagePoliciesBulkGet200ItemsVars15 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetPackagePoliciesBulkGet_200_Items_Vars_1_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetPackagePoliciesBulkGet_200_Items_Vars struct { + union json.RawMessage +} + +// Status returns HTTPResponse.Status +func (r PostFleetPackagePoliciesBulkGetResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetPackagePoliciesBulkGetResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetPackagePoliciesDeleteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + Body *struct { + Message string `json:"message"` + } `json:"body,omitempty"` + Id string `json:"id"` + Name *string `json:"name,omitempty"` + OutputId *string `json:"output_id"` + Package struct { + ExperimentalDataStreamFeatures *[]struct { + 
DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package"` + + // PolicyId Use `policy_ids` instead + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id"` + PolicyIds []string `json:"policy_ids"` + StatusCode *float32 `json:"statusCode,omitempty"` + Success bool `json:"success"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetPackagePoliciesDeleteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetPackagePoliciesDeleteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetPackagePoliciesUpgradeResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + Body *struct { + Message string `json:"message"` + } `json:"body,omitempty"` + Id string `json:"id"` + Name *string `json:"name,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + Success bool `json:"success"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r PostFleetPackagePoliciesUpgradeResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetPackagePoliciesUpgradeResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type PostFleetPackagePoliciesUpgradeDryrunResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + AgentDiff *[][]PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Item `json:"agent_diff,omitempty"` + Body *struct { + Message string `json:"message"` + } `json:"body,omitempty"` + Diff *[]PostFleetPackagePoliciesUpgradeDryrun_200_Diff_Item `json:"diff,omitempty"` + HasErrors bool `json:"hasErrors"` + Name *string `json:"name,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_DataStream struct { + Namespace string `json:"namespace"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type 
PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Meta_Package struct { + Name string `json:"name"` + Version string `json:"version"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Meta struct { + Package PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Meta_Package `json:"package"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun200AgentDiffProcessorsAddFieldsFields0 = string +type PostFleetPackagePoliciesUpgradeDryrun200AgentDiffProcessorsAddFieldsFields1 = float32 +type PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Processors_AddFields_Fields_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Processors_AddFields struct { + Fields map[string]PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Processors_AddFields_Fields_AdditionalProperties `json:"fields"` + Target string `json:"target"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Processors_Item struct { + AddFields PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Processors_AddFields `json:"add_fields"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Streams_DataStream struct { + Dataset string `json:"dataset"` + Type *string `json:"type,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Streams_Item struct { + DataStream PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Streams_DataStream `json:"data_stream"` + Id *string `json:"id,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Item struct { + DataStream PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_DataStream `json:"data_stream"` + Id string `json:"id"` + Meta *PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Meta `json:"meta,omitempty"` + Name string `json:"name"` + PackagePolicyId string `json:"package_policy_id"` + Processors *[]PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Processors_Item `json:"processors,omitempty"` + Revision float32 `json:"revision"` + Streams *[]PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Streams_Item `json:"streams,omitempty"` + Type string `json:"type"` + UseOutput string `json:"use_output"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun200Diff0 struct { + // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. + AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions"` + Agents *float32 `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + CreatedBy string `json:"created_by"` - // PrivateKeyData Base64-encoded PEM private key content for PKI authentication (Other provider only). Required for PKI. 
- PrivateKeyData *string `json:"privateKeyData,omitempty"` + // Description Package policy description + Description *string `json:"description,omitempty"` + Elasticsearch *PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Elasticsearch `json:"elasticsearch,omitempty"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + Inputs PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Inputs `json:"inputs"` + IsManaged *bool `json:"is_managed,omitempty"` + + // Name Package policy name (should be unique) + Name string `json:"name"` + + // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id"` + + // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *struct { + Inputs *map[string]interface{} `json:"inputs,omitempty"` + } `json:"overrides"` + Package *struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package,omitempty"` + + // PolicyId Agent policy ID where that package policy will be added + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Revision float32 `json:"revision"` + SecretReferences *[]struct { + Id string `json:"id"` + } `json:"secret_references,omitempty"` + SpaceIds *[]string `json:"spaceIds,omitempty"` + + // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. + SupportsAgentless *bool `json:"supports_agentless"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Vars *PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Vars `json:"vars,omitempty"` + Version *string `json:"version,omitempty"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Elasticsearch_Privileges struct { + Cluster *[]string `json:"cluster,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` } +type PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Elasticsearch struct { + Privileges *PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Elasticsearch_Privileges `json:"privileges,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs0 = []struct { + CompiledInput interface{} `json:"compiled_input"` -// GetDataViewsResponseItem defines model for get_data_views_response_item. 
-type GetDataViewsResponseItem struct { - Id *string `json:"id,omitempty"` - Name *string `json:"name,omitempty"` - Namespaces *[]string `json:"namespaces,omitempty"` - Title *string `json:"title,omitempty"` - TypeMeta *map[string]interface{} `json:"typeMeta,omitempty"` + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + PolicyTemplate *string `json:"policy_template,omitempty"` + Streams []struct { + CompiledStream interface{} `json:"compiled_stream"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + DataStream struct { + Dataset string `json:"dataset"` + Elasticsearch *struct { + DynamicDataset *bool `json:"dynamic_dataset,omitempty"` + DynamicNamespace *bool `json:"dynamic_namespace,omitempty"` + Privileges *struct { + Indices *[]string `json:"indices,omitempty"` + } `json:"privileges,omitempty"` + } `json:"elasticsearch,omitempty"` + Type string `json:"type"` + } `json:"data_stream"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + Release *PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs0StreamsRelease `json:"release,omitempty"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + } `json:"streams"` + Type string `json:"type"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` } +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs0StreamsRelease string +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1 map[string]struct { + // Enabled enable or disable that input, (default to true) + Enabled *bool `json:"enabled,omitempty"` -// HasAuth If true, a username and password for login type authentication must be provided. -type HasAuth = bool + // Streams Input streams (see integration documentation to know what streams are available) + Streams *map[string]struct { + // Enabled enable or disable that stream, (default to true) + Enabled *bool `json:"enabled,omitempty"` -// IndexConfig Defines properties for connectors when type is `.index`. -type IndexConfig struct { - // ExecutionTimeField A field that indicates when the document was indexed. - ExecutionTimeField *string `json:"executionTimeField,omitempty"` + // Vars Input/stream level variable (see integration documentation for more information) + Vars *map[string]*PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Inputs_1_Streams_Vars_AdditionalProperties `json:"vars,omitempty"` + } `json:"streams,omitempty"` - // Index The Elasticsearch index to be written to. 
- Index string `json:"index"` + // Vars Input/stream level variable (see integration documentation for more information) + Vars *map[string]*PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Inputs_1_Vars_AdditionalProperties `json:"vars,omitempty"` +} +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1StreamsVars0 = bool +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1StreamsVars1 = string +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1StreamsVars2 = float32 +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1StreamsVars3 = []string +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1StreamsVars4 = []float32 +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1StreamsVars5 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Inputs_1_Streams_Vars_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1Vars0 = bool +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1Vars1 = string +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1Vars2 = float32 +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1Vars3 = []string +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1Vars4 = []float32 +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Inputs1Vars5 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Inputs_1_Vars_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Inputs struct { + union json.RawMessage +} +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Vars0 map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` +} +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Vars1 map[string]*PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Vars_1_AdditionalProperties +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Vars10 = bool +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Vars11 = string +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Vars12 = float32 +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Vars13 = []string +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Vars14 = []float32 +type PostFleetPackagePoliciesUpgradeDryrun200Diff0Vars15 struct { + Id string `json:"id"` + IsSecretRef bool `json:"isSecretRef"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Vars_1_AdditionalProperties struct { + union json.RawMessage +} +type PostFleetPackagePoliciesUpgradeDryrun_200_Diff_0_Vars struct { + union json.RawMessage +} +type PostFleetPackagePoliciesUpgradeDryrun200Diff1 struct { + // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. + AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions"` + CreatedAt *string `json:"created_at,omitempty"` + CreatedBy *string `json:"created_by,omitempty"` - // Refresh The refresh policy for the write request, which affects when changes are made visible to search. Refer to the refresh setting for Elasticsearch document APIs. 
- Refresh *bool `json:"refresh,omitempty"` + // Description Package policy description + Description *string `json:"description,omitempty"` + Elasticsearch *PostFleetPackagePoliciesUpgradeDryrun_200_Diff_1_Elasticsearch `json:"elasticsearch,omitempty"` + Enabled bool `json:"enabled"` + Errors *[]struct { + Key *string `json:"key,omitempty"` + Message string `json:"message"` + } `json:"errors,omitempty"` + Force *bool `json:"force,omitempty"` + Id *string `json:"id,omitempty"` + Inputs []struct { + CompiledInput interface{} `json:"compiled_input"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + PolicyTemplate *string `json:"policy_template,omitempty"` + Streams []struct { + CompiledStream interface{} `json:"compiled_stream"` + + // Config Package variable (see integration documentation for more information) + Config *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"config,omitempty"` + DataStream struct { + Dataset string `json:"dataset"` + Elasticsearch *struct { + DynamicDataset *bool `json:"dynamic_dataset,omitempty"` + DynamicNamespace *bool `json:"dynamic_namespace,omitempty"` + Privileges *struct { + Indices *[]string `json:"indices,omitempty"` + } `json:"privileges,omitempty"` + } `json:"elasticsearch,omitempty"` + Type string `json:"type"` + } `json:"data_stream"` + Enabled bool `json:"enabled"` + Id *string `json:"id,omitempty"` + KeepEnabled *bool `json:"keep_enabled,omitempty"` + Release *PostFleetPackagePoliciesUpgradeDryrun200Diff1InputsStreamsRelease `json:"release,omitempty"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + } `json:"streams"` + Type string `json:"type"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + } `json:"inputs"` + IsManaged *bool `json:"is_managed,omitempty"` + MissingVars *[]string `json:"missingVars,omitempty"` + + // Name Package policy name (should be unique) + Name string `json:"name"` + + // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id"` + + // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. 
+ Overrides *struct { + Inputs *map[string]interface{} `json:"inputs,omitempty"` + } `json:"overrides"` + Package *struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package,omitempty"` + + // PolicyId Agent policy ID where that package policy will be added + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Revision *float32 `json:"revision,omitempty"` + SecretReferences *[]struct { + Id string `json:"id"` + } `json:"secret_references,omitempty"` + + // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. + SupportsAgentless *bool `json:"supports_agentless"` + UpdatedAt *string `json:"updated_at,omitempty"` + UpdatedBy *string `json:"updated_by,omitempty"` + + // Vars Package variable (see integration documentation for more information) + Vars *map[string]struct { + Frozen *bool `json:"frozen,omitempty"` + Type *string `json:"type,omitempty"` + Value interface{} `json:"value"` + } `json:"vars,omitempty"` + Version *string `json:"version,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_Diff_1_Elasticsearch_Privileges struct { + Cluster *[]string `json:"cluster,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun_200_Diff_1_Elasticsearch struct { + Privileges *PostFleetPackagePoliciesUpgradeDryrun_200_Diff_1_Elasticsearch_Privileges `json:"privileges,omitempty"` + AdditionalProperties map[string]interface{} `json:"-"` +} +type PostFleetPackagePoliciesUpgradeDryrun200Diff1InputsStreamsRelease string +type PostFleetPackagePoliciesUpgradeDryrun_200_Diff_Item struct { + union json.RawMessage } -// JiraConfig Defines properties for connectors when type is `.jira`. -type JiraConfig struct { - // ApiUrl The Jira instance URL. - ApiUrl string `json:"apiUrl"` +// Status returns HTTPResponse.Status +func (r PostFleetPackagePoliciesUpgradeDryrunResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // ProjectKey The Jira project key. - ProjectKey string `json:"projectKey"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetPackagePoliciesUpgradeDryrunResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// JiraSecrets Defines secrets for connectors when type is `.jira`. -type JiraSecrets struct { - // ApiToken The Jira API authentication token for HTTP basic authentication. 
- ApiToken string `json:"apiToken"` +type DeleteFleetPackagePoliciesPackagepolicyidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Id string `json:"id"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} - // Email The account email for HTTP Basic authentication. - Email string `json:"email"` +// Status returns HTTPResponse.Status +func (r DeleteFleetPackagePoliciesPackagepolicyidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// Key If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the KEY file. -type Key = string +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// NewOutputElasticsearch defines model for new_output_elasticsearch. -type NewOutputElasticsearch struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Name string `json:"name"` - Preset *NewOutputElasticsearchPreset `json:"preset,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *struct { - Ssl *struct { - Key *NewOutputElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - Shipper *NewOutputShipper `json:"shipper,omitempty"` - Ssl *NewOutputSsl `json:"ssl,omitempty"` - Type NewOutputElasticsearchType `json:"type"` +type GetFleetPackagePoliciesPackagepolicyidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item PackagePolicy `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Message string `json:"message"` + } } -// NewOutputElasticsearchPreset defines model for NewOutputElasticsearch.Preset. -type NewOutputElasticsearchPreset string +// Status returns HTTPResponse.Status +func (r GetFleetPackagePoliciesPackagepolicyidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// NewOutputElasticsearchSecretsSslKey0 defines model for . -type NewOutputElasticsearchSecretsSslKey0 struct { - Id string `json:"id"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// NewOutputElasticsearchSecretsSslKey1 defines model for . 
-type NewOutputElasticsearchSecretsSslKey1 = string +type PutFleetPackagePoliciesPackagepolicyidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item PackagePolicy `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON403 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} -// NewOutputElasticsearch_Secrets_Ssl_Key defines model for NewOutputElasticsearch.Secrets.Ssl.Key. -type NewOutputElasticsearch_Secrets_Ssl_Key struct { +// Status returns HTTPResponse.Status +func (r PutFleetPackagePoliciesPackagepolicyidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type GetFleetProxiesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + Certificate *string `json:"certificate"` + CertificateAuthorities *string `json:"certificate_authorities"` + CertificateKey *string `json:"certificate_key"` + Id string `json:"id"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyHeaders *map[string]GetFleetProxies_200_Items_ProxyHeaders_AdditionalProperties `json:"proxy_headers"` + Url string `json:"url"` + } `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetProxies200ItemsProxyHeaders0 = string +type GetFleetProxies200ItemsProxyHeaders1 = bool +type GetFleetProxies200ItemsProxyHeaders2 = float32 +type GetFleetProxies_200_Items_ProxyHeaders_AdditionalProperties struct { union json.RawMessage } -// NewOutputElasticsearchType defines model for NewOutputElasticsearch.Type. -type NewOutputElasticsearchType string +// Status returns HTTPResponse.Status +func (r GetFleetProxiesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// NewOutputKafka defines model for new_output_kafka. 
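// Illustrative sketch (not generated code): mapping the typed payloads of
// GetFleetPackagePoliciesPackagepolicyidResponse (declared above) onto a
// (policy, found, error) triple. Only the JSON200/JSON400/JSON404 fields shown
// in this diff are used; "fmt" is assumed to be imported.
func packagePolicyFromResponse(resp *GetFleetPackagePoliciesPackagepolicyidResponse) (*PackagePolicy, bool, error) {
	switch {
	case resp == nil:
		return nil, false, fmt.Errorf("nil response")
	case resp.JSON200 != nil:
		return &resp.JSON200.Item, true, nil
	case resp.JSON404 != nil:
		return nil, false, nil // the package policy does not exist
	case resp.JSON400 != nil:
		return nil, false, fmt.Errorf("bad request: %s", resp.JSON400.Message)
	default:
		return nil, false, fmt.Errorf("unexpected status code %d", resp.StatusCode())
	}
}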
-type NewOutputKafka struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - AuthType NewOutputKafkaAuthType `json:"auth_type"` - BrokerTimeout *float32 `json:"broker_timeout,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ClientId *string `json:"client_id,omitempty"` - Compression *NewOutputKafkaCompression `json:"compression,omitempty"` - CompressionLevel interface{} `json:"compression_level"` - ConfigYaml *string `json:"config_yaml,omitempty"` - ConnectionType interface{} `json:"connection_type"` - Hash *struct { - Hash *string `json:"hash,omitempty"` - Random *bool `json:"random,omitempty"` - } `json:"hash,omitempty"` - Headers *[]struct { - Key string `json:"key"` - Value string `json:"value"` - } `json:"headers,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Key *string `json:"key,omitempty"` - Name string `json:"name"` - Partition *NewOutputKafkaPartition `json:"partition,omitempty"` - Password interface{} `json:"password"` - ProxyId *string `json:"proxy_id,omitempty"` - Random *struct { - GroupEvents *float32 `json:"group_events,omitempty"` - } `json:"random,omitempty"` - RequiredAcks *NewOutputKafkaRequiredAcks `json:"required_acks,omitempty"` - RoundRobin *struct { - GroupEvents *float32 `json:"group_events,omitempty"` - } `json:"round_robin,omitempty"` - Sasl *struct { - Mechanism *NewOutputKafkaSaslMechanism `json:"mechanism,omitempty"` - } `json:"sasl,omitempty"` - Secrets *struct { - Password *NewOutputKafka_Secrets_Password `json:"password,omitempty"` - Ssl *struct { - Key NewOutputKafka_Secrets_Ssl_Key `json:"key"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - Shipper *NewOutputShipper `json:"shipper,omitempty"` - Ssl *NewOutputSsl `json:"ssl,omitempty"` - Timeout *float32 `json:"timeout,omitempty"` - Topic *string `json:"topic,omitempty"` - Type NewOutputKafkaType `json:"type"` - Username interface{} `json:"username"` - Version *string `json:"version,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetProxiesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// NewOutputKafkaAuthType defines model for NewOutputKafka.AuthType. 
-type NewOutputKafkaAuthType string +type PostFleetProxiesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + Certificate *string `json:"certificate"` + CertificateAuthorities *string `json:"certificate_authorities"` + CertificateKey *string `json:"certificate_key"` + Id string `json:"id"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyHeaders *map[string]PostFleetProxies_200_Item_ProxyHeaders_AdditionalProperties `json:"proxy_headers"` + Url string `json:"url"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PostFleetProxies200ItemProxyHeaders0 = string +type PostFleetProxies200ItemProxyHeaders1 = bool +type PostFleetProxies200ItemProxyHeaders2 = float32 +type PostFleetProxies_200_Item_ProxyHeaders_AdditionalProperties struct { + union json.RawMessage +} -// NewOutputKafkaCompression defines model for NewOutputKafka.Compression. -type NewOutputKafkaCompression string +// Status returns HTTPResponse.Status +func (r PostFleetProxiesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// NewOutputKafkaPartition defines model for NewOutputKafka.Partition. -type NewOutputKafkaPartition string +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetProxiesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} + +type DeleteFleetProxiesItemidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Id string `json:"id"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} + +// Status returns HTTPResponse.Status +func (r DeleteFleetProxiesItemidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// NewOutputKafkaRequiredAcks defines model for NewOutputKafka.RequiredAcks. -type NewOutputKafkaRequiredAcks int +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteFleetProxiesItemidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// NewOutputKafkaSaslMechanism defines model for NewOutputKafka.Sasl.Mechanism. 
-type NewOutputKafkaSaslMechanism string +type GetFleetProxiesItemidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + Certificate *string `json:"certificate"` + CertificateAuthorities *string `json:"certificate_authorities"` + CertificateKey *string `json:"certificate_key"` + Id string `json:"id"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyHeaders *map[string]GetFleetProxiesItemid_200_Item_ProxyHeaders_AdditionalProperties `json:"proxy_headers"` + Url string `json:"url"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type GetFleetProxiesItemid200ItemProxyHeaders0 = string +type GetFleetProxiesItemid200ItemProxyHeaders1 = bool +type GetFleetProxiesItemid200ItemProxyHeaders2 = float32 +type GetFleetProxiesItemid_200_Item_ProxyHeaders_AdditionalProperties struct { + union json.RawMessage +} -// NewOutputKafkaSecretsPassword0 defines model for . -type NewOutputKafkaSecretsPassword0 struct { - Id string `json:"id"` +// Status returns HTTPResponse.Status +func (r GetFleetProxiesItemidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// NewOutputKafkaSecretsPassword1 defines model for . -type NewOutputKafkaSecretsPassword1 = string +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetProxiesItemidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// NewOutputKafka_Secrets_Password defines model for NewOutputKafka.Secrets.Password. -type NewOutputKafka_Secrets_Password struct { +type PutFleetProxiesItemidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + Certificate *string `json:"certificate"` + CertificateAuthorities *string `json:"certificate_authorities"` + CertificateKey *string `json:"certificate_key"` + Id string `json:"id"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyHeaders *map[string]PutFleetProxiesItemid_200_Item_ProxyHeaders_AdditionalProperties `json:"proxy_headers"` + Url string `json:"url"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} +type PutFleetProxiesItemid200ItemProxyHeaders0 = string +type PutFleetProxiesItemid200ItemProxyHeaders1 = bool +type PutFleetProxiesItemid200ItemProxyHeaders2 = float32 +type PutFleetProxiesItemid_200_Item_ProxyHeaders_AdditionalProperties struct { union json.RawMessage } -// NewOutputKafkaSecretsSslKey0 defines model for . -type NewOutputKafkaSecretsSslKey0 struct { - Id string `json:"id"` +// Status returns HTTPResponse.Status +func (r PutFleetProxiesItemidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// NewOutputKafkaSecretsSslKey1 defines model for . 
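// Illustrative sketch (not generated code): the ProxyHeaders additional-properties
// aliases above allow a header value to be a string, bool, or float32. The generated
// wrapper keeps the raw JSON in an unexported union field, so this standalone helper
// works on a plain json.RawMessage instead of that wrapper. Assumes "encoding/json"
// and "fmt" are imported.
func decodeProxyHeaderValue(raw json.RawMessage) (interface{}, error) {
	var s string
	if err := json.Unmarshal(raw, &s); err == nil {
		return s, nil
	}
	var b bool
	if err := json.Unmarshal(raw, &b); err == nil {
		return b, nil
	}
	var f float32
	if err := json.Unmarshal(raw, &f); err == nil {
		return f, nil
	}
	return nil, fmt.Errorf("proxy header value %s is not a string, bool, or number", string(raw))
}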
-type NewOutputKafkaSecretsSslKey1 = string +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetProxiesItemidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// NewOutputKafka_Secrets_Ssl_Key defines model for NewOutputKafka.Secrets.Ssl.Key. -type NewOutputKafka_Secrets_Ssl_Key struct { - union json.RawMessage +type GetFleetRemoteSyncedIntegrationsStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + CustomAssets *map[string]struct { + Error *string `json:"error,omitempty"` + IsDeleted *bool `json:"is_deleted,omitempty"` + Name string `json:"name"` + PackageName string `json:"package_name"` + PackageVersion string `json:"package_version"` + SyncStatus GetFleetRemoteSyncedIntegrationsStatus200CustomAssetsSyncStatus `json:"sync_status"` + Type string `json:"type"` + } `json:"custom_assets,omitempty"` + Error *string `json:"error,omitempty"` + Integrations []struct { + Error *string `json:"error,omitempty"` + Id *string `json:"id,omitempty"` + InstallStatus struct { + Main string `json:"main"` + Remote *string `json:"remote,omitempty"` + } `json:"install_status"` + PackageName *string `json:"package_name,omitempty"` + PackageVersion *string `json:"package_version,omitempty"` + SyncStatus GetFleetRemoteSyncedIntegrationsStatus200IntegrationsSyncStatus `json:"sync_status"` + UpdatedAt *string `json:"updated_at,omitempty"` + Warning *struct { + Message *string `json:"message,omitempty"` + Title string `json:"title"` + } `json:"warning,omitempty"` + } `json:"integrations"` + Warning *struct { + Message *string `json:"message,omitempty"` + Title string `json:"title"` + } `json:"warning,omitempty"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } +type GetFleetRemoteSyncedIntegrationsStatus200CustomAssetsSyncStatus string +type GetFleetRemoteSyncedIntegrationsStatus200IntegrationsSyncStatus string -// NewOutputKafkaType defines model for NewOutputKafka.Type. -type NewOutputKafkaType string +// Status returns HTTPResponse.Status +func (r GetFleetRemoteSyncedIntegrationsStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// NewOutputLogstash defines model for new_output_logstash. 
-type NewOutputLogstash struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Name string `json:"name"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *struct { - Ssl *struct { - Key *NewOutputLogstash_Secrets_Ssl_Key `json:"key,omitempty"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - Shipper *NewOutputShipper `json:"shipper,omitempty"` - Ssl *NewOutputSsl `json:"ssl,omitempty"` - Type NewOutputLogstashType `json:"type"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetRemoteSyncedIntegrationsStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// NewOutputLogstashSecretsSslKey0 defines model for . -type NewOutputLogstashSecretsSslKey0 struct { - Id string `json:"id"` +type GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + CustomAssets *map[string]struct { + Error *string `json:"error,omitempty"` + IsDeleted *bool `json:"is_deleted,omitempty"` + Name string `json:"name"` + PackageName string `json:"package_name"` + PackageVersion string `json:"package_version"` + SyncStatus GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus200CustomAssetsSyncStatus `json:"sync_status"` + Type string `json:"type"` + } `json:"custom_assets,omitempty"` + Error *string `json:"error,omitempty"` + Integrations []struct { + Error *string `json:"error,omitempty"` + Id *string `json:"id,omitempty"` + InstallStatus struct { + Main string `json:"main"` + Remote *string `json:"remote,omitempty"` + } `json:"install_status"` + PackageName *string `json:"package_name,omitempty"` + PackageVersion *string `json:"package_version,omitempty"` + SyncStatus GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus200IntegrationsSyncStatus `json:"sync_status"` + UpdatedAt *string `json:"updated_at,omitempty"` + Warning *struct { + Message *string `json:"message,omitempty"` + Title string `json:"title"` + } `json:"warning,omitempty"` + } `json:"integrations"` + Warning *struct { + Message *string `json:"message,omitempty"` + Title string `json:"title"` + } `json:"warning,omitempty"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } +type GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus200CustomAssetsSyncStatus string +type GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus200IntegrationsSyncStatus string -// NewOutputLogstashSecretsSslKey1 defines model for . -type NewOutputLogstashSecretsSslKey1 = string +// Status returns HTTPResponse.Status +func (r GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// NewOutputLogstash_Secrets_Ssl_Key defines model for NewOutputLogstash.Secrets.Ssl.Key. 
-type NewOutputLogstash_Secrets_Ssl_Key struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// NewOutputLogstashType defines model for NewOutputLogstash.Type. -type NewOutputLogstashType string +type PostFleetServiceTokensResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Name string `json:"name"` + Value string `json:"value"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} -// NewOutputRemoteElasticsearch defines model for new_output_remote_elasticsearch. -type NewOutputRemoteElasticsearch struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - KibanaApiKey *string `json:"kibana_api_key,omitempty"` - KibanaUrl *string `json:"kibana_url,omitempty"` - Name string `json:"name"` - Preset *NewOutputRemoteElasticsearchPreset `json:"preset,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *struct { - ServiceToken *NewOutputRemoteElasticsearch_Secrets_ServiceToken `json:"service_token,omitempty"` - Ssl *struct { - Key *NewOutputRemoteElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - ServiceToken *string `json:"service_token,omitempty"` - Shipper *NewOutputShipper `json:"shipper,omitempty"` - Ssl *NewOutputSsl `json:"ssl,omitempty"` - SyncIntegrations *bool `json:"sync_integrations,omitempty"` - SyncUninstalledIntegrations *bool `json:"sync_uninstalled_integrations,omitempty"` - Type NewOutputRemoteElasticsearchType `json:"type"` +// Status returns HTTPResponse.Status +func (r PostFleetServiceTokensResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// NewOutputRemoteElasticsearchPreset defines model for NewOutputRemoteElasticsearch.Preset. -type NewOutputRemoteElasticsearchPreset string +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetServiceTokensResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// NewOutputRemoteElasticsearchSecretsServiceToken0 defines model for . 
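// Illustrative sketch (not generated code): collecting per-integration sync errors
// from the remote synced integrations status payload declared above. Only the
// JSON200.Integrations fields shown in this diff are referenced.
func remoteSyncedIntegrationErrors(resp *GetFleetRemoteSyncedIntegrationsStatusResponse) []string {
	if resp == nil || resp.JSON200 == nil {
		return nil
	}
	var errs []string
	for _, integration := range resp.JSON200.Integrations {
		if integration.Error != nil && *integration.Error != "" {
			errs = append(errs, *integration.Error)
		}
	}
	return errs
}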
-type NewOutputRemoteElasticsearchSecretsServiceToken0 struct { - Id string `json:"id"` +type GetFleetSettingsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + DeleteUnenrolledAgents *struct { + Enabled bool `json:"enabled"` + IsPreconfigured bool `json:"is_preconfigured"` + } `json:"delete_unenrolled_agents,omitempty"` + HasSeenAddDataNotice *bool `json:"has_seen_add_data_notice,omitempty"` + Id string `json:"id"` + OutputSecretStorageRequirementsMet *bool `json:"output_secret_storage_requirements_met,omitempty"` + PreconfiguredFields *[]GetFleetSettings200ItemPreconfiguredFields `json:"preconfigured_fields,omitempty"` + PrereleaseIntegrationsEnabled *bool `json:"prerelease_integrations_enabled,omitempty"` + SecretStorageRequirementsMet *bool `json:"secret_storage_requirements_met,omitempty"` + UseSpaceAwarenessMigrationStartedAt *string `json:"use_space_awareness_migration_started_at"` + UseSpaceAwarenessMigrationStatus *GetFleetSettings200ItemUseSpaceAwarenessMigrationStatus `json:"use_space_awareness_migration_status,omitempty"` + Version *string `json:"version,omitempty"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Message string `json:"message"` + } } +type GetFleetSettings200ItemPreconfiguredFields string +type GetFleetSettings200ItemUseSpaceAwarenessMigrationStatus string -// NewOutputRemoteElasticsearchSecretsServiceToken1 defines model for . -type NewOutputRemoteElasticsearchSecretsServiceToken1 = string +// Status returns HTTPResponse.Status +func (r GetFleetSettingsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// NewOutputRemoteElasticsearch_Secrets_ServiceToken defines model for NewOutputRemoteElasticsearch.Secrets.ServiceToken. -type NewOutputRemoteElasticsearch_Secrets_ServiceToken struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetSettingsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// NewOutputRemoteElasticsearchSecretsSslKey0 defines model for . 
-type NewOutputRemoteElasticsearchSecretsSslKey0 struct { - Id string `json:"id"` +type PutFleetSettingsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + DeleteUnenrolledAgents *struct { + Enabled bool `json:"enabled"` + IsPreconfigured bool `json:"is_preconfigured"` + } `json:"delete_unenrolled_agents,omitempty"` + HasSeenAddDataNotice *bool `json:"has_seen_add_data_notice,omitempty"` + Id string `json:"id"` + OutputSecretStorageRequirementsMet *bool `json:"output_secret_storage_requirements_met,omitempty"` + PreconfiguredFields *[]PutFleetSettings200ItemPreconfiguredFields `json:"preconfigured_fields,omitempty"` + PrereleaseIntegrationsEnabled *bool `json:"prerelease_integrations_enabled,omitempty"` + SecretStorageRequirementsMet *bool `json:"secret_storage_requirements_met,omitempty"` + UseSpaceAwarenessMigrationStartedAt *string `json:"use_space_awareness_migration_started_at"` + UseSpaceAwarenessMigrationStatus *PutFleetSettings200ItemUseSpaceAwarenessMigrationStatus `json:"use_space_awareness_migration_status,omitempty"` + Version *string `json:"version,omitempty"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON404 *struct { + Message string `json:"message"` + } } +type PutFleetSettings200ItemPreconfiguredFields string +type PutFleetSettings200ItemUseSpaceAwarenessMigrationStatus string -// NewOutputRemoteElasticsearchSecretsSslKey1 defines model for . -type NewOutputRemoteElasticsearchSecretsSslKey1 = string +// Status returns HTTPResponse.Status +func (r PutFleetSettingsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// NewOutputRemoteElasticsearch_Secrets_Ssl_Key defines model for NewOutputRemoteElasticsearch.Secrets.Ssl.Key. -type NewOutputRemoteElasticsearch_Secrets_Ssl_Key struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetSettingsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// NewOutputRemoteElasticsearchType defines model for NewOutputRemoteElasticsearch.Type. -type NewOutputRemoteElasticsearchType string +type PostFleetSetupResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + IsInitialized bool `json:"isInitialized"` + NonFatalErrors []struct { + Message string `json:"message"` + Name string `json:"name"` + } `json:"nonFatalErrors"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON500 *struct { + Message string `json:"message"` + } +} -// NewOutputShipper defines model for new_output_shipper. 
-type NewOutputShipper struct { - CompressionLevel *float32 `json:"compression_level,omitempty"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` - DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` - DiskQueuePath *string `json:"disk_queue_path,omitempty"` - Loadbalance *bool `json:"loadbalance,omitempty"` - MaxBatchBytes *float32 `json:"max_batch_bytes,omitempty"` - MemQueueEvents *float32 `json:"mem_queue_events,omitempty"` - QueueFlushTimeout *float32 `json:"queue_flush_timeout,omitempty"` +// Status returns HTTPResponse.Status +func (r PostFleetSetupResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// NewOutputSsl defines model for new_output_ssl. -type NewOutputSsl struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - VerificationMode *NewOutputSslVerificationMode `json:"verification_mode,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostFleetSetupResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// NewOutputSslVerificationMode defines model for NewOutputSsl.VerificationMode. -type NewOutputSslVerificationMode string +type GetFleetSpaceSettingsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + AllowedNamespacePrefixes []string `json:"allowed_namespace_prefixes"` + ManagedBy *string `json:"managed_by,omitempty"` + } `json:"item"` + } +} -// NewOutputUnion defines model for new_output_union. -type NewOutputUnion struct { - union json.RawMessage +// Status returns HTTPResponse.Status +func (r GetFleetSpaceSettingsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// OpsgenieConfig Defines properties for connectors when type is `.opsgenie`. -type OpsgenieConfig struct { - // ApiUrl The Opsgenie URL. For example, `https://api.opsgenie.com` or `https://api.eu.opsgenie.com`. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - ApiUrl string `json:"apiUrl"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetSpaceSettingsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OpsgenieSecrets Defines secrets for connectors when type is `.opsgenie`. -type OpsgenieSecrets struct { - // ApiKey The Opsgenie API authentication key for HTTP Basic authentication. - ApiKey string `json:"apiKey"` +type PutFleetSpaceSettingsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + AllowedNamespacePrefixes []string `json:"allowed_namespace_prefixes"` + ManagedBy *string `json:"managed_by,omitempty"` + } `json:"item"` + } } -// OutputElasticsearch defines model for output_elasticsearch. 
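// Illustrative sketch (not generated code): surfacing the non-fatal errors from the
// Fleet setup response defined above. Uses only the JSON200 fields declared in this
// diff; "fmt" is assumed to be imported.
func fleetSetupWarnings(resp *PostFleetSetupResponse) []string {
	if resp == nil || resp.JSON200 == nil {
		return nil
	}
	warnings := make([]string, 0, len(resp.JSON200.NonFatalErrors))
	for _, e := range resp.JSON200.NonFatalErrors {
		warnings = append(warnings, fmt.Sprintf("%s: %s", e.Name, e.Message))
	}
	return warnings
}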
-type OutputElasticsearch struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Name string `json:"name"` - Preset *OutputElasticsearchPreset `json:"preset,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *OutputElasticsearch_Secrets `json:"secrets,omitempty"` - Shipper *OutputShipper `json:"shipper,omitempty"` - Ssl *OutputSsl `json:"ssl,omitempty"` - Type OutputElasticsearchType `json:"type"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r PutFleetSpaceSettingsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// OutputElasticsearchPreset defines model for OutputElasticsearch.Preset. -type OutputElasticsearchPreset string +// StatusCode returns HTTPResponse.StatusCode +func (r PutFleetSpaceSettingsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// OutputElasticsearchSecretsSslKey0 defines model for . -type OutputElasticsearchSecretsSslKey0 struct { - Id string `json:"id"` - AdditionalProperties map[string]interface{} `json:"-"` +type GetFleetUninstallTokensResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Items []struct { + CreatedAt string `json:"created_at"` + Id string `json:"id"` + Namespaces *[]string `json:"namespaces,omitempty"` + PolicyId string `json:"policy_id"` + PolicyName *string `json:"policy_name"` + } `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// OutputElasticsearchSecretsSslKey1 defines model for . -type OutputElasticsearchSecretsSslKey1 = string +// Status returns HTTPResponse.Status +func (r GetFleetUninstallTokensResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// OutputElasticsearch_Secrets_Ssl_Key defines model for OutputElasticsearch.Secrets.Ssl.Key. -type OutputElasticsearch_Secrets_Ssl_Key struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetUninstallTokensResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputElasticsearch_Secrets_Ssl defines model for OutputElasticsearch.Secrets.Ssl. 
-type OutputElasticsearch_Secrets_Ssl struct { - Key *OutputElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type GetFleetUninstallTokensUninstalltokenidResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Item struct { + CreatedAt string `json:"created_at"` + Id string `json:"id"` + Namespaces *[]string `json:"namespaces,omitempty"` + PolicyId string `json:"policy_id"` + PolicyName *string `json:"policy_name"` + Token string `json:"token"` + } `json:"item"` + } + JSON400 *struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// OutputElasticsearch_Secrets defines model for OutputElasticsearch.Secrets. -type OutputElasticsearch_Secrets struct { - Ssl *OutputElasticsearch_Secrets_Ssl `json:"ssl,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r GetFleetUninstallTokensUninstalltokenidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// OutputElasticsearchType defines model for OutputElasticsearch.Type. -type OutputElasticsearchType string +// StatusCode returns HTTPResponse.StatusCode +func (r GetFleetUninstallTokensUninstalltokenidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// OutputKafka defines model for output_kafka. -type OutputKafka struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - AuthType OutputKafkaAuthType `json:"auth_type"` - BrokerTimeout *float32 `json:"broker_timeout,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ClientId *string `json:"client_id,omitempty"` - Compression *OutputKafkaCompression `json:"compression,omitempty"` - CompressionLevel interface{} `json:"compression_level"` - ConfigYaml *string `json:"config_yaml,omitempty"` - ConnectionType interface{} `json:"connection_type"` - Hash *OutputKafka_Hash `json:"hash,omitempty"` - Headers *[]OutputKafka_Headers_Item `json:"headers,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Key *string `json:"key,omitempty"` - Name string `json:"name"` - Partition *OutputKafkaPartition `json:"partition,omitempty"` - Password interface{} `json:"password"` - ProxyId *string `json:"proxy_id,omitempty"` - Random *OutputKafka_Random `json:"random,omitempty"` - RequiredAcks *OutputKafkaRequiredAcks `json:"required_acks,omitempty"` - RoundRobin *OutputKafka_RoundRobin `json:"round_robin,omitempty"` - Sasl *OutputKafka_Sasl `json:"sasl,omitempty"` - Secrets *OutputKafka_Secrets `json:"secrets,omitempty"` - Shipper *OutputShipper `json:"shipper,omitempty"` - Ssl *OutputSsl `json:"ssl,omitempty"` - Timeout *float32 `json:"timeout,omitempty"` - Topic *string `json:"topic,omitempty"` - Type OutputKafkaType `json:"type"` - Username interface{} `json:"username"` - Version *string `json:"version,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type DeleteListResponse struct { + Body []byte + 
HTTPResponse *http.Response + JSON200 *SecurityListsAPIList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// OutputKafkaAuthType defines model for OutputKafka.AuthType. -type OutputKafkaAuthType string +// Status returns HTTPResponse.Status +func (r DeleteListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// OutputKafkaCompression defines model for OutputKafka.Compression. -type OutputKafkaCompression string +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// OutputKafka_Hash defines model for OutputKafka.Hash. -type OutputKafka_Hash struct { - Hash *string `json:"hash,omitempty"` - Random *bool `json:"random,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type ReadListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityListsAPIList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// OutputKafka_Headers_Item defines model for output_kafka.headers.Item. -type OutputKafka_Headers_Item struct { - Key string `json:"key"` - Value string `json:"value"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r ReadListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// OutputKafkaPartition defines model for OutputKafka.Partition. -type OutputKafkaPartition string +// StatusCode returns HTTPResponse.StatusCode +func (r ReadListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// OutputKafka_Random defines model for OutputKafka.Random. -type OutputKafka_Random struct { - GroupEvents *float32 `json:"group_events,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type PatchListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityListsAPIList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// OutputKafkaRequiredAcks defines model for OutputKafka.RequiredAcks. -type OutputKafkaRequiredAcks int +// Status returns HTTPResponse.Status +func (r PatchListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// OutputKafka_RoundRobin defines model for OutputKafka.RoundRobin. -type OutputKafka_RoundRobin struct { - GroupEvents *float32 `json:"group_events,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r PatchListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputKafkaSaslMechanism defines model for OutputKafka.Sasl.Mechanism. 
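// Illustrative sketch (not generated code): the Security Lists responses above share
// one shape (a typed JSON200 plus typed 4xx/5xx payloads), so a small helper can
// normalise the read call. Only fields visible in this diff are referenced; the error
// payload internals are not inspected. Assumes "fmt" is imported.
func listFromReadResponse(resp *ReadListResponse) (*SecurityListsAPIList, bool, error) {
	if resp == nil {
		return nil, false, fmt.Errorf("nil response")
	}
	switch {
	case resp.JSON200 != nil:
		return resp.JSON200, true, nil
	case resp.JSON404 != nil:
		return nil, false, nil // the list does not exist
	default:
		return nil, false, fmt.Errorf("unexpected status reading list: %s: %s", resp.Status(), string(resp.Body))
	}
}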
-type OutputKafkaSaslMechanism string +type CreateListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityListsAPIList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON409 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse +} -// OutputKafka_Sasl defines model for OutputKafka.Sasl. -type OutputKafka_Sasl struct { - Mechanism *OutputKafkaSaslMechanism `json:"mechanism,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r CreateListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// OutputKafkaSecretsPassword0 defines model for . -type OutputKafkaSecretsPassword0 struct { - Id string `json:"id"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r CreateListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputKafkaSecretsPassword1 defines model for . -type OutputKafkaSecretsPassword1 = string +type UpdateListResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityListsAPIList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse +} -// OutputKafka_Secrets_Password defines model for OutputKafka.Secrets.Password. -type OutputKafka_Secrets_Password struct { - union json.RawMessage +// Status returns HTTPResponse.Status +func (r UpdateListResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// OutputKafkaSecretsSslKey0 defines model for . -type OutputKafkaSecretsSslKey0 struct { - Id string `json:"id"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateListResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputKafkaSecretsSslKey1 defines model for . -type OutputKafkaSecretsSslKey1 = string +type FindListsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Cursor SecurityListsAPIFindListsCursor `json:"cursor"` + Data []SecurityListsAPIList `json:"data"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Total int `json:"total"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse +} -// OutputKafka_Secrets_Ssl_Key defines model for OutputKafka.Secrets.Ssl.Key. -type OutputKafka_Secrets_Ssl_Key struct { - union json.RawMessage +// Status returns HTTPResponse.Status +func (r FindListsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// OutputKafka_Secrets_Ssl defines model for OutputKafka.Secrets.Ssl. 
-type OutputKafka_Secrets_Ssl struct { - Key OutputKafka_Secrets_Ssl_Key `json:"key"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r FindListsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputKafka_Secrets defines model for OutputKafka.Secrets. -type OutputKafka_Secrets struct { - Password *OutputKafka_Secrets_Password `json:"password,omitempty"` - Ssl *OutputKafka_Secrets_Ssl `json:"ssl,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type DeleteListIndexResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Acknowledged bool `json:"acknowledged"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// OutputKafkaType defines model for OutputKafka.Type. -type OutputKafkaType string +// Status returns HTTPResponse.Status +func (r DeleteListIndexResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// OutputLogstash defines model for output_logstash. -type OutputLogstash struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Name string `json:"name"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *OutputLogstash_Secrets `json:"secrets,omitempty"` - Shipper *OutputShipper `json:"shipper,omitempty"` - Ssl *OutputSsl `json:"ssl,omitempty"` - Type OutputLogstashType `json:"type"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteListIndexResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputLogstashSecretsSslKey0 defines model for . -type OutputLogstashSecretsSslKey0 struct { - Id string `json:"id"` - AdditionalProperties map[string]interface{} `json:"-"` +type ReadListIndexResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + ListIndex bool `json:"list_index"` + ListItemIndex bool `json:"list_item_index"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// OutputLogstashSecretsSslKey1 defines model for . -type OutputLogstashSecretsSslKey1 = string +// Status returns HTTPResponse.Status +func (r ReadListIndexResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// OutputLogstash_Secrets_Ssl_Key defines model for OutputLogstash.Secrets.Ssl.Key. 
-type OutputLogstash_Secrets_Ssl_Key struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r ReadListIndexResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputLogstash_Secrets_Ssl defines model for OutputLogstash.Secrets.Ssl. -type OutputLogstash_Secrets_Ssl struct { - Key *OutputLogstash_Secrets_Ssl_Key `json:"key,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type CreateListIndexResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Acknowledged bool `json:"acknowledged"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON409 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// OutputLogstash_Secrets defines model for OutputLogstash.Secrets. -type OutputLogstash_Secrets struct { - Ssl *OutputLogstash_Secrets_Ssl `json:"ssl,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r CreateListIndexResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// OutputLogstashType defines model for OutputLogstash.Type. -type OutputLogstashType string +// StatusCode returns HTTPResponse.StatusCode +func (r CreateListIndexResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// OutputRemoteElasticsearch defines model for output_remote_elasticsearch. -type OutputRemoteElasticsearch struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts []string `json:"hosts"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - KibanaApiKey *string `json:"kibana_api_key,omitempty"` - KibanaUrl *string `json:"kibana_url,omitempty"` - Name string `json:"name"` - Preset *OutputRemoteElasticsearchPreset `json:"preset,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *OutputRemoteElasticsearch_Secrets `json:"secrets,omitempty"` - ServiceToken *string `json:"service_token,omitempty"` - Shipper *OutputShipper `json:"shipper,omitempty"` - Ssl *OutputSsl `json:"ssl,omitempty"` - SyncIntegrations *bool `json:"sync_integrations,omitempty"` - SyncUninstalledIntegrations *bool `json:"sync_uninstalled_integrations,omitempty"` - Type OutputRemoteElasticsearchType `json:"type"` - AdditionalProperties map[string]interface{} `json:"-"` +type DeleteListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + union json.RawMessage + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } +type DeleteListItem2001 = []SecurityListsAPIListItem -// OutputRemoteElasticsearchPreset defines model for OutputRemoteElasticsearch.Preset. 
-type OutputRemoteElasticsearchPreset string +// Status returns HTTPResponse.Status +func (r DeleteListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// OutputRemoteElasticsearchSecretsServiceToken0 defines model for . -type OutputRemoteElasticsearchSecretsServiceToken0 struct { - Id string `json:"id"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputRemoteElasticsearchSecretsServiceToken1 defines model for . -type OutputRemoteElasticsearchSecretsServiceToken1 = string +type ReadListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + union json.RawMessage + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse +} +type ReadListItem2001 = []SecurityListsAPIListItem -// OutputRemoteElasticsearch_Secrets_ServiceToken defines model for OutputRemoteElasticsearch.Secrets.ServiceToken. -type OutputRemoteElasticsearch_Secrets_ServiceToken struct { - union json.RawMessage +// Status returns HTTPResponse.Status +func (r ReadListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// OutputRemoteElasticsearchSecretsSslKey0 defines model for . -type OutputRemoteElasticsearchSecretsSslKey0 struct { - Id string `json:"id"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r ReadListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputRemoteElasticsearchSecretsSslKey1 defines model for . -type OutputRemoteElasticsearchSecretsSslKey1 = string +type PatchListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityListsAPIListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse +} -// OutputRemoteElasticsearch_Secrets_Ssl_Key defines model for OutputRemoteElasticsearch.Secrets.Ssl.Key. -type OutputRemoteElasticsearch_Secrets_Ssl_Key struct { - union json.RawMessage +// Status returns HTTPResponse.Status +func (r PatchListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// OutputRemoteElasticsearch_Secrets_Ssl defines model for OutputRemoteElasticsearch.Secrets.Ssl. -type OutputRemoteElasticsearch_Secrets_Ssl struct { - Key *OutputRemoteElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r PatchListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputRemoteElasticsearch_Secrets defines model for OutputRemoteElasticsearch.Secrets. 
-type OutputRemoteElasticsearch_Secrets struct { - ServiceToken *OutputRemoteElasticsearch_Secrets_ServiceToken `json:"service_token,omitempty"` - Ssl *OutputRemoteElasticsearch_Secrets_Ssl `json:"ssl,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type CreateListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityListsAPIListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPIPlatformErrorResponse + JSON409 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// OutputRemoteElasticsearchType defines model for OutputRemoteElasticsearch.Type. -type OutputRemoteElasticsearchType string +// Status returns HTTPResponse.Status +func (r CreateListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// OutputShipper defines model for output_shipper. -type OutputShipper struct { - CompressionLevel *float32 `json:"compression_level,omitempty"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` - DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` - DiskQueuePath *string `json:"disk_queue_path,omitempty"` - Loadbalance *bool `json:"loadbalance,omitempty"` - MaxBatchBytes *float32 `json:"max_batch_bytes,omitempty"` - MemQueueEvents *float32 `json:"mem_queue_events,omitempty"` - QueueFlushTimeout *float32 `json:"queue_flush_timeout,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r CreateListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// OutputSsl defines model for output_ssl. -type OutputSsl struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - VerificationMode *OutputSslVerificationMode `json:"verification_mode,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type UpdateListItemResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityListsAPIListItem + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// OutputSslVerificationMode defines model for OutputSsl.VerificationMode. -type OutputSslVerificationMode string +// Status returns HTTPResponse.Status +func (r UpdateListItemResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// OutputUnion defines model for output_union. -type OutputUnion struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateListItemResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PackageInfo defines model for package_info. 
-type PackageInfo struct { - Agent *struct { - Privileges *struct { - Root *bool `json:"root,omitempty"` - } `json:"privileges,omitempty"` - } `json:"agent,omitempty"` - AssetTags *[]struct { - AssetIds *[]string `json:"asset_ids,omitempty"` - AssetTypes *[]string `json:"asset_types,omitempty"` - Text string `json:"text"` - } `json:"asset_tags,omitempty"` - Assets map[string]interface{} `json:"assets"` - Categories *[]string `json:"categories,omitempty"` - Conditions *PackageInfo_Conditions `json:"conditions,omitempty"` - DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` - Description *string `json:"description,omitempty"` - Discovery *PackageInfo_Discovery `json:"discovery,omitempty"` - Download *string `json:"download,omitempty"` - Elasticsearch *map[string]interface{} `json:"elasticsearch,omitempty"` - FormatVersion *string `json:"format_version,omitempty"` - Icons *[]PackageInfo_Icons_Item `json:"icons,omitempty"` - InstallationInfo *PackageInfo_InstallationInfo `json:"installationInfo,omitempty"` - Internal *bool `json:"internal,omitempty"` - KeepPoliciesUpToDate *bool `json:"keepPoliciesUpToDate,omitempty"` - LatestVersion *string `json:"latestVersion,omitempty"` - License *string `json:"license,omitempty"` - LicensePath *string `json:"licensePath,omitempty"` - Name string `json:"name"` - Notice *string `json:"notice,omitempty"` - Owner *PackageInfo_Owner `json:"owner,omitempty"` - Path *string `json:"path,omitempty"` - PolicyTemplates *[]map[string]interface{} `json:"policy_templates,omitempty"` - Readme *string `json:"readme,omitempty"` - Release *PackageInfoRelease `json:"release,omitempty"` - Screenshots *[]struct { - DarkMode *bool `json:"dark_mode,omitempty"` - Path *string `json:"path,omitempty"` - Size *string `json:"size,omitempty"` - Src string `json:"src"` - Title *string `json:"title,omitempty"` - Type *string `json:"type,omitempty"` - } `json:"screenshots,omitempty"` - SignaturePath *string `json:"signature_path,omitempty"` - Source *PackageInfo_Source `json:"source,omitempty"` - Status *string `json:"status,omitempty"` - Title string `json:"title"` - Type *PackageInfo_Type `json:"type,omitempty"` - Vars *[]map[string]interface{} `json:"vars,omitempty"` - Version string `json:"version"` - AdditionalProperties map[string]interface{} `json:"-"` +type ExportListItemsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON404 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// PackageInfo_Conditions_Elastic defines model for PackageInfo.Conditions.Elastic. -type PackageInfo_Conditions_Elastic struct { - Capabilities *[]string `json:"capabilities,omitempty"` - Subscription *string `json:"subscription,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r ExportListItemsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackageInfo_Conditions_Kibana defines model for PackageInfo.Conditions.Kibana. 
-type PackageInfo_Conditions_Kibana struct { - Version *string `json:"version,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r ExportListItemsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PackageInfo_Conditions defines model for PackageInfo.Conditions. -type PackageInfo_Conditions struct { - Elastic *PackageInfo_Conditions_Elastic `json:"elastic,omitempty"` - Kibana *PackageInfo_Conditions_Kibana `json:"kibana,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type FindListItemsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Cursor Returns the items that come after the last item returned in the previous call (use the `cursor` value returned in the previous call). This parameter uses the `tie_breaker_id` field to ensure all items are sorted and returned correctly. + Cursor SecurityListsAPIFindListItemsCursor `json:"cursor"` + Data []SecurityListsAPIListItem `json:"data"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Total int `json:"total"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// PackageInfo_Discovery_Fields_Item defines model for PackageInfo.Discovery.Fields.Item. -type PackageInfo_Discovery_Fields_Item struct { - Name string `json:"name"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r FindListItemsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackageInfo_Discovery defines model for PackageInfo.Discovery. -type PackageInfo_Discovery struct { - Fields *[]PackageInfo_Discovery_Fields_Item `json:"fields,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r FindListItemsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PackageInfo_Icons_Item defines model for package_info.icons.Item. -type PackageInfo_Icons_Item struct { - DarkMode *bool `json:"dark_mode,omitempty"` - Path *string `json:"path,omitempty"` - Size *string `json:"size,omitempty"` - Src string `json:"src"` - Title *string `json:"title,omitempty"` - Type *string `json:"type,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type ImportListItemsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityListsAPIList + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON409 *SecurityListsAPISiemErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse } -// PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 defines model for PackageInfo.InstallationInfo.AdditionalSpacesInstalledKibana.Type.0. -type PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 string +// Status returns HTTPResponse.Status +func (r ImportListItemsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 defines model for . 
-type PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 = string +// StatusCode returns HTTPResponse.StatusCode +func (r ImportListItemsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type defines model for PackageInfo.InstallationInfo.AdditionalSpacesInstalledKibana.Type. -type PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type struct { - union json.RawMessage +type ReadListPrivilegesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + IsAuthenticated bool `json:"is_authenticated"` + ListItems SecurityListsAPIListItemPrivileges `json:"listItems"` + Lists SecurityListsAPIListPrivileges `json:"lists"` + } + JSON400 *struct { + union json.RawMessage + } + JSON401 *SecurityListsAPIPlatformErrorResponse + JSON403 *SecurityListsAPIPlatformErrorResponse + JSON500 *SecurityListsAPISiemErrorResponse +} + +// Status returns HTTPResponse.Status +func (r ReadListPrivilegesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item defines model for PackageInfo.InstallationInfo.AdditionalSpacesInstalledKibana.Item. -type PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item struct { - Id string `json:"id"` - OriginId *string `json:"originId,omitempty"` - Type PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type `json:"type"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r ReadListPrivilegesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features defines model for PackageInfo.InstallationInfo.ExperimentalDataStreamFeatures.Features. -type PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features struct { - DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` - DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` - SyntheticSource *bool `json:"synthetic_source,omitempty"` - Tsdb *bool `json:"tsdb,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type DeleteLogstashPipelineResponse struct { + Body []byte + HTTPResponse *http.Response } -// PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item defines model for PackageInfo.InstallationInfo.ExperimentalDataStreamFeatures.Item. -type PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item struct { - DataStream string `json:"data_stream"` - Features PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features `json:"features"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r DeleteLogstashPipelineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackageInfoInstallationInfoInstallSource defines model for PackageInfo.InstallationInfo.InstallSource. -type PackageInfoInstallationInfoInstallSource string - -// PackageInfoInstallationInfoInstallStatus defines model for PackageInfo.InstallationInfo.InstallStatus. 
-type PackageInfoInstallationInfoInstallStatus string +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteLogstashPipelineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PackageInfoInstallationInfoInstalledEsType defines model for PackageInfo.InstallationInfo.InstalledEs.Type. -type PackageInfoInstallationInfoInstalledEsType string +type GetLogstashPipelineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} +} -// PackageInfo_InstallationInfo_InstalledEs_Item defines model for PackageInfo.InstallationInfo.InstalledEs.Item. -type PackageInfo_InstallationInfo_InstalledEs_Item struct { - Deferred *bool `json:"deferred,omitempty"` - Id string `json:"id"` - Type PackageInfoInstallationInfoInstalledEsType `json:"type"` - Version *string `json:"version,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r GetLogstashPipelineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackageInfoInstallationInfoInstalledKibanaType0 defines model for PackageInfo.InstallationInfo.InstalledKibana.Type.0. -type PackageInfoInstallationInfoInstalledKibanaType0 string +// StatusCode returns HTTPResponse.StatusCode +func (r GetLogstashPipelineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PackageInfoInstallationInfoInstalledKibanaType1 defines model for . -type PackageInfoInstallationInfoInstalledKibanaType1 = string +type PutLogstashPipelineResponse struct { + Body []byte + HTTPResponse *http.Response +} -// PackageInfo_InstallationInfo_InstalledKibana_Type defines model for PackageInfo.InstallationInfo.InstalledKibana.Type. -type PackageInfo_InstallationInfo_InstalledKibana_Type struct { - union json.RawMessage +// Status returns HTTPResponse.Status +func (r PutLogstashPipelineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackageInfo_InstallationInfo_InstalledKibana_Item defines model for PackageInfo.InstallationInfo.InstalledKibana.Item. -type PackageInfo_InstallationInfo_InstalledKibana_Item struct { - Id string `json:"id"` - OriginId *string `json:"originId,omitempty"` - Type PackageInfo_InstallationInfo_InstalledKibana_Type `json:"type"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r PutLogstashPipelineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PackageInfo_InstallationInfo_LatestExecutedState defines model for PackageInfo.InstallationInfo.LatestExecutedState. -type PackageInfo_InstallationInfo_LatestExecutedState struct { - Error *string `json:"error,omitempty"` - Name *string `json:"name,omitempty"` - StartedAt *string `json:"started_at,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type GetLogstashPipelinesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} } -// PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error defines model for PackageInfo.InstallationInfo.LatestInstallFailedAttempts.Error. 
-type PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error struct { - Message string `json:"message"` - Name string `json:"name"` - Stack *string `json:"stack,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r GetLogstashPipelinesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item defines model for PackageInfo.InstallationInfo.LatestInstallFailedAttempts.Item. -type PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item struct { - CreatedAt string `json:"created_at"` - Error PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error `json:"error"` - TargetVersion string `json:"target_version"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetLogstashPipelinesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PackageInfoInstallationInfoVerificationStatus defines model for PackageInfo.InstallationInfo.VerificationStatus. -type PackageInfoInstallationInfoVerificationStatus string +type PostMaintenanceWindowIdArchiveResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` -// PackageInfo_InstallationInfo defines model for PackageInfo.InstallationInfo. -type PackageInfo_InstallationInfo struct { - AdditionalSpacesInstalledKibana *map[string][]PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item `json:"additional_spaces_installed_kibana,omitempty"` - CreatedAt *string `json:"created_at,omitempty"` - ExperimentalDataStreamFeatures *[]PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item `json:"experimental_data_stream_features,omitempty"` - InstallFormatSchemaVersion *string `json:"install_format_schema_version,omitempty"` - InstallSource PackageInfoInstallationInfoInstallSource `json:"install_source"` - InstallStatus PackageInfoInstallationInfoInstallStatus `json:"install_status"` - InstalledEs []PackageInfo_InstallationInfo_InstalledEs_Item `json:"installed_es"` - InstalledKibana []PackageInfo_InstallationInfo_InstalledKibana_Item `json:"installed_kibana"` - InstalledKibanaSpaceId *string `json:"installed_kibana_space_id,omitempty"` - LatestExecutedState *PackageInfo_InstallationInfo_LatestExecutedState `json:"latest_executed_state,omitempty"` - LatestInstallFailedAttempts *[]PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item `json:"latest_install_failed_attempts,omitempty"` - Name string `json:"name"` - Namespaces *[]string `json:"namespaces,omitempty"` - Type string `json:"type"` - UpdatedAt *string `json:"updated_at,omitempty"` - VerificationKeyId *string `json:"verification_key_id,omitempty"` - VerificationStatus PackageInfoInstallationInfoVerificationStatus `json:"verification_status"` - Version string `json:"version"` - AdditionalProperties map[string]interface{} `json:"-"` -} + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` -// PackageInfoOwnerType defines model for PackageInfo.Owner.Type. -type PackageInfoOwnerType string + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. 
+ Enabled bool `json:"enabled"` -// PackageInfo_Owner defines model for PackageInfo.Owner. -type PackageInfo_Owner struct { - Github *string `json:"github,omitempty"` - Type *PackageInfoOwnerType `json:"type,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` -} + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` -// PackageInfoRelease defines model for PackageInfo.Release. -type PackageInfoRelease string + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` -// PackageInfo_Source defines model for PackageInfo.Source. -type PackageInfo_Source struct { - License string `json:"license"` - AdditionalProperties map[string]interface{} `json:"-"` -} + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` -// PackageInfoType0 defines model for PackageInfo.Type.0. -type PackageInfoType0 string + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` -// PackageInfoType1 defines model for PackageInfo.Type.1. -type PackageInfoType1 string + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` -// PackageInfoType2 defines model for PackageInfo.Type.2. -type PackageInfoType2 string + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` -// PackageInfoType3 defines model for . -type PackageInfoType3 = string + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` -// PackageInfo_Type defines model for PackageInfo.Type. -type PackageInfo_Type struct { - union json.RawMessage -} + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` -// PackageListItem defines model for package_list_item. 
-type PackageListItem struct { - Categories *[]string `json:"categories,omitempty"` - Conditions *PackageListItem_Conditions `json:"conditions,omitempty"` - DataStreams *[]map[string]interface{} `json:"data_streams,omitempty"` - Description *string `json:"description,omitempty"` - Discovery *PackageListItem_Discovery `json:"discovery,omitempty"` - Download *string `json:"download,omitempty"` - FormatVersion *string `json:"format_version,omitempty"` - Icons *[]PackageListItem_Icons_Item `json:"icons,omitempty"` - Id string `json:"id"` - InstallationInfo *PackageListItem_InstallationInfo `json:"installationInfo,omitempty"` - Integration *string `json:"integration,omitempty"` - Internal *bool `json:"internal,omitempty"` - LatestVersion *string `json:"latestVersion,omitempty"` - Name string `json:"name"` - Owner *PackageListItem_Owner `json:"owner,omitempty"` - Path *string `json:"path,omitempty"` - PolicyTemplates *[]map[string]interface{} `json:"policy_templates,omitempty"` - Readme *string `json:"readme,omitempty"` - Release *PackageListItemRelease `json:"release,omitempty"` - SignaturePath *string `json:"signature_path,omitempty"` - Source *PackageListItem_Source `json:"source,omitempty"` - Status *string `json:"status,omitempty"` - Title string `json:"title"` - Type *PackageListItem_Type `json:"type,omitempty"` - Vars *[]map[string]interface{} `json:"vars,omitempty"` - Version string `json:"version"` - AdditionalProperties map[string]interface{} `json:"-"` -} + // Status The current status of the maintenance window. + Status PostMaintenanceWindowIdArchive200Status `json:"status"` -// PackageListItem_Conditions_Elastic defines model for PackageListItem.Conditions.Elastic. -type PackageListItem_Conditions_Elastic struct { - Capabilities *[]string `json:"capabilities,omitempty"` - Subscription *string `json:"subscription,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` -} + // Title The name of the maintenance window. + Title string `json:"title"` -// PackageListItem_Conditions_Kibana defines model for PackageListItem.Conditions.Kibana. -type PackageListItem_Conditions_Kibana struct { - Version *string `json:"version,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` -} + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` -// PackageListItem_Conditions defines model for PackageListItem.Conditions. -type PackageListItem_Conditions struct { - Elastic *PackageListItem_Conditions_Elastic `json:"elastic,omitempty"` - Kibana *PackageListItem_Conditions_Kibana `json:"kibana,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` + } } +type PostMaintenanceWindowIdArchive200Status string -// PackageListItem_Discovery_Fields_Item defines model for PackageListItem.Discovery.Fields.Item. -type PackageListItem_Discovery_Fields_Item struct { - Name string `json:"name"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r PostMaintenanceWindowIdArchiveResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackageListItem_Discovery defines model for PackageListItem.Discovery. 
-type PackageListItem_Discovery struct { - Fields *[]PackageListItem_Discovery_Fields_Item `json:"fields,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostMaintenanceWindowIdArchiveResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PackageListItem_Icons_Item defines model for package_list_item.icons.Item. -type PackageListItem_Icons_Item struct { - DarkMode *bool `json:"dark_mode,omitempty"` - Path *string `json:"path,omitempty"` - Size *string `json:"size,omitempty"` - Src string `json:"src"` - Title *string `json:"title,omitempty"` - Type *string `json:"type,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` -} +type PostMaintenanceWindowIdUnarchiveResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` -// PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 defines model for PackageListItem.InstallationInfo.AdditionalSpacesInstalledKibana.Type.0. -type PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 string + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` -// PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 defines model for . -type PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 = string + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` -// PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type defines model for PackageListItem.InstallationInfo.AdditionalSpacesInstalledKibana.Type. -type PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type struct { - union json.RawMessage -} + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` -// PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item defines model for PackageListItem.InstallationInfo.AdditionalSpacesInstalledKibana.Item. -type PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item struct { - Id string `json:"id"` - OriginId *string `json:"originId,omitempty"` - Type PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type `json:"type"` - AdditionalProperties map[string]interface{} `json:"-"` -} + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` -// PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features defines model for PackageListItem.InstallationInfo.ExperimentalDataStreamFeatures.Features. 
-type PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features struct { - DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` - DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` - SyntheticSource *bool `json:"synthetic_source,omitempty"` - Tsdb *bool `json:"tsdb,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` -} + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` -// PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item defines model for PackageListItem.InstallationInfo.ExperimentalDataStreamFeatures.Item. -type PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item struct { - DataStream string `json:"data_stream"` - Features PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features `json:"features"` - AdditionalProperties map[string]interface{} `json:"-"` -} + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` -// PackageListItemInstallationInfoInstallSource defines model for PackageListItem.InstallationInfo.InstallSource. -type PackageListItemInstallationInfoInstallSource string + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` -// PackageListItemInstallationInfoInstallStatus defines model for PackageListItem.InstallationInfo.InstallStatus. -type PackageListItemInstallationInfoInstallStatus string + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` -// PackageListItemInstallationInfoInstalledEsType defines model for PackageListItem.InstallationInfo.InstalledEs.Type. -type PackageListItemInstallationInfoInstalledEsType string + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` -// PackageListItem_InstallationInfo_InstalledEs_Item defines model for PackageListItem.InstallationInfo.InstalledEs.Item. -type PackageListItem_InstallationInfo_InstalledEs_Item struct { - Deferred *bool `json:"deferred,omitempty"` - Id string `json:"id"` - Type PackageListItemInstallationInfoInstalledEsType `json:"type"` - Version *string `json:"version,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` -} + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` -// PackageListItemInstallationInfoInstalledKibanaType0 defines model for PackageListItem.InstallationInfo.InstalledKibana.Type.0. -type PackageListItemInstallationInfoInstalledKibanaType0 string + // Status The current status of the maintenance window. + Status PostMaintenanceWindowIdUnarchive200Status `json:"status"` -// PackageListItemInstallationInfoInstalledKibanaType1 defines model for . -type PackageListItemInstallationInfoInstalledKibanaType1 = string + // Title The name of the maintenance window. 
+ Title string `json:"title"` -// PackageListItem_InstallationInfo_InstalledKibana_Type defines model for PackageListItem.InstallationInfo.InstalledKibana.Type. -type PackageListItem_InstallationInfo_InstalledKibana_Type struct { - union json.RawMessage -} + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` -// PackageListItem_InstallationInfo_InstalledKibana_Item defines model for PackageListItem.InstallationInfo.InstalledKibana.Item. -type PackageListItem_InstallationInfo_InstalledKibana_Item struct { - Id string `json:"id"` - OriginId *string `json:"originId,omitempty"` - Type PackageListItem_InstallationInfo_InstalledKibana_Type `json:"type"` - AdditionalProperties map[string]interface{} `json:"-"` + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` + } } +type PostMaintenanceWindowIdUnarchive200Status string -// PackageListItem_InstallationInfo_LatestExecutedState defines model for PackageListItem.InstallationInfo.LatestExecutedState. -type PackageListItem_InstallationInfo_LatestExecutedState struct { - Error *string `json:"error,omitempty"` - Name *string `json:"name,omitempty"` - StartedAt *string `json:"started_at,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r PostMaintenanceWindowIdUnarchiveResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error defines model for PackageListItem.InstallationInfo.LatestInstallFailedAttempts.Error. -type PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error struct { - Message string `json:"message"` - Name string `json:"name"` - Stack *string `json:"stack,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostMaintenanceWindowIdUnarchiveResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item defines model for PackageListItem.InstallationInfo.LatestInstallFailedAttempts.Item. -type PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item struct { - CreatedAt string `json:"created_at"` - Error PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error `json:"error"` - TargetVersion string `json:"target_version"` - AdditionalProperties map[string]interface{} `json:"-"` +type MlSyncResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *MachineLearningAPIsMlSync200Response + JSON401 *MachineLearningAPIsMlSync4xxResponse } -// PackageListItemInstallationInfoVerificationStatus defines model for PackageListItem.InstallationInfo.VerificationStatus. -type PackageListItemInstallationInfoVerificationStatus string - -// PackageListItem_InstallationInfo defines model for PackageListItem.InstallationInfo. 
-type PackageListItem_InstallationInfo struct { - AdditionalSpacesInstalledKibana *map[string][]PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item `json:"additional_spaces_installed_kibana,omitempty"` - CreatedAt *string `json:"created_at,omitempty"` - ExperimentalDataStreamFeatures *[]PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item `json:"experimental_data_stream_features,omitempty"` - InstallFormatSchemaVersion *string `json:"install_format_schema_version,omitempty"` - InstallSource PackageListItemInstallationInfoInstallSource `json:"install_source"` - InstallStatus PackageListItemInstallationInfoInstallStatus `json:"install_status"` - InstalledEs []PackageListItem_InstallationInfo_InstalledEs_Item `json:"installed_es"` - InstalledKibana []PackageListItem_InstallationInfo_InstalledKibana_Item `json:"installed_kibana"` - InstalledKibanaSpaceId *string `json:"installed_kibana_space_id,omitempty"` - LatestExecutedState *PackageListItem_InstallationInfo_LatestExecutedState `json:"latest_executed_state,omitempty"` - LatestInstallFailedAttempts *[]PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item `json:"latest_install_failed_attempts,omitempty"` - Name string `json:"name"` - Namespaces *[]string `json:"namespaces,omitempty"` - Type string `json:"type"` - UpdatedAt *string `json:"updated_at,omitempty"` - VerificationKeyId *string `json:"verification_key_id,omitempty"` - VerificationStatus PackageListItemInstallationInfoVerificationStatus `json:"verification_status"` - Version string `json:"version"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r MlSyncResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackageListItemOwnerType defines model for PackageListItem.Owner.Type. -type PackageListItemOwnerType string - -// PackageListItem_Owner defines model for PackageListItem.Owner. -type PackageListItem_Owner struct { - Github *string `json:"github,omitempty"` - Type *PackageListItemOwnerType `json:"type,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// StatusCode returns HTTPResponse.StatusCode +func (r MlSyncResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PackageListItemRelease defines model for PackageListItem.Release. -type PackageListItemRelease string - -// PackageListItem_Source defines model for PackageListItem.Source. -type PackageListItem_Source struct { - License string `json:"license"` - AdditionalProperties map[string]interface{} `json:"-"` +type DeleteNoteResponse struct { + Body []byte + HTTPResponse *http.Response } -// PackageListItemType0 defines model for PackageListItem.Type.0. -type PackageListItemType0 string - -// PackageListItemType1 defines model for PackageListItem.Type.1. -type PackageListItemType1 string +// Status returns HTTPResponse.Status +func (r DeleteNoteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PackageListItemType2 defines model for PackageListItem.Type.2. -type PackageListItemType2 string +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteNoteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PackageListItemType3 defines model for . 
-type PackageListItemType3 = string +type GetNotesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIGetNotesResult +} -// PackageListItem_Type defines model for PackageListItem.Type. -type PackageListItem_Type struct { - union json.RawMessage +// Status returns HTTPResponse.Status +func (r GetNotesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackagePolicy defines model for package_policy. -type PackagePolicy struct { - // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. - AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions,omitempty"` - Agents *float32 `json:"agents,omitempty"` - CreatedAt string `json:"created_at"` - CreatedBy string `json:"created_by"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetNotesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Description Package policy description - Description *string `json:"description,omitempty"` - Elasticsearch *PackagePolicy_Elasticsearch `json:"elasticsearch,omitempty"` - Enabled bool `json:"enabled"` - Id string `json:"id"` +type PersistNoteRouteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIResponseNote +} - // Inputs Package policy inputs (see integration documentation to know what inputs are available) - Inputs map[string]PackagePolicyInput `json:"inputs"` - IsManaged *bool `json:"is_managed,omitempty"` +// Status returns HTTPResponse.Status +func (r PersistNoteRouteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Name Package policy name (should be unique) - Name string `json:"name"` +// StatusCode returns HTTPResponse.StatusCode +func (r PersistNoteRouteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. - Namespace *string `json:"namespace,omitempty"` - OutputId *string `json:"output_id,omitempty"` +type ObservabilityAiAssistantChatCompleteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} +} - // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. 
- Overrides *struct { - Inputs *map[string]interface{} `json:"inputs,omitempty"` - } `json:"overrides,omitempty"` - Package *struct { - ExperimentalDataStreamFeatures *[]struct { - DataStream string `json:"data_stream"` - Features struct { - DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` - DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` - SyntheticSource *bool `json:"synthetic_source,omitempty"` - Tsdb *bool `json:"tsdb,omitempty"` - } `json:"features"` - } `json:"experimental_data_stream_features,omitempty"` +// Status returns HTTPResponse.Status +func (r ObservabilityAiAssistantChatCompleteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Name Package name - Name string `json:"name"` - RequiresRoot *bool `json:"requires_root,omitempty"` - Title *string `json:"title,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r ObservabilityAiAssistantChatCompleteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Version Package version - Version string `json:"version"` - } `json:"package,omitempty"` +type OsqueryFindLiveQueriesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPIFindLiveQueryResponse +} - // PolicyId Agent policy ID where that package policy will be added - // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set - PolicyId *string `json:"policy_id,omitempty"` - PolicyIds *[]string `json:"policy_ids,omitempty"` - Revision float32 `json:"revision"` - SecretReferences *[]PackagePolicySecretRef `json:"secret_references,omitempty"` - SpaceIds *[]string `json:"spaceIds,omitempty"` +// Status returns HTTPResponse.Status +func (r OsqueryFindLiveQueriesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. - SupportsAgentless *bool `json:"supports_agentless,omitempty"` - UpdatedAt string `json:"updated_at"` - UpdatedBy string `json:"updated_by"` - Vars *map[string]interface{} `json:"vars,omitempty"` - Version *string `json:"version,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryFindLiveQueriesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PackagePolicy_Elasticsearch_Privileges defines model for PackagePolicy.Elasticsearch.Privileges. -type PackagePolicy_Elasticsearch_Privileges struct { - Cluster *[]string `json:"cluster,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +type OsqueryCreateLiveQueryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPICreateLiveQueryResponse } -// PackagePolicy_Elasticsearch defines model for PackagePolicy.Elasticsearch. -type PackagePolicy_Elasticsearch struct { - Privileges *PackagePolicy_Elasticsearch_Privileges `json:"privileges,omitempty"` - AdditionalProperties map[string]interface{} `json:"-"` +// Status returns HTTPResponse.Status +func (r OsqueryCreateLiveQueryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackagePolicyInput defines model for package_policy_input. 
-type PackagePolicyInput struct { - // Enabled enable or disable that input, (default to true) - Enabled *bool `json:"enabled,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryCreateLiveQueryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Streams Input streams (see integration documentation to know what streams are available) - Streams *map[string]PackagePolicyInputStream `json:"streams,omitempty"` - Vars *map[string]interface{} `json:"vars,omitempty"` +type OsqueryGetLiveQueryDetailsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPIFindLiveQueryDetailsResponse } -// PackagePolicyInputStream defines model for package_policy_input_stream. -type PackagePolicyInputStream struct { - // Enabled enable or disable that stream, (default to true) - Enabled *bool `json:"enabled,omitempty"` - Vars *map[string]interface{} `json:"vars,omitempty"` +// Status returns HTTPResponse.Status +func (r OsqueryGetLiveQueryDetailsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackagePolicyRequest defines model for package_policy_request. -type PackagePolicyRequest struct { - // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. - AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions,omitempty"` - Description *string `json:"description,omitempty"` - Force *bool `json:"force,omitempty"` - Id *string `json:"id,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryGetLiveQueryDetailsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Inputs Package policy inputs (see integration documentation to know what inputs are available) - Inputs *map[string]PackagePolicyRequestInput `json:"inputs,omitempty"` - Name string `json:"name"` - Namespace *string `json:"namespace,omitempty"` - OutputId *string `json:"output_id,omitempty"` - Package PackagePolicyRequestPackage `json:"package"` - PolicyId *string `json:"policy_id,omitempty"` - PolicyIds *[]string `json:"policy_ids,omitempty"` +type OsqueryGetLiveQueryResultsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPIGetLiveQueryResultsResponse +} - // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. - SupportsAgentless *bool `json:"supports_agentless,omitempty"` - Vars *map[string]interface{} `json:"vars,omitempty"` +// Status returns HTTPResponse.Status +func (r OsqueryGetLiveQueryResultsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackagePolicyRequestInput defines model for package_policy_request_input. 
-type PackagePolicyRequestInput struct { - // Enabled enable or disable that input, (default to true) - Enabled *bool `json:"enabled,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryGetLiveQueryResultsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Streams Input streams (see integration documentation to know what streams are available) - Streams *map[string]PackagePolicyRequestInputStream `json:"streams,omitempty"` - Vars *map[string]interface{} `json:"vars,omitempty"` +type OsqueryFindPacksResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPIFindPacksResponse } -// PackagePolicyRequestInputStream defines model for package_policy_request_input_stream. -type PackagePolicyRequestInputStream struct { - // Enabled enable or disable that stream, (default to true) - Enabled *bool `json:"enabled,omitempty"` - Vars *map[string]interface{} `json:"vars,omitempty"` +// Status returns HTTPResponse.Status +func (r OsqueryFindPacksResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackagePolicyRequestPackage defines model for package_policy_request_package. -type PackagePolicyRequestPackage struct { - ExperimentalDataStreamFeatures *[]struct { - DataStream string `json:"data_stream"` - Features struct { - DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` - DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` - SyntheticSource *bool `json:"synthetic_source,omitempty"` - Tsdb *bool `json:"tsdb,omitempty"` - } `json:"features"` - } `json:"experimental_data_stream_features,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryFindPacksResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Name Package name - Name string `json:"name"` - RequiresRoot *bool `json:"requires_root,omitempty"` - Title *string `json:"title,omitempty"` +type OsqueryCreatePacksResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPICreatePacksResponse +} - // Version Package version - Version string `json:"version"` +// Status returns HTTPResponse.Status +func (r OsqueryCreatePacksResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PackagePolicySecretRef defines model for package_policy_secret_ref. -type PackagePolicySecretRef struct { - Id string `json:"id"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryCreatePacksResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PagerdutyConfig Defines properties for connectors when type is `.pagerduty`. -type PagerdutyConfig struct { - // ApiUrl The PagerDuty event URL. - ApiUrl *string `json:"apiUrl,omitempty"` +type OsqueryDeletePacksResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} } -// PagerdutySecrets Defines secrets for connectors when type is `.pagerduty`. -type PagerdutySecrets struct { - // RoutingKey A 32 character PagerDuty Integration Key for an integration on a service. 
- RoutingKey string `json:"routingKey"` +// Status returns HTTPResponse.Status +func (r OsqueryDeletePacksResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// Pfx If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-pfx`, it is a base64 encoded version of the PFX or P12 file. -type Pfx = string +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryDeletePacksResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// ResilientConfig Defines properties for connectors when type is `.resilient`. -type ResilientConfig struct { - // ApiUrl The IBM Resilient instance URL. - ApiUrl string `json:"apiUrl"` +type OsqueryGetPacksDetailsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPIFindPackResponse +} - // OrgId The IBM Resilient organization ID. - OrgId string `json:"orgId"` +// Status returns HTTPResponse.Status +func (r OsqueryGetPacksDetailsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// ResilientSecrets Defines secrets for connectors when type is `.resilient`. -type ResilientSecrets struct { - // ApiKeyId The authentication key ID for HTTP Basic authentication. - ApiKeyId string `json:"apiKeyId"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryGetPacksDetailsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // ApiKeySecret The authentication key secret for HTTP Basic authentication. - ApiKeySecret string `json:"apiKeySecret"` +type OsqueryUpdatePacksResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPIUpdatePacksResponse } -// SentineloneConfig Defines properties for connectors when type is `.sentinelone`. -type SentineloneConfig struct { - // Url The SentinelOne tenant URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - Url string `json:"url"` +// Status returns HTTPResponse.Status +func (r OsqueryUpdatePacksResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// SentineloneSecrets Defines secrets for connectors when type is `.sentinelone`. -type SentineloneSecrets struct { - // Token The A SentinelOne API token. - Token string `json:"token"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryUpdatePacksResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// ServerHost defines model for server_host. 
-type ServerHost struct { - HostUrls []string `json:"host_urls"` - Id string `json:"id"` - IsDefault *bool `json:"is_default,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Name string `json:"name"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *struct { - Ssl *struct { - EsKey *ServerHost_Secrets_Ssl_EsKey `json:"es_key,omitempty"` - Key *ServerHost_Secrets_Ssl_Key `json:"key,omitempty"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - Ssl *struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - ClientAuth *ServerHostSslClientAuth `json:"client_auth,omitempty"` - EsCertificate *string `json:"es_certificate,omitempty"` - EsCertificateAuthorities *[]string `json:"es_certificate_authorities,omitempty"` - EsKey *string `json:"es_key,omitempty"` - Key *string `json:"key,omitempty"` - } `json:"ssl,omitempty"` +type OsqueryFindSavedQueriesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPIFindSavedQueryResponse } -// ServerHostSecretsSslEsKey0 defines model for . -type ServerHostSecretsSslEsKey0 struct { - Id string `json:"id"` +// Status returns HTTPResponse.Status +func (r OsqueryFindSavedQueriesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// ServerHostSecretsSslEsKey1 defines model for . -type ServerHostSecretsSslEsKey1 = string +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryFindSavedQueriesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// ServerHost_Secrets_Ssl_EsKey defines model for ServerHost.Secrets.Ssl.EsKey. -type ServerHost_Secrets_Ssl_EsKey struct { - union json.RawMessage +type OsqueryCreateSavedQueryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPICreateSavedQueryResponse } -// ServerHostSecretsSslKey0 defines model for . -type ServerHostSecretsSslKey0 struct { - Id string `json:"id"` +// Status returns HTTPResponse.Status +func (r OsqueryCreateSavedQueryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// ServerHostSecretsSslKey1 defines model for . -type ServerHostSecretsSslKey1 = string +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryCreateSavedQueryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// ServerHost_Secrets_Ssl_Key defines model for ServerHost.Secrets.Ssl.Key. -type ServerHost_Secrets_Ssl_Key struct { - union json.RawMessage +type OsqueryDeleteSavedQueryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPIDefaultSuccessResponse } -// ServerHostSslClientAuth defines model for ServerHost.Ssl.ClientAuth. -type ServerHostSslClientAuth string +// Status returns HTTPResponse.Status +func (r OsqueryDeleteSavedQueryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// ServicenowConfig Defines properties for connectors when type is `.servicenow`. -type ServicenowConfig struct { - // ApiUrl The ServiceNow instance URL. 
- ApiUrl string `json:"apiUrl"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryDeleteSavedQueryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // ClientId The client ID assigned to your OAuth application. This property is required when `isOAuth` is `true`. - ClientId *string `json:"clientId,omitempty"` +type OsqueryGetSavedQueryDetailsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPIFindSavedQueryDetailResponse +} - // IsOAuth The type of authentication to use. The default value is false, which means basic authentication is used instead of open authorization (OAuth). - IsOAuth *bool `json:"isOAuth,omitempty"` +// Status returns HTTPResponse.Status +func (r OsqueryGetSavedQueryDetailsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // JwtKeyId The key identifier assigned to the JWT verifier map of your OAuth application. This property is required when `isOAuth` is `true`. - JwtKeyId *string `json:"jwtKeyId,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryGetSavedQueryDetailsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // UserIdentifierValue The identifier to use for OAuth authentication. This identifier should be the user field you selected when you created an OAuth JWT API endpoint for external clients in your ServiceNow instance. For example, if the selected user field is `Email`, the user identifier should be the user's email address. This property is required when `isOAuth` is `true`. - UserIdentifierValue *string `json:"userIdentifierValue,omitempty"` +type OsqueryUpdateSavedQueryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityOsqueryAPIUpdateSavedQueryResponse +} - // UsesTableApi Determines whether the connector uses the Table API or the Import Set API. This property is supported only for ServiceNow ITSM and ServiceNow SecOps connectors. NOTE: If this property is set to `false`, the Elastic application should be installed in ServiceNow. - UsesTableApi *bool `json:"usesTableApi,omitempty"` +// Status returns HTTPResponse.Status +func (r OsqueryUpdateSavedQueryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// ServicenowItomConfig Defines properties for connectors when type is `.servicenow-itom`. -type ServicenowItomConfig struct { - // ApiUrl The ServiceNow instance URL. - ApiUrl string `json:"apiUrl"` +// StatusCode returns HTTPResponse.StatusCode +func (r OsqueryUpdateSavedQueryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // ClientId The client ID assigned to your OAuth application. This property is required when `isOAuth` is `true`. - ClientId *string `json:"clientId,omitempty"` +type PersistPinnedEventRouteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIPersistPinnedEventResponse +} - // IsOAuth The type of authentication to use. The default value is false, which means basic authentication is used instead of open authorization (OAuth). 
- IsOAuth *bool `json:"isOAuth,omitempty"` +// Status returns HTTPResponse.Status +func (r PersistPinnedEventRouteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // JwtKeyId The key identifier assigned to the JWT verifier map of your OAuth application. This property is required when `isOAuth` is `true`. - JwtKeyId *string `json:"jwtKeyId,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PersistPinnedEventRouteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // UserIdentifierValue The identifier to use for OAuth authentication. This identifier should be the user field you selected when you created an OAuth JWT API endpoint for external clients in your ServiceNow instance. For example, if the selected user field is `Email`, the user identifier should be the user's email address. This property is required when `isOAuth` is `true`. - UserIdentifierValue *string `json:"userIdentifierValue,omitempty"` +type CleanUpRiskEngineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + CleanupSuccessful *bool `json:"cleanup_successful,omitempty"` + } + JSON400 *SecurityEntityAnalyticsAPITaskManagerUnavailableResponse + JSONDefault *SecurityEntityAnalyticsAPICleanUpRiskEngineErrorResponse } -// ServicenowSecrets Defines secrets for connectors when type is `.servicenow`, `.servicenow-sir`, or `.servicenow-itom`. -type ServicenowSecrets struct { - // ClientSecret The client secret assigned to your OAuth application. This property is required when `isOAuth` is `true`. - ClientSecret *string `json:"clientSecret,omitempty"` +// Status returns HTTPResponse.Status +func (r CleanUpRiskEngineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Password The password for HTTP basic authentication. This property is required when `isOAuth` is `false`. - Password *string `json:"password,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r CleanUpRiskEngineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // PrivateKey The RSA private key that you created for use in ServiceNow. This property is required when `isOAuth` is `true`. - PrivateKey *string `json:"privateKey,omitempty"` +type ConfigureRiskEngineSavedObjectResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + RiskEngineSavedObjectConfigured *bool `json:"risk_engine_saved_object_configured,omitempty"` + } + JSON400 *SecurityEntityAnalyticsAPITaskManagerUnavailableResponse + JSONDefault *SecurityEntityAnalyticsAPIConfigureRiskEngineSavedObjectErrorResponse +} - // PrivateKeyPassword The password for the RSA private key. This property is required when `isOAuth` is `true` and you set a password on your private key. - PrivateKeyPassword *string `json:"privateKeyPassword,omitempty"` +// Status returns HTTPResponse.Status +func (r ConfigureRiskEngineSavedObjectResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Username The username for HTTP basic authentication. This property is required when `isOAuth` is `false`. 
- Username *string `json:"username,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r ConfigureRiskEngineSavedObjectResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// SlackApiConfig Defines properties for connectors when type is `.slack_api`. -type SlackApiConfig struct { - // AllowedChannels A list of valid Slack channels. - AllowedChannels *[]struct { - // Id The Slack channel ID. - Id string `json:"id"` +type ScheduleRiskEngineNowResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityEntityAnalyticsAPIRiskEngineScheduleNowResponse + JSON400 *SecurityEntityAnalyticsAPITaskManagerUnavailableResponse + JSONDefault *SecurityEntityAnalyticsAPIRiskEngineScheduleNowErrorResponse +} - // Name The Slack channel name. - Name string `json:"name"` - } `json:"allowedChannels,omitempty"` +// Status returns HTTPResponse.Status +func (r ScheduleRiskEngineNowResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// SlackApiSecrets Defines secrets for connectors when type is `.slack`. -type SlackApiSecrets struct { - // Token Slack bot user OAuth token. - Token string `json:"token"` +// StatusCode returns HTTPResponse.StatusCode +func (r ScheduleRiskEngineNowResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// SwimlaneConfig Defines properties for connectors when type is `.swimlane`. -type SwimlaneConfig struct { - // ApiUrl The Swimlane instance URL. - ApiUrl string `json:"apiUrl"` +type BulkCreateSavedObjectsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *SavedObjects400Response +} - // AppId The Swimlane application ID. - AppId string `json:"appId"` +// Status returns HTTPResponse.Status +func (r BulkCreateSavedObjectsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // ConnectorType The type of connector. Valid values are `all`, `alerts`, and `cases`. - ConnectorType SwimlaneConfigConnectorType `json:"connectorType"` +// StatusCode returns HTTPResponse.StatusCode +func (r BulkCreateSavedObjectsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Mappings The field mapping. - Mappings *struct { - // AlertIdConfig Mapping for the alert ID. - AlertIdConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` +type BulkDeleteSavedObjectsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *SavedObjects400Response +} - // Id The identifier for the field in Swimlane. - Id string `json:"id"` +// Status returns HTTPResponse.Status +func (r BulkDeleteSavedObjectsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Key The key for the field in Swimlane. - Key string `json:"key"` +// StatusCode returns HTTPResponse.StatusCode +func (r BulkDeleteSavedObjectsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Name The name of the field in Swimlane. 
- Name string `json:"name"` - } `json:"alertIdConfig,omitempty"` +type BulkGetSavedObjectsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *SavedObjects400Response +} - // CaseIdConfig Mapping for the case ID. - CaseIdConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` +// Status returns HTTPResponse.Status +func (r BulkGetSavedObjectsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Id The identifier for the field in Swimlane. - Id string `json:"id"` +// StatusCode returns HTTPResponse.StatusCode +func (r BulkGetSavedObjectsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Key The key for the field in Swimlane. - Key string `json:"key"` +type BulkResolveSavedObjectsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *SavedObjects400Response +} - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"caseIdConfig,omitempty"` +// Status returns HTTPResponse.Status +func (r BulkResolveSavedObjectsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // CaseNameConfig Mapping for the case name. - CaseNameConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` +// StatusCode returns HTTPResponse.StatusCode +func (r BulkResolveSavedObjectsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Id The identifier for the field in Swimlane. - Id string `json:"id"` +type BulkUpdateSavedObjectsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *SavedObjects400Response +} - // Key The key for the field in Swimlane. - Key string `json:"key"` +// Status returns HTTPResponse.Status +func (r BulkUpdateSavedObjectsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"caseNameConfig,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r BulkUpdateSavedObjectsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // CommentsConfig Mapping for the case comments. - CommentsConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` +type PostSavedObjectsExportResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode PostSavedObjectsExport400StatusCode `json:"statusCode"` + } +} +type PostSavedObjectsExport400StatusCode int - // Id The identifier for the field in Swimlane. - Id string `json:"id"` +// Status returns HTTPResponse.Status +func (r PostSavedObjectsExportResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Key The key for the field in Swimlane. - Key string `json:"key"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostSavedObjectsExportResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Name The name of the field in Swimlane. 
- Name string `json:"name"` - } `json:"commentsConfig,omitempty"` +type FindSavedObjectsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *SavedObjects400Response +} - // DescriptionConfig Mapping for the case description. - DescriptionConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` +// Status returns HTTPResponse.Status +func (r FindSavedObjectsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Id The identifier for the field in Swimlane. - Id string `json:"id"` +// StatusCode returns HTTPResponse.StatusCode +func (r FindSavedObjectsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Key The key for the field in Swimlane. - Key string `json:"key"` +type PostSavedObjectsImportResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Errors Indicates the import was unsuccessful and specifies the objects that failed to import. + // + // NOTE: One object may result in multiple errors, which requires separate steps to resolve. For instance, a `missing_references` error and conflict error. + Errors []map[string]interface{} `json:"errors"` - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"descriptionConfig,omitempty"` + // Success Indicates when the import was successfully completed. When set to false, some objects may not have been created. For additional information, refer to the `errors` and `successResults` properties. + Success bool `json:"success"` - // RuleNameConfig Mapping for the name of the alert's rule. - RuleNameConfig *struct { - // FieldType The type of field in Swimlane. - FieldType string `json:"fieldType"` + // SuccessCount Indicates the number of successfully imported records. + SuccessCount float32 `json:"successCount"` - // Id The identifier for the field in Swimlane. - Id string `json:"id"` + // SuccessResults Indicates the objects that are successfully imported, with any metadata if applicable. + // + // NOTE: Objects are created only when all resolvable errors are addressed, including conflicts and missing references. If objects are created as new copies, each entry in the `successResults` array includes a `destinationId` attribute. + SuccessResults []map[string]interface{} `json:"successResults"` + } + JSON400 *struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode PostSavedObjectsImport400StatusCode `json:"statusCode"` + } +} +type PostSavedObjectsImport400StatusCode int - // Key The key for the field in Swimlane. - Key string `json:"key"` +// Status returns HTTPResponse.Status +func (r PostSavedObjectsImportResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"ruleNameConfig,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostSavedObjectsImportResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // SeverityConfig Mapping for the severity. - SeverityConfig *struct { - // FieldType The type of field in Swimlane. 
- FieldType string `json:"fieldType"` +type ResolveImportErrorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Errors Specifies the objects that failed to resolve. + // + // NOTE: One object can result in multiple errors, which requires separate steps to resolve. For instance, a `missing_references` error and a `conflict` error. + Errors *[]map[string]interface{} `json:"errors,omitempty"` - // Id The identifier for the field in Swimlane. - Id string `json:"id"` + // Success Indicates a successful import. When set to `false`, some objects may not have been created. For additional information, refer to the `errors` and `successResults` properties. + Success *bool `json:"success,omitempty"` - // Key The key for the field in Swimlane. - Key string `json:"key"` + // SuccessCount Indicates the number of successfully resolved records. + SuccessCount *float32 `json:"successCount,omitempty"` - // Name The name of the field in Swimlane. - Name string `json:"name"` - } `json:"severityConfig,omitempty"` - } `json:"mappings,omitempty"` + // SuccessResults Indicates the objects that are successfully imported, with any metadata if applicable. + // + // NOTE: Objects are only created when all resolvable errors are addressed, including conflict and missing references. + SuccessResults *[]map[string]interface{} `json:"successResults,omitempty"` + } + JSON400 *SavedObjects400Response } -// SwimlaneConfigConnectorType The type of connector. Valid values are `all`, `alerts`, and `cases`. -type SwimlaneConfigConnectorType string - -// SwimlaneSecrets Defines secrets for connectors when type is `.swimlane`. -type SwimlaneSecrets struct { - // ApiToken Swimlane API authentication token. - ApiToken *string `json:"apiToken,omitempty"` +// Status returns HTTPResponse.Status +func (r ResolveImportErrorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// TeamsSecrets Defines secrets for connectors when type is `.teams`. -type TeamsSecrets struct { - // WebhookUrl The URL of the incoming webhook. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - WebhookUrl string `json:"webhookUrl"` +// StatusCode returns HTTPResponse.StatusCode +func (r ResolveImportErrorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// ThehiveConfig Defines configuration properties for connectors when type is `.thehive`. -type ThehiveConfig struct { - // Organisation The organisation in TheHive that will contain the alerts or cases. By default, the connector uses the default organisation of the user account that created the API key. - Organisation *string `json:"organisation,omitempty"` - - // Url The instance URL in TheHive. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - Url string `json:"url"` +type ResolveSavedObjectResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *SavedObjects400Response } -// ThehiveSecrets Defines secrets for connectors when type is `.thehive`. -type ThehiveSecrets struct { - // ApiKey The API key for authentication in TheHive. 
- ApiKey string `json:"apiKey"` +// Status returns HTTPResponse.Status +func (r ResolveSavedObjectResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// TinesConfig Defines properties for connectors when type is `.tines`. -type TinesConfig struct { - // Url The Tines tenant URL. If you are using the `xpack.actions.allowedHosts` setting, make sure this hostname is added to the allowed hosts. - Url string `json:"url"` +// StatusCode returns HTTPResponse.StatusCode +func (r ResolveSavedObjectResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// TinesSecrets Defines secrets for connectors when type is `.tines`. -type TinesSecrets struct { - // Email The email used to sign in to Tines. - Email string `json:"email"` - - // Token The Tines API token. - Token string `json:"token"` +type CreateSavedObjectResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON409 *map[string]interface{} } -// TorqConfig Defines properties for connectors when type is `.torq`. -type TorqConfig struct { - // WebhookIntegrationUrl The endpoint URL of the Elastic Security integration in Torq. - WebhookIntegrationUrl string `json:"webhookIntegrationUrl"` +// Status returns HTTPResponse.Status +func (r CreateSavedObjectResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// TorqSecrets Defines secrets for connectors when type is `.torq`. -type TorqSecrets struct { - // Token The secret of the webhook authentication header. - Token string `json:"token"` +// StatusCode returns HTTPResponse.StatusCode +func (r CreateSavedObjectResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// UpdateConnectorConfig The connector configuration details. -type UpdateConnectorConfig struct { - AdditionalProperties map[string]interface{} `json:"-"` - union json.RawMessage +type GetSavedObjectResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON400 *SavedObjects400Response } -// UpdateConnectorSecrets defines model for update_connector_secrets. -type UpdateConnectorSecrets struct { - AdditionalProperties map[string]interface{} `json:"-"` - union json.RawMessage +// Status returns HTTPResponse.Status +func (r GetSavedObjectResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// UpdateOutputElasticsearch defines model for update_output_elasticsearch. 
-type UpdateOutputElasticsearch struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts *[]string `json:"hosts,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Name *string `json:"name,omitempty"` - Preset *UpdateOutputElasticsearchPreset `json:"preset,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *struct { - Ssl *struct { - Key *UpdateOutputElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - Shipper *UpdateOutputShipper `json:"shipper,omitempty"` - Ssl *UpdateOutputSsl `json:"ssl,omitempty"` - Type *UpdateOutputElasticsearchType `json:"type,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetSavedObjectResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// UpdateOutputElasticsearchPreset defines model for UpdateOutputElasticsearch.Preset. -type UpdateOutputElasticsearchPreset string - -// UpdateOutputElasticsearchSecretsSslKey0 defines model for . -type UpdateOutputElasticsearchSecretsSslKey0 struct { - Id string `json:"id"` +type CreateSavedObjectIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON409 *map[string]interface{} } -// UpdateOutputElasticsearchSecretsSslKey1 defines model for . -type UpdateOutputElasticsearchSecretsSslKey1 = string +// Status returns HTTPResponse.Status +func (r CreateSavedObjectIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// UpdateOutputElasticsearch_Secrets_Ssl_Key defines model for UpdateOutputElasticsearch.Secrets.Ssl.Key. -type UpdateOutputElasticsearch_Secrets_Ssl_Key struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r CreateSavedObjectIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// UpdateOutputElasticsearchType defines model for UpdateOutputElasticsearch.Type. -type UpdateOutputElasticsearchType string +type UpdateSavedObjectResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} + JSON404 *map[string]interface{} + JSON409 *map[string]interface{} +} -// UpdateOutputKafka defines model for update_output_kafka. 
-type UpdateOutputKafka struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - AuthType *UpdateOutputKafkaAuthType `json:"auth_type,omitempty"` - BrokerTimeout *float32 `json:"broker_timeout,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ClientId *string `json:"client_id,omitempty"` - Compression *UpdateOutputKafkaCompression `json:"compression,omitempty"` - CompressionLevel interface{} `json:"compression_level"` - ConfigYaml *string `json:"config_yaml,omitempty"` - ConnectionType interface{} `json:"connection_type"` - Hash *struct { - Hash *string `json:"hash,omitempty"` - Random *bool `json:"random,omitempty"` - } `json:"hash,omitempty"` - Headers *[]struct { - Key string `json:"key"` - Value string `json:"value"` - } `json:"headers,omitempty"` - Hosts *[]string `json:"hosts,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Key *string `json:"key,omitempty"` - Name string `json:"name"` - Partition *UpdateOutputKafkaPartition `json:"partition,omitempty"` - Password interface{} `json:"password"` - ProxyId *string `json:"proxy_id,omitempty"` - Random *struct { - GroupEvents *float32 `json:"group_events,omitempty"` - } `json:"random,omitempty"` - RequiredAcks *UpdateOutputKafkaRequiredAcks `json:"required_acks,omitempty"` - RoundRobin *struct { - GroupEvents *float32 `json:"group_events,omitempty"` - } `json:"round_robin,omitempty"` - Sasl *struct { - Mechanism *UpdateOutputKafkaSaslMechanism `json:"mechanism,omitempty"` - } `json:"sasl,omitempty"` - Secrets *struct { - Password *UpdateOutputKafka_Secrets_Password `json:"password,omitempty"` - Ssl *struct { - Key UpdateOutputKafka_Secrets_Ssl_Key `json:"key"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - Shipper *UpdateOutputShipper `json:"shipper,omitempty"` - Ssl *UpdateOutputSsl `json:"ssl,omitempty"` - Timeout *float32 `json:"timeout,omitempty"` - Topic *string `json:"topic,omitempty"` - Type *UpdateOutputKafkaType `json:"type,omitempty"` - Username interface{} `json:"username"` - Version *string `json:"version,omitempty"` +// Status returns HTTPResponse.Status +func (r UpdateSavedObjectResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// UpdateOutputKafkaAuthType defines model for UpdateOutputKafka.AuthType. -type UpdateOutputKafkaAuthType string +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateSavedObjectResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// UpdateOutputKafkaCompression defines model for UpdateOutputKafka.Compression. -type UpdateOutputKafkaCompression string +type GetSecurityRoleResponse struct { + Body []byte + HTTPResponse *http.Response +} -// UpdateOutputKafkaPartition defines model for UpdateOutputKafka.Partition. -type UpdateOutputKafkaPartition string +// Status returns HTTPResponse.Status +func (r GetSecurityRoleResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// UpdateOutputKafkaRequiredAcks defines model for UpdateOutputKafka.RequiredAcks. 
-type UpdateOutputKafkaRequiredAcks int +// StatusCode returns HTTPResponse.StatusCode +func (r GetSecurityRoleResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// UpdateOutputKafkaSaslMechanism defines model for UpdateOutputKafka.Sasl.Mechanism. -type UpdateOutputKafkaSaslMechanism string +type PostSecurityRoleQueryResponse struct { + Body []byte + HTTPResponse *http.Response +} -// UpdateOutputKafkaSecretsPassword0 defines model for . -type UpdateOutputKafkaSecretsPassword0 struct { - Id string `json:"id"` +// Status returns HTTPResponse.Status +func (r PostSecurityRoleQueryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// UpdateOutputKafkaSecretsPassword1 defines model for . -type UpdateOutputKafkaSecretsPassword1 = string +// StatusCode returns HTTPResponse.StatusCode +func (r PostSecurityRoleQueryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// UpdateOutputKafka_Secrets_Password defines model for UpdateOutputKafka.Secrets.Password. -type UpdateOutputKafka_Secrets_Password struct { - union json.RawMessage +type DeleteSecurityRoleNameResponse struct { + Body []byte + HTTPResponse *http.Response } -// UpdateOutputKafkaSecretsSslKey0 defines model for . -type UpdateOutputKafkaSecretsSslKey0 struct { - Id string `json:"id"` +// Status returns HTTPResponse.Status +func (r DeleteSecurityRoleNameResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// UpdateOutputKafkaSecretsSslKey1 defines model for . -type UpdateOutputKafkaSecretsSslKey1 = string - -// UpdateOutputKafka_Secrets_Ssl_Key defines model for UpdateOutputKafka.Secrets.Ssl.Key. -type UpdateOutputKafka_Secrets_Ssl_Key struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteSecurityRoleNameResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// UpdateOutputKafkaType defines model for UpdateOutputKafka.Type. -type UpdateOutputKafkaType string +type GetSecurityRoleNameResponse struct { + Body []byte + HTTPResponse *http.Response +} -// UpdateOutputLogstash defines model for update_output_logstash. -type UpdateOutputLogstash struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts *[]string `json:"hosts,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Name *string `json:"name,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *struct { - Ssl *struct { - Key *UpdateOutputLogstash_Secrets_Ssl_Key `json:"key,omitempty"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - Shipper *UpdateOutputShipper `json:"shipper,omitempty"` - Ssl *UpdateOutputSsl `json:"ssl,omitempty"` - Type *UpdateOutputLogstashType `json:"type,omitempty"` +// Status returns HTTPResponse.Status +func (r GetSecurityRoleNameResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// UpdateOutputLogstashSecretsSslKey0 defines model for . 
-type UpdateOutputLogstashSecretsSslKey0 struct { - Id string `json:"id"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetSecurityRoleNameResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// UpdateOutputLogstashSecretsSslKey1 defines model for . -type UpdateOutputLogstashSecretsSslKey1 = string +type PutSecurityRoleNameResponse struct { + Body []byte + HTTPResponse *http.Response +} -// UpdateOutputLogstash_Secrets_Ssl_Key defines model for UpdateOutputLogstash.Secrets.Ssl.Key. -type UpdateOutputLogstash_Secrets_Ssl_Key struct { - union json.RawMessage +// Status returns HTTPResponse.Status +func (r PutSecurityRoleNameResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// UpdateOutputLogstashType defines model for UpdateOutputLogstash.Type. -type UpdateOutputLogstashType string +// StatusCode returns HTTPResponse.StatusCode +func (r PutSecurityRoleNameResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// UpdateOutputRemoteElasticsearch defines model for update_output_remote_elasticsearch. -type UpdateOutputRemoteElasticsearch struct { - AllowEdit *[]string `json:"allow_edit,omitempty"` - CaSha256 *string `json:"ca_sha256,omitempty"` - CaTrustedFingerprint *string `json:"ca_trusted_fingerprint,omitempty"` - ConfigYaml *string `json:"config_yaml,omitempty"` - Hosts *[]string `json:"hosts,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultMonitoring *bool `json:"is_default_monitoring,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - KibanaApiKey *string `json:"kibana_api_key,omitempty"` - KibanaUrl *string `json:"kibana_url,omitempty"` - Name *string `json:"name,omitempty"` - Preset *UpdateOutputRemoteElasticsearchPreset `json:"preset,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *struct { - ServiceToken *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken `json:"service_token,omitempty"` - Ssl *struct { - Key *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key `json:"key,omitempty"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - ServiceToken *string `json:"service_token,omitempty"` - Shipper *UpdateOutputShipper `json:"shipper,omitempty"` - Ssl *UpdateOutputSsl `json:"ssl,omitempty"` - SyncIntegrations *bool `json:"sync_integrations,omitempty"` - SyncUninstalledIntegrations *bool `json:"sync_uninstalled_integrations,omitempty"` - Type *UpdateOutputRemoteElasticsearchType `json:"type,omitempty"` +type PostSecurityRolesResponse struct { + Body []byte + HTTPResponse *http.Response } -// UpdateOutputRemoteElasticsearchPreset defines model for UpdateOutputRemoteElasticsearch.Preset. -type UpdateOutputRemoteElasticsearchPreset string +// Status returns HTTPResponse.Status +func (r PostSecurityRolesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// UpdateOutputRemoteElasticsearchSecretsServiceToken0 defines model for . -type UpdateOutputRemoteElasticsearchSecretsServiceToken0 struct { - Id string `json:"id"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostSecurityRolesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// UpdateOutputRemoteElasticsearchSecretsServiceToken1 defines model for . 
-type UpdateOutputRemoteElasticsearchSecretsServiceToken1 = string +type PostSecuritySessionInvalidateResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Total The number of sessions that were successfully invalidated. + Total *int `json:"total,omitempty"` + } +} -// UpdateOutputRemoteElasticsearch_Secrets_ServiceToken defines model for UpdateOutputRemoteElasticsearch.Secrets.ServiceToken. -type UpdateOutputRemoteElasticsearch_Secrets_ServiceToken struct { - union json.RawMessage +// Status returns HTTPResponse.Status +func (r PostSecuritySessionInvalidateResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// UpdateOutputRemoteElasticsearchSecretsSslKey0 defines model for . -type UpdateOutputRemoteElasticsearchSecretsSslKey0 struct { - Id string `json:"id"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostSecuritySessionInvalidateResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// UpdateOutputRemoteElasticsearchSecretsSslKey1 defines model for . -type UpdateOutputRemoteElasticsearchSecretsSslKey1 = string +type PerformAnonymizationFieldsBulkActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIAnonymizationFieldsBulkCrudActionResponse + JSON400 *struct { + // Error Error type or name. + Error *string `json:"error,omitempty"` -// UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key defines model for UpdateOutputRemoteElasticsearch.Secrets.Ssl.Key. -type UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key struct { - union json.RawMessage + // Message Detailed error message. + Message *string `json:"message,omitempty"` + + // StatusCode Status code of the response. + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// UpdateOutputRemoteElasticsearchType defines model for UpdateOutputRemoteElasticsearch.Type. -type UpdateOutputRemoteElasticsearchType string +// Status returns HTTPResponse.Status +func (r PerformAnonymizationFieldsBulkActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// UpdateOutputShipper defines model for update_output_shipper. -type UpdateOutputShipper struct { - CompressionLevel *float32 `json:"compression_level,omitempty"` - DiskQueueCompressionEnabled *bool `json:"disk_queue_compression_enabled,omitempty"` - DiskQueueEnabled *bool `json:"disk_queue_enabled,omitempty"` - DiskQueueEncryptionEnabled *bool `json:"disk_queue_encryption_enabled,omitempty"` - DiskQueueMaxSize *float32 `json:"disk_queue_max_size,omitempty"` - DiskQueuePath *string `json:"disk_queue_path,omitempty"` - Loadbalance *bool `json:"loadbalance,omitempty"` - MaxBatchBytes *float32 `json:"max_batch_bytes,omitempty"` - MemQueueEvents *float32 `json:"mem_queue_events,omitempty"` - QueueFlushTimeout *float32 `json:"queue_flush_timeout,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PerformAnonymizationFieldsBulkActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// UpdateOutputSsl defines model for update_output_ssl. 
-type UpdateOutputSsl struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - Key *string `json:"key,omitempty"` - VerificationMode *UpdateOutputSslVerificationMode `json:"verification_mode,omitempty"` +type FindAnonymizationFieldsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Aggregations *struct { + FieldStatus *struct { + Buckets *struct { + Allowed *struct { + DocCount *int `json:"doc_count,omitempty"` + } `json:"allowed,omitempty"` + Anonymized *struct { + DocCount *int `json:"doc_count,omitempty"` + } `json:"anonymized,omitempty"` + Denied *struct { + DocCount *int `json:"doc_count,omitempty"` + } `json:"denied,omitempty"` + } `json:"buckets,omitempty"` + } `json:"field_status,omitempty"` + } `json:"aggregations,omitempty"` + All *[]SecurityAIAssistantAPIAnonymizationFieldResponse `json:"all,omitempty"` + Data []SecurityAIAssistantAPIAnonymizationFieldResponse `json:"data"` + Page int `json:"page"` + PerPage int `json:"perPage"` + Total int `json:"total"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// UpdateOutputSslVerificationMode defines model for UpdateOutputSsl.VerificationMode. -type UpdateOutputSslVerificationMode string +// Status returns HTTPResponse.Status +func (r FindAnonymizationFieldsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// UpdateOutputUnion defines model for update_output_union. -type UpdateOutputUnion struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r FindAnonymizationFieldsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// VerificationMode Controls the verification of certificates. Use `full` to validate that the certificate has an issue date within the `not_before` and `not_after` dates, chains to a trusted certificate authority (CA), and has a hostname or IP address that matches the names within the certificate. Use `certificate` to validate the certificate and verify that it is signed by a trusted authority; this option does not check the certificate hostname. Use `none` to skip certificate validation. -type VerificationMode string +type ChatCompleteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *struct { + // Error Error type. + Error *string `json:"error,omitempty"` -// WebhookConfig Defines properties for connectors when type is `.webhook`. -type WebhookConfig struct { - // AuthType The type of authentication to use: basic, SSL, or none. - AuthType *AuthType `json:"authType,omitempty"` + // Message Human-readable error message. + Message *string `json:"message,omitempty"` - // Ca A base64 encoded version of the certificate authority file that the connector can trust to sign and validate certificates. This option is available for all authentication types. - Ca *Ca `json:"ca,omitempty"` + // StatusCode HTTP status code. + StatusCode *float32 `json:"statusCode,omitempty"` + } +} - // CertType If the `authType` is `webhook-authentication-ssl`, specifies whether the certificate authentication data is in a CRT and key file format or a PFX file format. 
- CertType *CertType `json:"certType,omitempty"` +// Status returns HTTPResponse.Status +func (r ChatCompleteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // HasAuth If true, a username and password for login type authentication must be provided. - HasAuth *HasAuth `json:"hasAuth,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r ChatCompleteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Headers A set of key-value pairs sent as headers with the request. - Headers *map[string]interface{} `json:"headers,omitempty"` +type DeleteAllConversationsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Failures *[]string `json:"failures,omitempty"` + Success *bool `json:"success,omitempty"` + TotalDeleted *float32 `json:"totalDeleted,omitempty"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} - // Method The HTTP request method, either `post` or `put`. - Method *WebhookConfigMethod `json:"method,omitempty"` +// Status returns HTTPResponse.Status +func (r DeleteAllConversationsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Url The request URL. If you are using the `xpack.actions.allowedHosts` setting, add the hostname to the allowed hosts. - Url *string `json:"url,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteAllConversationsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // VerificationMode Controls the verification of certificates. Use `full` to validate that the certificate has an issue date within the `not_before` and `not_after` dates, chains to a trusted certificate authority (CA), and has a hostname or IP address that matches the names within the certificate. Use `certificate` to validate the certificate and verify that it is signed by a trusted authority; this option does not check the certificate hostname. Use `none` to skip certificate validation. - VerificationMode *VerificationMode `json:"verificationMode,omitempty"` +type CreateConversationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIConversationResponse + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// WebhookConfigMethod The HTTP request method, either `post` or `put`. -type WebhookConfigMethod string +// Status returns HTTPResponse.Status +func (r CreateConversationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// WebhookSecrets Defines secrets for connectors when type is `.webhook`. -type WebhookSecrets struct { - // Crt If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the CRT or CERT file. - Crt *Crt `json:"crt,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r CreateConversationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Key If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-crt-key`, it is a base64 encoded version of the KEY file. 
- Key *Key `json:"key,omitempty"` +type FindConversationsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Data A list of conversations. + Data []SecurityAIAssistantAPIConversationResponse `json:"data"` - // Password The password for HTTP basic authentication or the passphrase for the SSL certificate files. If `hasAuth` is set to `true` and `authType` is `webhook-authentication-basic`, this property is required. - Password *string `json:"password,omitempty"` + // Page The current page of the results. + Page int `json:"page"` - // Pfx If `authType` is `webhook-authentication-ssl` and `certType` is `ssl-pfx`, it is a base64 encoded version of the PFX or P12 file. - Pfx *Pfx `json:"pfx,omitempty"` + // PerPage The number of results returned per page. + PerPage int `json:"perPage"` - // User The username for HTTP basic authentication. If `hasAuth` is set to `true` and `authType` is `webhook-authentication-basic`, this property is required. - User *string `json:"user,omitempty"` + // Total The total number of conversations matching the filter criteria. + Total int `json:"total"` + } + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// XmattersConfig Defines properties for connectors when type is `.xmatters`. -type XmattersConfig struct { - // ConfigUrl The request URL for the Elastic Alerts trigger in xMatters. It is applicable only when `usesBasic` is `true`. - ConfigUrl *string `json:"configUrl,omitempty"` +// Status returns HTTPResponse.Status +func (r FindConversationsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // UsesBasic Specifies whether the connector uses HTTP basic authentication (`true`) or URL authentication (`false`). - UsesBasic *bool `json:"usesBasic,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r FindConversationsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// XmattersSecrets Defines secrets for connectors when type is `.xmatters`. -type XmattersSecrets struct { - // Password A user name for HTTP basic authentication. It is applicable only when `usesBasic` is `true`. - Password *string `json:"password,omitempty"` +type DeleteConversationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIConversationResponse + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} - // SecretsUrl The request URL for the Elastic Alerts trigger in xMatters with the API key included in the URL. It is applicable only when `usesBasic` is `false`. - SecretsUrl *string `json:"secretsUrl,omitempty"` +// Status returns HTTPResponse.Status +func (r DeleteConversationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // User A password for HTTP basic authentication. It is applicable only when `usesBasic` is `true`. - User *string `json:"user,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteConversationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// APMUIElasticApiVersion defines model for APM_UI_elastic_api_version. 
-type APMUIElasticApiVersion string +type ReadConversationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIConversationResponse + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } +} -// DataViewsViewId defines model for Data_views_view_id. -type DataViewsViewId = string +// Status returns HTTPResponse.Status +func (r ReadConversationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// SpaceId defines model for spaceId. -type SpaceId = string +// StatusCode returns HTTPResponse.StatusCode +func (r ReadConversationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// DeleteAgentConfigurationParams defines parameters for DeleteAgentConfiguration. -type DeleteAgentConfigurationParams struct { - // ElasticApiVersion The version of the API to use - ElasticApiVersion DeleteAgentConfigurationParamsElasticApiVersion `json:"elastic-api-version"` +type UpdateConversationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIConversationResponse + JSON400 *struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// DeleteAgentConfigurationParamsElasticApiVersion defines parameters for DeleteAgentConfiguration. -type DeleteAgentConfigurationParamsElasticApiVersion string +// Status returns HTTPResponse.Status +func (r UpdateConversationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// GetAgentConfigurationsParams defines parameters for GetAgentConfigurations. -type GetAgentConfigurationsParams struct { - // ElasticApiVersion The version of the API to use - ElasticApiVersion GetAgentConfigurationsParamsElasticApiVersion `json:"elastic-api-version"` +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateConversationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// GetAgentConfigurationsParamsElasticApiVersion defines parameters for GetAgentConfigurations. -type GetAgentConfigurationsParamsElasticApiVersion string +type CreateKnowledgeBaseEntryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIKnowledgeBaseEntryResponse + JSON400 *SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema +} -// CreateUpdateAgentConfigurationParams defines parameters for CreateUpdateAgentConfiguration. 
-type CreateUpdateAgentConfigurationParams struct { - // Overwrite If the config exists ?overwrite=true is required - Overwrite *bool `form:"overwrite,omitempty" json:"overwrite,omitempty"` +// Status returns HTTPResponse.Status +func (r CreateKnowledgeBaseEntryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // ElasticApiVersion The version of the API to use - ElasticApiVersion CreateUpdateAgentConfigurationParamsElasticApiVersion `json:"elastic-api-version"` +// StatusCode returns HTTPResponse.StatusCode +func (r CreateKnowledgeBaseEntryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// CreateUpdateAgentConfigurationParamsElasticApiVersion defines parameters for CreateUpdateAgentConfiguration. -type CreateUpdateAgentConfigurationParamsElasticApiVersion string +type PerformKnowledgeBaseEntryBulkActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIKnowledgeBaseEntryBulkCrudActionResponse + JSON400 *SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema +} -// GetFleetAgentPoliciesParams defines parameters for GetFleetAgentPolicies. -type GetFleetAgentPoliciesParams struct { - Page *float32 `form:"page,omitempty" json:"page,omitempty"` - PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` - SortField *string `form:"sortField,omitempty" json:"sortField,omitempty"` - SortOrder *GetFleetAgentPoliciesParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` - ShowUpgradeable *bool `form:"showUpgradeable,omitempty" json:"showUpgradeable,omitempty"` - Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` +// Status returns HTTPResponse.Status +func (r PerformKnowledgeBaseEntryBulkActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // NoAgentCount use withAgentCount instead - NoAgentCount *bool `form:"noAgentCount,omitempty" json:"noAgentCount,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PerformKnowledgeBaseEntryBulkActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // WithAgentCount get policies with agent count - WithAgentCount *bool `form:"withAgentCount,omitempty" json:"withAgentCount,omitempty"` +type FindKnowledgeBaseEntriesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Data The list of Knowledge Base Entries for the current page. + Data []SecurityAIAssistantAPIKnowledgeBaseEntryResponse `json:"data"` - // Full get full policies with package policies populated - Full *bool `form:"full,omitempty" json:"full,omitempty"` - Format *GetFleetAgentPoliciesParamsFormat `form:"format,omitempty" json:"format,omitempty"` -} + // Page The current page number. + Page int `json:"page"` -// GetFleetAgentPoliciesParamsSortOrder defines parameters for GetFleetAgentPolicies. -type GetFleetAgentPoliciesParamsSortOrder string + // PerPage The number of Knowledge Base Entries returned per page. + PerPage int `json:"perPage"` -// GetFleetAgentPoliciesParamsFormat defines parameters for GetFleetAgentPolicies. -type GetFleetAgentPoliciesParamsFormat string + // Total The total number of Knowledge Base Entries available. + Total int `json:"total"` + } + JSON400 *struct { + // Error A short description of the error. 
+ Error *string `json:"error,omitempty"` -// PostFleetAgentPoliciesJSONBody defines parameters for PostFleetAgentPolicies. -type PostFleetAgentPoliciesJSONBody struct { - AdvancedSettings *struct { - AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory,omitempty"` - AgentDownloadTimeout interface{} `json:"agent_download_timeout,omitempty"` - AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs,omitempty"` - AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval,omitempty"` - AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles,omitempty"` - AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes,omitempty"` - AgentLoggingLevel interface{} `json:"agent_logging_level,omitempty"` - AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period,omitempty"` - AgentLoggingToFiles interface{} `json:"agent_logging_to_files,omitempty"` - } `json:"advanced_settings,omitempty"` - AgentFeatures *[]struct { - Enabled bool `json:"enabled"` - Name string `json:"name"` - } `json:"agent_features,omitempty"` - Agentless *struct { - CloudConnectors *struct { - Enabled bool `json:"enabled"` - TargetCsp *string `json:"target_csp,omitempty"` - } `json:"cloud_connectors,omitempty"` - Resources *struct { - Requests *struct { - Cpu *string `json:"cpu,omitempty"` - Memory *string `json:"memory,omitempty"` - } `json:"requests,omitempty"` - } `json:"resources,omitempty"` - } `json:"agentless,omitempty"` - DataOutputId *string `json:"data_output_id,omitempty"` - Description *string `json:"description,omitempty"` - DownloadSourceId *string `json:"download_source_id,omitempty"` - FleetServerHostId *string `json:"fleet_server_host_id,omitempty"` - Force *bool `json:"force,omitempty"` + // Message A detailed message explaining the error. + Message *string `json:"message,omitempty"` - // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. - GlobalDataTags *[]AgentPolicyGlobalDataTagsItem `json:"global_data_tags,omitempty"` - HasFleetServer *bool `json:"has_fleet_server,omitempty"` - Id *string `json:"id,omitempty"` - InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` - IsManaged *bool `json:"is_managed,omitempty"` - IsProtected *bool `json:"is_protected,omitempty"` + // StatusCode The HTTP status code of the error. 
+ StatusCode *float32 `json:"statusCode,omitempty"` + } +} - // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled - KeepMonitoringAlive *bool `json:"keep_monitoring_alive,omitempty"` - MonitoringDiagnostics *struct { - Limit *struct { - Burst *float32 `json:"burst,omitempty"` - Interval *string `json:"interval,omitempty"` - } `json:"limit,omitempty"` - Uploader *struct { - InitDur *string `json:"init_dur,omitempty"` - MaxDur *string `json:"max_dur,omitempty"` - MaxRetries *float32 `json:"max_retries,omitempty"` - } `json:"uploader,omitempty"` - } `json:"monitoring_diagnostics,omitempty"` - MonitoringEnabled *[]PostFleetAgentPoliciesJSONBodyMonitoringEnabled `json:"monitoring_enabled,omitempty"` - MonitoringHttp *struct { - Buffer *struct { - Enabled *bool `json:"enabled,omitempty"` - } `json:"buffer,omitempty"` - Enabled *bool `json:"enabled,omitempty"` - Host *string `json:"host,omitempty"` - Port *float32 `json:"port,omitempty"` - } `json:"monitoring_http,omitempty"` - MonitoringOutputId *string `json:"monitoring_output_id,omitempty"` - MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` - Name string `json:"name"` - Namespace string `json:"namespace"` +// Status returns HTTPResponse.Status +func (r FindKnowledgeBaseEntriesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} + +// StatusCode returns HTTPResponse.StatusCode +func (r FindKnowledgeBaseEntriesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. - Overrides *map[string]interface{} `json:"overrides,omitempty"` - RequiredVersions *[]struct { - // Percentage Target percentage of agents to auto upgrade - Percentage float32 `json:"percentage"` +type DeleteKnowledgeBaseEntryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIDeleteResponseFields + JSON400 *SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema +} - // Version Target version for automatic agent upgrade - Version string `json:"version"` - } `json:"required_versions,omitempty"` - SpaceIds *[]string `json:"space_ids,omitempty"` +// Status returns HTTPResponse.Status +func (r DeleteKnowledgeBaseEntryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // SupportsAgentless Indicates whether the agent policy supports agentless integrations. - SupportsAgentless *bool `json:"supports_agentless,omitempty"` - UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteKnowledgeBaseEntryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PostFleetAgentPoliciesParams defines parameters for PostFleetAgentPolicies. 
-type PostFleetAgentPoliciesParams struct { - SysMonitoring *bool `form:"sys_monitoring,omitempty" json:"sys_monitoring,omitempty"` +type ReadKnowledgeBaseEntryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIKnowledgeBaseEntryResponse + JSON400 *SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema } -// PostFleetAgentPoliciesJSONBodyMonitoringEnabled defines parameters for PostFleetAgentPolicies. -type PostFleetAgentPoliciesJSONBodyMonitoringEnabled string +// Status returns HTTPResponse.Status +func (r ReadKnowledgeBaseEntryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PostFleetAgentPoliciesDeleteJSONBody defines parameters for PostFleetAgentPoliciesDelete. -type PostFleetAgentPoliciesDeleteJSONBody struct { - AgentPolicyId string `json:"agentPolicyId"` +// StatusCode returns HTTPResponse.StatusCode +func (r ReadKnowledgeBaseEntryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Force bypass validation checks that can prevent agent policy deletion - Force *bool `json:"force,omitempty"` +type UpdateKnowledgeBaseEntryResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIKnowledgeBaseEntryResponse + JSON400 *SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema } -// GetFleetAgentPoliciesAgentpolicyidParams defines parameters for GetFleetAgentPoliciesAgentpolicyid. -type GetFleetAgentPoliciesAgentpolicyidParams struct { - Format *GetFleetAgentPoliciesAgentpolicyidParamsFormat `form:"format,omitempty" json:"format,omitempty"` +// Status returns HTTPResponse.Status +func (r UpdateKnowledgeBaseEntryResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// GetFleetAgentPoliciesAgentpolicyidParamsFormat defines parameters for GetFleetAgentPoliciesAgentpolicyid. -type GetFleetAgentPoliciesAgentpolicyidParamsFormat string +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateKnowledgeBaseEntryResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PutFleetAgentPoliciesAgentpolicyidJSONBody defines parameters for PutFleetAgentPoliciesAgentpolicyid. 
-type PutFleetAgentPoliciesAgentpolicyidJSONBody struct { - AdvancedSettings *struct { - AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory,omitempty"` - AgentDownloadTimeout interface{} `json:"agent_download_timeout,omitempty"` - AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs,omitempty"` - AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval,omitempty"` - AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles,omitempty"` - AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes,omitempty"` - AgentLoggingLevel interface{} `json:"agent_logging_level,omitempty"` - AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period,omitempty"` - AgentLoggingToFiles interface{} `json:"agent_logging_to_files,omitempty"` - } `json:"advanced_settings,omitempty"` - AgentFeatures *[]struct { - Enabled bool `json:"enabled"` - Name string `json:"name"` - } `json:"agent_features,omitempty"` - Agentless *struct { - CloudConnectors *struct { - Enabled bool `json:"enabled"` - TargetCsp *string `json:"target_csp,omitempty"` - } `json:"cloud_connectors,omitempty"` - Resources *struct { - Requests *struct { - Cpu *string `json:"cpu,omitempty"` - Memory *string `json:"memory,omitempty"` - } `json:"requests,omitempty"` - } `json:"resources,omitempty"` - } `json:"agentless,omitempty"` - BumpRevision *bool `json:"bumpRevision,omitempty"` - DataOutputId *string `json:"data_output_id,omitempty"` - Description *string `json:"description,omitempty"` - DownloadSourceId *string `json:"download_source_id,omitempty"` - FleetServerHostId *string `json:"fleet_server_host_id,omitempty"` - Force *bool `json:"force,omitempty"` +type ReadKnowledgeBaseResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // ElserExists Indicates if the ELSER model exists for the KnowledgeBase. + ElserExists *bool `json:"elser_exists,omitempty"` - // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. - GlobalDataTags *[]AgentPolicyGlobalDataTagsItem `json:"global_data_tags,omitempty"` - HasFleetServer *bool `json:"has_fleet_server,omitempty"` - Id *string `json:"id,omitempty"` - InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` - IsManaged *bool `json:"is_managed,omitempty"` - IsProtected *bool `json:"is_protected,omitempty"` + // IsSetupAvailable Indicates if the setup process is available for the KnowledgeBase. 
+ IsSetupAvailable *bool `json:"is_setup_available,omitempty"` - // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled - KeepMonitoringAlive *bool `json:"keep_monitoring_alive,omitempty"` - MonitoringDiagnostics *struct { - Limit *struct { - Burst *float32 `json:"burst,omitempty"` - Interval *string `json:"interval,omitempty"` - } `json:"limit,omitempty"` - Uploader *struct { - InitDur *string `json:"init_dur,omitempty"` - MaxDur *string `json:"max_dur,omitempty"` - MaxRetries *float32 `json:"max_retries,omitempty"` - } `json:"uploader,omitempty"` - } `json:"monitoring_diagnostics,omitempty"` - MonitoringEnabled *[]PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled `json:"monitoring_enabled,omitempty"` - MonitoringHttp *struct { - Buffer *struct { - Enabled *bool `json:"enabled,omitempty"` - } `json:"buffer,omitempty"` - Enabled *bool `json:"enabled,omitempty"` - Host *string `json:"host,omitempty"` - Port *float32 `json:"port,omitempty"` - } `json:"monitoring_http,omitempty"` - MonitoringOutputId *string `json:"monitoring_output_id,omitempty"` - MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` - Name string `json:"name"` - Namespace string `json:"namespace"` + // IsSetupInProgress Indicates if the setup process is currently in progress. + IsSetupInProgress *bool `json:"is_setup_in_progress,omitempty"` - // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. - Overrides *map[string]interface{} `json:"overrides,omitempty"` - RequiredVersions *[]struct { - // Percentage Target percentage of agents to auto upgrade - Percentage float32 `json:"percentage"` + // ProductDocumentationStatus The status of the product documentation in the KnowledgeBase. + ProductDocumentationStatus *string `json:"product_documentation_status,omitempty"` - // Version Target version for automatic agent upgrade - Version string `json:"version"` - } `json:"required_versions,omitempty"` - SpaceIds *[]string `json:"space_ids,omitempty"` + // SecurityLabsExists Indicates if Security Labs documentation exists in the KnowledgeBase. + SecurityLabsExists *bool `json:"security_labs_exists,omitempty"` - // SupportsAgentless Indicates whether the agent policy supports agentless integrations. - SupportsAgentless *bool `json:"supports_agentless,omitempty"` - UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` + // UserDataExists Indicates if user data exists in the KnowledgeBase. + UserDataExists *bool `json:"user_data_exists,omitempty"` + } + JSON400 *struct { + // Error A short description of the error. + Error *string `json:"error,omitempty"` + + // Message A detailed error message. + Message *string `json:"message,omitempty"` + + // StatusCode The HTTP status code of the error. + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// PutFleetAgentPoliciesAgentpolicyidParams defines parameters for PutFleetAgentPoliciesAgentpolicyid. 
-type PutFleetAgentPoliciesAgentpolicyidParams struct { - Format *PutFleetAgentPoliciesAgentpolicyidParamsFormat `form:"format,omitempty" json:"format,omitempty"` +// Status returns HTTPResponse.Status +func (r ReadKnowledgeBaseResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PutFleetAgentPoliciesAgentpolicyidParamsFormat defines parameters for PutFleetAgentPoliciesAgentpolicyid. -type PutFleetAgentPoliciesAgentpolicyidParamsFormat string +// StatusCode returns HTTPResponse.StatusCode +func (r ReadKnowledgeBaseResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled defines parameters for PutFleetAgentPoliciesAgentpolicyid. -type PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled string +type CreateKnowledgeBaseResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIKnowledgeBaseResponse + JSON400 *struct { + // Error A short description of the error. + Error *string `json:"error,omitempty"` -// GetFleetEnrollmentApiKeysParams defines parameters for GetFleetEnrollmentApiKeys. -type GetFleetEnrollmentApiKeysParams struct { - Page *float32 `form:"page,omitempty" json:"page,omitempty"` - PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` - Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` -} + // Message A detailed error message. + Message *string `json:"message,omitempty"` -// GetFleetEpmPackagesParams defines parameters for GetFleetEpmPackages. -type GetFleetEpmPackagesParams struct { - Category *string `form:"category,omitempty" json:"category,omitempty"` - Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` - ExcludeInstallStatus *bool `form:"excludeInstallStatus,omitempty" json:"excludeInstallStatus,omitempty"` - WithPackagePoliciesCount *bool `form:"withPackagePoliciesCount,omitempty" json:"withPackagePoliciesCount,omitempty"` + // StatusCode The HTTP status code of the error. + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// PostFleetEpmPackagesParams defines parameters for PostFleetEpmPackages. -type PostFleetEpmPackagesParams struct { - IgnoreMappingUpdateErrors *bool `form:"ignoreMappingUpdateErrors,omitempty" json:"ignoreMappingUpdateErrors,omitempty"` - SkipDataStreamRollover *bool `form:"skipDataStreamRollover,omitempty" json:"skipDataStreamRollover,omitempty"` +// Status returns HTTPResponse.Status +func (r CreateKnowledgeBaseResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// DeleteFleetEpmPackagesPkgnamePkgversionParams defines parameters for DeleteFleetEpmPackagesPkgnamePkgversion. -type DeleteFleetEpmPackagesPkgnamePkgversionParams struct { - Force *bool `form:"force,omitempty" json:"force,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r CreateKnowledgeBaseResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// GetFleetEpmPackagesPkgnamePkgversionParams defines parameters for GetFleetEpmPackagesPkgnamePkgversion. 
-type GetFleetEpmPackagesPkgnamePkgversionParams struct { - IgnoreUnverified *bool `form:"ignoreUnverified,omitempty" json:"ignoreUnverified,omitempty"` - Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` - Full *bool `form:"full,omitempty" json:"full,omitempty"` - WithMetadata *bool `form:"withMetadata,omitempty" json:"withMetadata,omitempty"` +type PerformPromptsBulkActionResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityAIAssistantAPIPromptsBulkCrudActionResponse + JSON400 *struct { + // Error A short error message. + Error *string `json:"error,omitempty"` + + // Message A detailed error message. + Message *string `json:"message,omitempty"` + + // StatusCode The HTTP status code for the error. + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// PostFleetEpmPackagesPkgnamePkgversionJSONBody defines parameters for PostFleetEpmPackagesPkgnamePkgversion. -type PostFleetEpmPackagesPkgnamePkgversionJSONBody struct { - Force *bool `json:"force,omitempty"` - IgnoreConstraints *bool `json:"ignore_constraints,omitempty"` +// Status returns HTTPResponse.Status +func (r PerformPromptsBulkActionResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PostFleetEpmPackagesPkgnamePkgversionParams defines parameters for PostFleetEpmPackagesPkgnamePkgversion. -type PostFleetEpmPackagesPkgnamePkgversionParams struct { - Prerelease *bool `form:"prerelease,omitempty" json:"prerelease,omitempty"` - IgnoreMappingUpdateErrors *bool `form:"ignoreMappingUpdateErrors,omitempty" json:"ignoreMappingUpdateErrors,omitempty"` - SkipDataStreamRollover *bool `form:"skipDataStreamRollover,omitempty" json:"skipDataStreamRollover,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PerformPromptsBulkActionResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PostFleetFleetServerHostsJSONBody defines parameters for PostFleetFleetServerHosts. -type PostFleetFleetServerHostsJSONBody struct { - HostUrls []string `json:"host_urls"` - Id *string `json:"id,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - IsPreconfigured *bool `json:"is_preconfigured,omitempty"` - Name string `json:"name"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *struct { - Ssl *struct { - EsKey *PostFleetFleetServerHostsJSONBody_Secrets_Ssl_EsKey `json:"es_key,omitempty"` - Key *PostFleetFleetServerHostsJSONBody_Secrets_Ssl_Key `json:"key,omitempty"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - Ssl *struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - ClientAuth *PostFleetFleetServerHostsJSONBodySslClientAuth `json:"client_auth,omitempty"` - EsCertificate *string `json:"es_certificate,omitempty"` - EsCertificateAuthorities *[]string `json:"es_certificate_authorities,omitempty"` - EsKey *string `json:"es_key,omitempty"` - Key *string `json:"key,omitempty"` - } `json:"ssl,omitempty"` +type FindPromptsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Data The list of prompts returned based on the search query, sorting, and pagination. + Data []SecurityAIAssistantAPIPromptResponse `json:"data"` + + // Page Current page number. + Page int `json:"page"` + + // PerPage Number of prompts per page. 
+ PerPage int `json:"perPage"` + + // Total Total number of prompts matching the query. + Total int `json:"total"` + } + JSON400 *struct { + // Error Short error message. + Error *string `json:"error,omitempty"` + + // Message Detailed description of the error. + Message *string `json:"message,omitempty"` + + // StatusCode HTTP status code for the error. + StatusCode *float32 `json:"statusCode,omitempty"` + } } -// PostFleetFleetServerHostsJSONBodySecretsSslEsKey0 defines parameters for PostFleetFleetServerHosts. -type PostFleetFleetServerHostsJSONBodySecretsSslEsKey0 struct { - Id string `json:"id"` +// Status returns HTTPResponse.Status +func (r FindPromptsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PostFleetFleetServerHostsJSONBodySecretsSslEsKey1 defines parameters for PostFleetFleetServerHosts. -type PostFleetFleetServerHostsJSONBodySecretsSslEsKey1 = string +// StatusCode returns HTTPResponse.StatusCode +func (r FindPromptsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PostFleetFleetServerHostsJSONBody_Secrets_Ssl_EsKey defines parameters for PostFleetFleetServerHosts. -type PostFleetFleetServerHostsJSONBody_Secrets_Ssl_EsKey struct { - union json.RawMessage +type PostUrlResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ShortURLAPIsUrlResponse } -// PostFleetFleetServerHostsJSONBodySecretsSslKey0 defines parameters for PostFleetFleetServerHosts. -type PostFleetFleetServerHostsJSONBodySecretsSslKey0 struct { - Id string `json:"id"` +// Status returns HTTPResponse.Status +func (r PostUrlResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PostFleetFleetServerHostsJSONBodySecretsSslKey1 defines parameters for PostFleetFleetServerHosts. -type PostFleetFleetServerHostsJSONBodySecretsSslKey1 = string +// StatusCode returns HTTPResponse.StatusCode +func (r PostUrlResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PostFleetFleetServerHostsJSONBody_Secrets_Ssl_Key defines parameters for PostFleetFleetServerHosts. -type PostFleetFleetServerHostsJSONBody_Secrets_Ssl_Key struct { - union json.RawMessage +type ResolveUrlResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ShortURLAPIsUrlResponse } -// PostFleetFleetServerHostsJSONBodySslClientAuth defines parameters for PostFleetFleetServerHosts. -type PostFleetFleetServerHostsJSONBodySslClientAuth string +// Status returns HTTPResponse.Status +func (r ResolveUrlResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PutFleetFleetServerHostsItemidJSONBody defines parameters for PutFleetFleetServerHostsItemid. 
-type PutFleetFleetServerHostsItemidJSONBody struct { - HostUrls *[]string `json:"host_urls,omitempty"` - IsDefault *bool `json:"is_default,omitempty"` - IsInternal *bool `json:"is_internal,omitempty"` - Name *string `json:"name,omitempty"` - ProxyId *string `json:"proxy_id,omitempty"` - Secrets *struct { - Ssl *struct { - EsKey *PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_EsKey `json:"es_key,omitempty"` - Key *PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_Key `json:"key,omitempty"` - } `json:"ssl,omitempty"` - } `json:"secrets,omitempty"` - Ssl *struct { - Certificate *string `json:"certificate,omitempty"` - CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` - ClientAuth *PutFleetFleetServerHostsItemidJSONBodySslClientAuth `json:"client_auth,omitempty"` - EsCertificate *string `json:"es_certificate,omitempty"` - EsCertificateAuthorities *[]string `json:"es_certificate_authorities,omitempty"` - EsKey *string `json:"es_key,omitempty"` - Key *string `json:"key,omitempty"` - } `json:"ssl,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r ResolveUrlResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PutFleetFleetServerHostsItemidJSONBodySecretsSslEsKey0 defines parameters for PutFleetFleetServerHostsItemid. -type PutFleetFleetServerHostsItemidJSONBodySecretsSslEsKey0 struct { - Id string `json:"id"` +type DeleteUrlResponse struct { + Body []byte + HTTPResponse *http.Response } -// PutFleetFleetServerHostsItemidJSONBodySecretsSslEsKey1 defines parameters for PutFleetFleetServerHostsItemid. -type PutFleetFleetServerHostsItemidJSONBodySecretsSslEsKey1 = string +// Status returns HTTPResponse.Status +func (r DeleteUrlResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_EsKey defines parameters for PutFleetFleetServerHostsItemid. -type PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_EsKey struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteUrlResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PutFleetFleetServerHostsItemidJSONBodySecretsSslKey0 defines parameters for PutFleetFleetServerHostsItemid. -type PutFleetFleetServerHostsItemidJSONBodySecretsSslKey0 struct { - Id string `json:"id"` +type GetUrlResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ShortURLAPIsUrlResponse } -// PutFleetFleetServerHostsItemidJSONBodySecretsSslKey1 defines parameters for PutFleetFleetServerHostsItemid. -type PutFleetFleetServerHostsItemidJSONBodySecretsSslKey1 = string +// Status returns HTTPResponse.Status +func (r GetUrlResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_Key defines parameters for PutFleetFleetServerHostsItemid. -type PutFleetFleetServerHostsItemidJSONBody_Secrets_Ssl_Key struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r GetUrlResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PutFleetFleetServerHostsItemidJSONBodySslClientAuth defines parameters for PutFleetFleetServerHostsItemid. 
-type PutFleetFleetServerHostsItemidJSONBodySslClientAuth string +type PostSpacesCopySavedObjectsResponse struct { + Body []byte + HTTPResponse *http.Response +} -// GetFleetPackagePoliciesParams defines parameters for GetFleetPackagePolicies. -type GetFleetPackagePoliciesParams struct { - Page *float32 `form:"page,omitempty" json:"page,omitempty"` - PerPage *float32 `form:"perPage,omitempty" json:"perPage,omitempty"` - SortField *string `form:"sortField,omitempty" json:"sortField,omitempty"` - SortOrder *GetFleetPackagePoliciesParamsSortOrder `form:"sortOrder,omitempty" json:"sortOrder,omitempty"` - ShowUpgradeable *bool `form:"showUpgradeable,omitempty" json:"showUpgradeable,omitempty"` - Kuery *string `form:"kuery,omitempty" json:"kuery,omitempty"` - Format *GetFleetPackagePoliciesParamsFormat `form:"format,omitempty" json:"format,omitempty"` - WithAgentCount *bool `form:"withAgentCount,omitempty" json:"withAgentCount,omitempty"` +// Status returns HTTPResponse.Status +func (r PostSpacesCopySavedObjectsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// GetFleetPackagePoliciesParamsSortOrder defines parameters for GetFleetPackagePolicies. -type GetFleetPackagePoliciesParamsSortOrder string +// StatusCode returns HTTPResponse.StatusCode +func (r PostSpacesCopySavedObjectsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// GetFleetPackagePoliciesParamsFormat defines parameters for GetFleetPackagePolicies. -type GetFleetPackagePoliciesParamsFormat string +type PostSpacesDisableLegacyUrlAliasesResponse struct { + Body []byte + HTTPResponse *http.Response +} -// PostFleetPackagePoliciesParams defines parameters for PostFleetPackagePolicies. -type PostFleetPackagePoliciesParams struct { - Format *PostFleetPackagePoliciesParamsFormat `form:"format,omitempty" json:"format,omitempty"` +// Status returns HTTPResponse.Status +func (r PostSpacesDisableLegacyUrlAliasesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PostFleetPackagePoliciesParamsFormat defines parameters for PostFleetPackagePolicies. -type PostFleetPackagePoliciesParamsFormat string +// StatusCode returns HTTPResponse.StatusCode +func (r PostSpacesDisableLegacyUrlAliasesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// DeleteFleetPackagePoliciesPackagepolicyidParams defines parameters for DeleteFleetPackagePoliciesPackagepolicyid. -type DeleteFleetPackagePoliciesPackagepolicyidParams struct { - Force *bool `form:"force,omitempty" json:"force,omitempty"` +type PostSpacesGetShareableReferencesResponse struct { + Body []byte + HTTPResponse *http.Response } -// GetFleetPackagePoliciesPackagepolicyidParams defines parameters for GetFleetPackagePoliciesPackagepolicyid. -type GetFleetPackagePoliciesPackagepolicyidParams struct { - Format *GetFleetPackagePoliciesPackagepolicyidParamsFormat `form:"format,omitempty" json:"format,omitempty"` +// Status returns HTTPResponse.Status +func (r PostSpacesGetShareableReferencesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// GetFleetPackagePoliciesPackagepolicyidParamsFormat defines parameters for GetFleetPackagePoliciesPackagepolicyid. 
-type GetFleetPackagePoliciesPackagepolicyidParamsFormat string +// StatusCode returns HTTPResponse.StatusCode +func (r PostSpacesGetShareableReferencesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PutFleetPackagePoliciesPackagepolicyidParams defines parameters for PutFleetPackagePoliciesPackagepolicyid. -type PutFleetPackagePoliciesPackagepolicyidParams struct { - Format *PutFleetPackagePoliciesPackagepolicyidParamsFormat `form:"format,omitempty" json:"format,omitempty"` +type PostSpacesResolveCopySavedObjectsErrorsResponse struct { + Body []byte + HTTPResponse *http.Response } -// PutFleetPackagePoliciesPackagepolicyidParamsFormat defines parameters for PutFleetPackagePoliciesPackagepolicyid. -type PutFleetPackagePoliciesPackagepolicyidParamsFormat string +// Status returns HTTPResponse.Status +func (r PostSpacesResolveCopySavedObjectsErrorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PostParametersJSONBody defines parameters for PostParameters. -type PostParametersJSONBody struct { - union json.RawMessage +// StatusCode returns HTTPResponse.StatusCode +func (r PostSpacesResolveCopySavedObjectsErrorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 } -// PostParametersJSONBody0 defines parameters for PostParameters. -type PostParametersJSONBody0 = []SyntheticsParameterRequest +type PostSpacesUpdateObjectsSpacesResponse struct { + Body []byte + HTTPResponse *http.Response +} -// PutParameterJSONBody defines parameters for PutParameter. -type PutParameterJSONBody struct { - // Description The updated description of the parameter. - Description *string `json:"description,omitempty"` +// Status returns HTTPResponse.Status +func (r PostSpacesUpdateObjectsSpacesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Key The key of the parameter. - Key *string `json:"key,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostSpacesUpdateObjectsSpacesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Tags An array of updated tags to categorize the parameter. - Tags *[]string `json:"tags,omitempty"` +type GetSpacesSpaceResponse struct { + Body []byte + HTTPResponse *http.Response +} - // Value The updated value associated with the parameter. - Value *string `json:"value,omitempty"` +// Status returns HTTPResponse.Status +func (r GetSpacesSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PostActionsConnectorIdJSONBody defines parameters for PostActionsConnectorId. -type PostActionsConnectorIdJSONBody struct { - // Config The connector configuration details. - Config *CreateConnectorConfig `json:"config,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetSpacesSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // ConnectorTypeId The type of connector. - ConnectorTypeId string `json:"connector_type_id"` +type PostSpacesSpaceResponse struct { + Body []byte + HTTPResponse *http.Response +} - // Name The display name for the connector. 
- Name string `json:"name"` - Secrets *CreateConnectorSecrets `json:"secrets,omitempty"` +// Status returns HTTPResponse.Status +func (r PostSpacesSpaceResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) } -// PutActionsConnectorIdJSONBody defines parameters for PutActionsConnectorId. -type PutActionsConnectorIdJSONBody struct { - // Config The connector configuration details. - Config *UpdateConnectorConfig `json:"config,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostSpacesSpaceResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Name The display name for the connector. - Name string `json:"name"` - Secrets *UpdateConnectorSecrets `json:"secrets,omitempty"` +type DeleteSpacesSpaceIdResponse struct { + Body []byte + HTTPResponse *http.Response } -// DeleteAgentConfigurationJSONRequestBody defines body for DeleteAgentConfiguration for application/json ContentType. -type DeleteAgentConfigurationJSONRequestBody = APMUIDeleteServiceObject +// Status returns HTTPResponse.Status +func (r DeleteSpacesSpaceIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// CreateUpdateAgentConfigurationJSONRequestBody defines body for CreateUpdateAgentConfiguration for application/json ContentType. -type CreateUpdateAgentConfigurationJSONRequestBody = APMUIAgentConfigurationIntakeObject +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteSpacesSpaceIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PostMaintenanceWindowJSONBody defines parameters for PostMaintenanceWindow. +type GetSpacesSpaceIdResponse struct { + Body []byte + HTTPResponse *http.Response +} -type PostMaintenanceWindowJSONBody struct { - // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. - Enabled *bool `json:"enabled,omitempty"` - Schedule struct { - Custom struct { - // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. - Duration string `json:"duration"` - Recurring *struct { - // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. - End *string `json:"end,omitempty"` +// Status returns HTTPResponse.Status +func (r GetSpacesSpaceIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. - Every *string `json:"every,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetSpacesSpaceIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Occurrences The total number of recurrences of the schedule. - Occurrences *float32 `json:"occurrences,omitempty"` +type PutSpacesSpaceIdResponse struct { + Body []byte + HTTPResponse *http.Response +} - // OnMonth The specific months for a recurring schedule. Valid values are 1-12. 
- OnMonth *[]float32 `json:"onMonth,omitempty"` +// Status returns HTTPResponse.Status +func (r PutSpacesSpaceIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. - OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PutSpacesSpaceIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. - OnWeekDay *[]string `json:"onWeekDay,omitempty"` - } `json:"recurring,omitempty"` +type GetStatusResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + union json.RawMessage + } + JSON503 *struct { + union json.RawMessage + } +} - // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. - Start string `json:"start"` +// Status returns HTTPResponse.Status +func (r GetStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Timezone The timezone of the schedule. The default timezone is UTC. - Timezone *string `json:"timezone,omitempty"` - } `json:"custom"` - } `json:"schedule"` - Scope *struct { - Alerting struct { - Query struct { - // Kql A filter written in Kibana Query Language (KQL). Only alerts matching this query will be supressed by the maintenance window. - Kql string `json:"kql"` - } `json:"query"` - } `json:"alerting"` - } `json:"scope,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Title The name of the maintenance window. While this name does not have to be unique, a distinctive name can help you identify a specific maintenance window. - Title string `json:"title"` +type GetStreamsResponse struct { + Body []byte + HTTPResponse *http.Response } -// PatchMaintenanceWindowIdJSONBody defines parameters for PatchMaintenanceWindowId. -type PatchMaintenanceWindowIdJSONBody struct { - // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. - Enabled *bool `json:"enabled,omitempty"` - Schedule *struct { - Custom struct { - // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. - Duration string `json:"duration"` - Recurring *struct { - // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. - End *string `json:"end,omitempty"` +// Status returns HTTPResponse.Status +func (r GetStreamsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. 
- Every *string `json:"every,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r GetStreamsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Occurrences The total number of recurrences of the schedule. - Occurrences *float32 `json:"occurrences,omitempty"` +type PostStreamsDisableResponse struct { + Body []byte + HTTPResponse *http.Response +} - // OnMonth The specific months for a recurring schedule. Valid values are 1-12. - OnMonth *[]float32 `json:"onMonth,omitempty"` +// Status returns HTTPResponse.Status +func (r PostStreamsDisableResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. - OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostStreamsDisableResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. - OnWeekDay *[]string `json:"onWeekDay,omitempty"` - } `json:"recurring,omitempty"` +type PostStreamsEnableResponse struct { + Body []byte + HTTPResponse *http.Response +} - // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. - Start string `json:"start"` +// Status returns HTTPResponse.Status +func (r PostStreamsEnableResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} - // Timezone The timezone of the schedule. The default timezone is UTC. - Timezone *string `json:"timezone,omitempty"` - } `json:"custom"` - } `json:"schedule,omitempty"` - Scope *struct { - Alerting struct { - Query struct { - // Kql A filter written in Kibana Query Language (KQL). Only alerts matching this query will be supressed by the maintenance window. - Kql string `json:"kql"` - } `json:"query"` - } `json:"alerting"` - } `json:"scope,omitempty"` +// StatusCode returns HTTPResponse.StatusCode +func (r PostStreamsEnableResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} - // Title The name of the maintenance window. While this name does not have to be unique, a distinctive name can help you identify a specific maintenance window. - Title *string `json:"title,omitempty"` +type PostStreamsResyncResponse struct { + Body []byte + HTTPResponse *http.Response } -// PostFleetAgentPoliciesJSONRequestBody defines body for PostFleetAgentPolicies for application/json ContentType. -type PostFleetAgentPoliciesJSONRequestBody PostFleetAgentPoliciesJSONBody +// Status returns HTTPResponse.Status +func (r PostStreamsResyncResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PostFleetAgentPoliciesDeleteJSONRequestBody defines body for PostFleetAgentPoliciesDelete for application/json ContentType. 
-type PostFleetAgentPoliciesDeleteJSONRequestBody PostFleetAgentPoliciesDeleteJSONBody +// StatusCode returns HTTPResponse.StatusCode +func (r PostStreamsResyncResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PutFleetAgentPoliciesAgentpolicyidJSONRequestBody defines body for PutFleetAgentPoliciesAgentpolicyid for application/json ContentType. -type PutFleetAgentPoliciesAgentpolicyidJSONRequestBody PutFleetAgentPoliciesAgentpolicyidJSONBody +type DeleteStreamsNameResponse struct { + Body []byte + HTTPResponse *http.Response +} -// PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody defines body for PostFleetEpmPackagesPkgnamePkgversion for application/json ContentType. -type PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody PostFleetEpmPackagesPkgnamePkgversionJSONBody +// Status returns HTTPResponse.Status +func (r DeleteStreamsNameResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PostFleetFleetServerHostsJSONRequestBody defines body for PostFleetFleetServerHosts for application/json ContentType. -type PostFleetFleetServerHostsJSONRequestBody PostFleetFleetServerHostsJSONBody +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteStreamsNameResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PutFleetFleetServerHostsItemidJSONRequestBody defines body for PutFleetFleetServerHostsItemid for application/json ContentType. -type PutFleetFleetServerHostsItemidJSONRequestBody PutFleetFleetServerHostsItemidJSONBody +type GetStreamsNameResponse struct { + Body []byte + HTTPResponse *http.Response +} -// PostFleetOutputsJSONRequestBody defines body for PostFleetOutputs for application/json ContentType. -type PostFleetOutputsJSONRequestBody = NewOutputUnion +// Status returns HTTPResponse.Status +func (r GetStreamsNameResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PutFleetOutputsOutputidJSONRequestBody defines body for PutFleetOutputsOutputid for application/json ContentType. -type PutFleetOutputsOutputidJSONRequestBody = UpdateOutputUnion +// StatusCode returns HTTPResponse.StatusCode +func (r GetStreamsNameResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PostFleetPackagePoliciesJSONRequestBody defines body for PostFleetPackagePolicies for application/json ContentType. -type PostFleetPackagePoliciesJSONRequestBody = PackagePolicyRequest +type PutStreamsNameResponse struct { + Body []byte + HTTPResponse *http.Response +} -// PutFleetPackagePoliciesPackagepolicyidJSONRequestBody defines body for PutFleetPackagePoliciesPackagepolicyid for application/json ContentType. -type PutFleetPackagePoliciesPackagepolicyidJSONRequestBody = PackagePolicyRequest +// Status returns HTTPResponse.Status +func (r PutStreamsNameResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PostParametersJSONRequestBody defines body for PostParameters for application/json ContentType. 
-type PostParametersJSONRequestBody PostParametersJSONBody +// StatusCode returns HTTPResponse.StatusCode +func (r PutStreamsNameResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PutParameterJSONRequestBody defines body for PutParameter for application/json ContentType. -type PutParameterJSONRequestBody PutParameterJSONBody +type PostStreamsNameForkResponse struct { + Body []byte + HTTPResponse *http.Response +} -// PostActionsConnectorIdJSONRequestBody defines body for PostActionsConnectorId for application/json ContentType. -type PostActionsConnectorIdJSONRequestBody PostActionsConnectorIdJSONBody +// Status returns HTTPResponse.Status +func (r PostStreamsNameForkResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PutActionsConnectorIdJSONRequestBody defines body for PutActionsConnectorId for application/json ContentType. -type PutActionsConnectorIdJSONRequestBody PutActionsConnectorIdJSONBody +// StatusCode returns HTTPResponse.StatusCode +func (r PostStreamsNameForkResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// CreateDataViewDefaultwJSONRequestBody defines body for CreateDataViewDefaultw for application/json ContentType. -type CreateDataViewDefaultwJSONRequestBody = DataViewsCreateDataViewRequestObject +type GetStreamsNameGroupResponse struct { + Body []byte + HTTPResponse *http.Response +} -// UpdateDataViewDefaultJSONRequestBody defines body for UpdateDataViewDefault for application/json ContentType. -type UpdateDataViewDefaultJSONRequestBody = DataViewsUpdateDataViewRequestObject +// Status returns HTTPResponse.Status +func (r GetStreamsNameGroupResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status + } + return http.StatusText(0) +} -// PostMaintenanceWindowJSONRequestBody defines body for PostMaintenanceWindow for application/json ContentType. -type PostMaintenanceWindowJSONRequestBody PostMaintenanceWindowJSONBody +// StatusCode returns HTTPResponse.StatusCode +func (r GetStreamsNameGroupResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode + } + return 0 +} -// PatchMaintenanceWindowIdJSONRequestBody defines body for PatchMaintenanceWindowId for application/json ContentType. -type PatchMaintenanceWindowIdJSONRequestBody PatchMaintenanceWindowIdJSONBody +type PutStreamsNameGroupResponse struct { + Body []byte + HTTPResponse *http.Response +} -// Getter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges. 
Returns the specified -// element and whether it was found -func (a AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// Status returns HTTPResponse.Status +func (r PutStreamsNameGroupResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - return + return http.StatusText(0) } -// Setter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges -func (a *AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// StatusCode returns HTTPResponse.StatusCode +func (r PutStreamsNameGroupResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - a.AdditionalProperties[fieldName] = value + return 0 } -// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges to handle AdditionalProperties -func (a *AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err - } +type GetStreamsNameIngestResponse struct { + Body []byte + HTTPResponse *http.Response +} - if raw, found := object["cluster"]; found { - err = json.Unmarshal(raw, &a.Cluster) - if err != nil { - return fmt.Errorf("error reading 'cluster': %w", err) - } - delete(object, "cluster") +// Status returns HTTPResponse.Status +func (r GetStreamsNameIngestResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetStreamsNameIngestResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges to handle AdditionalProperties -func (a AgentPolicy_PackagePolicies_1_Elasticsearch_Privileges) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type PutStreamsNameIngestResponse struct { + Body []byte + HTTPResponse *http.Response +} - if a.Cluster != nil { - object["cluster"], err = json.Marshal(a.Cluster) - if err != nil { - return nil, fmt.Errorf("error marshaling 'cluster': %w", err) - } +// Status returns HTTPResponse.Status +func (r PutStreamsNameIngestResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r PutStreamsNameIngestResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return json.Marshal(object) + return 0 } -// Getter for additional properties for 
AgentPolicy_PackagePolicies_1_Elasticsearch. Returns the specified -// element and whether it was found -func (a AgentPolicy_PackagePolicies_1_Elasticsearch) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] - } - return +type PostStreamsNameContentExportResponse struct { + Body []byte + HTTPResponse *http.Response } -// Setter for additional properties for AgentPolicy_PackagePolicies_1_Elasticsearch -func (a *AgentPolicy_PackagePolicies_1_Elasticsearch) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// Status returns HTTPResponse.Status +func (r PostStreamsNameContentExportResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - a.AdditionalProperties[fieldName] = value + return http.StatusText(0) } -// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch to handle AdditionalProperties -func (a *AgentPolicy_PackagePolicies_1_Elasticsearch) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err +// StatusCode returns HTTPResponse.StatusCode +func (r PostStreamsNameContentExportResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["privileges"]; found { - err = json.Unmarshal(raw, &a.Privileges) - if err != nil { - return fmt.Errorf("error reading 'privileges': %w", err) - } - delete(object, "privileges") - } +type PostStreamsNameContentImportResponse struct { + Body []byte + HTTPResponse *http.Response +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// Status returns HTTPResponse.Status +func (r PostStreamsNameContentImportResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - return nil + return http.StatusText(0) } -// Override default JSON handling for AgentPolicy_PackagePolicies_1_Elasticsearch to handle AdditionalProperties -func (a AgentPolicy_PackagePolicies_1_Elasticsearch) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - if a.Privileges != nil { - object["privileges"], err = json.Marshal(a.Privileges) - if err != nil { - return nil, fmt.Errorf("error marshaling 'privileges': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r PostStreamsNameContentImportResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +type GetStreamsNameDashboardsResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r GetStreamsNameDashboardsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - return json.Marshal(object) + return http.StatusText(0) } -// Getter for additional properties for CreateConnectorConfig. 
Returns the specified -// element and whether it was found -func (a CreateConnectorConfig) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// StatusCode returns HTTPResponse.StatusCode +func (r GetStreamsNameDashboardsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return + return 0 } -// Setter for additional properties for CreateConnectorConfig -func (a *CreateConnectorConfig) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) - } - a.AdditionalProperties[fieldName] = value +type PostStreamsNameDashboardsBulkResponse struct { + Body []byte + HTTPResponse *http.Response } -// Getter for additional properties for CreateConnectorSecrets. Returns the specified -// element and whether it was found -func (a CreateConnectorSecrets) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// Status returns HTTPResponse.Status +func (r PostStreamsNameDashboardsBulkResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - return + return http.StatusText(0) } -// Setter for additional properties for CreateConnectorSecrets -func (a *CreateConnectorSecrets) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// StatusCode returns HTTPResponse.StatusCode +func (r PostStreamsNameDashboardsBulkResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - a.AdditionalProperties[fieldName] = value + return 0 } -// Getter for additional properties for OutputElasticsearch. 
Returns the specified -// element and whether it was found -func (a OutputElasticsearch) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] - } - return +type DeleteStreamsNameDashboardsDashboardidResponse struct { + Body []byte + HTTPResponse *http.Response } -// Setter for additional properties for OutputElasticsearch -func (a *OutputElasticsearch) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// Status returns HTTPResponse.Status +func (r DeleteStreamsNameDashboardsDashboardidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - a.AdditionalProperties[fieldName] = value + return http.StatusText(0) } -// Override default JSON handling for OutputElasticsearch to handle AdditionalProperties -func (a *OutputElasticsearch) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteStreamsNameDashboardsDashboardidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["allow_edit"]; found { - err = json.Unmarshal(raw, &a.AllowEdit) - if err != nil { - return fmt.Errorf("error reading 'allow_edit': %w", err) - } - delete(object, "allow_edit") - } +type PutStreamsNameDashboardsDashboardidResponse struct { + Body []byte + HTTPResponse *http.Response +} - if raw, found := object["ca_sha256"]; found { - err = json.Unmarshal(raw, &a.CaSha256) - if err != nil { - return fmt.Errorf("error reading 'ca_sha256': %w", err) - } - delete(object, "ca_sha256") +// Status returns HTTPResponse.Status +func (r PutStreamsNameDashboardsDashboardidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["ca_trusted_fingerprint"]; found { - err = json.Unmarshal(raw, &a.CaTrustedFingerprint) - if err != nil { - return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) - } - delete(object, "ca_trusted_fingerprint") +// StatusCode returns HTTPResponse.StatusCode +func (r PutStreamsNameDashboardsDashboardidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["config_yaml"]; found { - err = json.Unmarshal(raw, &a.ConfigYaml) - if err != nil { - return fmt.Errorf("error reading 'config_yaml': %w", err) - } - delete(object, "config_yaml") - } +type GetStreamsNameQueriesResponse struct { + Body []byte + HTTPResponse *http.Response +} - if raw, found := object["hosts"]; found { - err = json.Unmarshal(raw, &a.Hosts) - if err != nil { - return fmt.Errorf("error reading 'hosts': %w", err) - } - delete(object, "hosts") +// Status returns HTTPResponse.Status +func (r GetStreamsNameQueriesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// StatusCode returns HTTPResponse.StatusCode +func (r GetStreamsNameQueriesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["is_default"]; found { - 
err = json.Unmarshal(raw, &a.IsDefault) - if err != nil { - return fmt.Errorf("error reading 'is_default': %w", err) - } - delete(object, "is_default") - } +type PostStreamsNameQueriesBulkResponse struct { + Body []byte + HTTPResponse *http.Response +} - if raw, found := object["is_default_monitoring"]; found { - err = json.Unmarshal(raw, &a.IsDefaultMonitoring) - if err != nil { - return fmt.Errorf("error reading 'is_default_monitoring': %w", err) - } - delete(object, "is_default_monitoring") +// Status returns HTTPResponse.Status +func (r PostStreamsNameQueriesBulkResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["is_internal"]; found { - err = json.Unmarshal(raw, &a.IsInternal) - if err != nil { - return fmt.Errorf("error reading 'is_internal': %w", err) - } - delete(object, "is_internal") +// StatusCode returns HTTPResponse.StatusCode +func (r PostStreamsNameQueriesBulkResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["is_preconfigured"]; found { - err = json.Unmarshal(raw, &a.IsPreconfigured) - if err != nil { - return fmt.Errorf("error reading 'is_preconfigured': %w", err) - } - delete(object, "is_preconfigured") - } +type DeleteStreamsNameQueriesQueryidResponse struct { + Body []byte + HTTPResponse *http.Response +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// Status returns HTTPResponse.Status +func (r DeleteStreamsNameQueriesQueryidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["preset"]; found { - err = json.Unmarshal(raw, &a.Preset) - if err != nil { - return fmt.Errorf("error reading 'preset': %w", err) - } - delete(object, "preset") +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteStreamsNameQueriesQueryidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["proxy_id"]; found { - err = json.Unmarshal(raw, &a.ProxyId) - if err != nil { - return fmt.Errorf("error reading 'proxy_id': %w", err) - } - delete(object, "proxy_id") - } +type PutStreamsNameQueriesQueryidResponse struct { + Body []byte + HTTPResponse *http.Response +} - if raw, found := object["secrets"]; found { - err = json.Unmarshal(raw, &a.Secrets) - if err != nil { - return fmt.Errorf("error reading 'secrets': %w", err) - } - delete(object, "secrets") +// Status returns HTTPResponse.Status +func (r PutStreamsNameQueriesQueryidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["shipper"]; found { - err = json.Unmarshal(raw, &a.Shipper) - if err != nil { - return fmt.Errorf("error reading 'shipper': %w", err) - } - delete(object, "shipper") +// StatusCode returns HTTPResponse.StatusCode +func (r PutStreamsNameQueriesQueryidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["ssl"]; found { - err = json.Unmarshal(raw, &a.Ssl) - if err != nil { - return fmt.Errorf("error reading 'ssl': %w", err) - } - delete(object, "ssl") - } +type GetStreamsNameRulesResponse struct { + Body []byte + HTTPResponse *http.Response +} - if raw, found := 
object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +// Status returns HTTPResponse.Status +func (r GetStreamsNameRulesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetStreamsNameRulesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputElasticsearch to handle AdditionalProperties -func (a OutputElasticsearch) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type DeleteStreamsNameRulesRuleidResponse struct { + Body []byte + HTTPResponse *http.Response +} - if a.AllowEdit != nil { - object["allow_edit"], err = json.Marshal(a.AllowEdit) - if err != nil { - return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) - } +// Status returns HTTPResponse.Status +func (r DeleteStreamsNameRulesRuleidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.CaSha256 != nil { - object["ca_sha256"], err = json.Marshal(a.CaSha256) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteStreamsNameRulesRuleidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.CaTrustedFingerprint != nil { - object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) - } - } +type PutStreamsNameRulesRuleidResponse struct { + Body []byte + HTTPResponse *http.Response +} - if a.ConfigYaml != nil { - object["config_yaml"], err = json.Marshal(a.ConfigYaml) - if err != nil { - return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) - } +// Status returns HTTPResponse.Status +func (r PutStreamsNameRulesRuleidResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - object["hosts"], err = json.Marshal(a.Hosts) - if err != nil { - return nil, fmt.Errorf("error marshaling 'hosts': %w", err) +// StatusCode returns HTTPResponse.StatusCode +func (r PutStreamsNameRulesRuleidResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.Id != nil { - object["id"], err = json.Marshal(a.Id) - if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) - } - } +type GetStreamsNameSignificantEventsResponse struct { + Body []byte + HTTPResponse *http.Response +} - if a.IsDefault != nil { - object["is_default"], err = json.Marshal(a.IsDefault) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_default': %w", err) - } +// Status returns HTTPResponse.Status +func (r GetStreamsNameSignificantEventsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if 
a.IsDefaultMonitoring != nil { - object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetStreamsNameSignificantEventsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.IsInternal != nil { - object["is_internal"], err = json.Marshal(a.IsInternal) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) - } - } +type GetStreamsNameSignificantEventsGenerateResponse struct { + Body []byte + HTTPResponse *http.Response +} - if a.IsPreconfigured != nil { - object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) - } +// Status returns HTTPResponse.Status +func (r GetStreamsNameSignificantEventsGenerateResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - object["name"], err = json.Marshal(a.Name) - if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) +// StatusCode returns HTTPResponse.StatusCode +func (r GetStreamsNameSignificantEventsGenerateResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.Preset != nil { - object["preset"], err = json.Marshal(a.Preset) - if err != nil { - return nil, fmt.Errorf("error marshaling 'preset': %w", err) - } - } +type PostStreamsNameSignificantEventsPreviewResponse struct { + Body []byte + HTTPResponse *http.Response +} - if a.ProxyId != nil { - object["proxy_id"], err = json.Marshal(a.ProxyId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) - } +// Status returns HTTPResponse.Status +func (r PostStreamsNameSignificantEventsPreviewResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.Secrets != nil { - object["secrets"], err = json.Marshal(a.Secrets) - if err != nil { - return nil, fmt.Errorf("error marshaling 'secrets': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r PostStreamsNameSignificantEventsPreviewResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.Shipper != nil { - object["shipper"], err = json.Marshal(a.Shipper) - if err != nil { - return nil, fmt.Errorf("error marshaling 'shipper': %w", err) - } - } +type PostSyntheticsMonitorTestResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // Errors Array of errors encountered while triggering the test, one per service location. + Errors *[]struct { + Error struct { + // FailedMonitors Optional list of monitors that failed at the location. + FailedMonitors *[]map[string]interface{} `json:"failed_monitors"` - if a.Ssl != nil { - object["ssl"], err = json.Marshal(a.Ssl) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ssl': %w", err) - } - } + // Reason Human-readable explanation of the failure. + Reason string `json:"reason"` - object["type"], err = json.Marshal(a.Type) - if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) - } + // Status HTTP status code returned by the agent. 
+ Status int `json:"status"` + } `json:"error"` - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } + // LocationId Identifier of the service location where the error occurred. + LocationId string `json:"locationId"` + } `json:"errors,omitempty"` + + // TestRunId Unique identifier for the triggered test run. + TestRunId string `json:"testRunId"` } - return json.Marshal(object) } -// Getter for additional properties for OutputElasticsearchSecretsSslKey0. Returns the specified -// element and whether it was found -func (a OutputElasticsearchSecretsSslKey0) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// Status returns HTTPResponse.Status +func (r PostSyntheticsMonitorTestResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - return + return http.StatusText(0) } -// Setter for additional properties for OutputElasticsearchSecretsSslKey0 -func (a *OutputElasticsearchSecretsSslKey0) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// StatusCode returns HTTPResponse.StatusCode +func (r PostSyntheticsMonitorTestResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - a.AdditionalProperties[fieldName] = value + return 0 } -// Override default JSON handling for OutputElasticsearchSecretsSslKey0 to handle AdditionalProperties -func (a *OutputElasticsearchSecretsSslKey0) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err - } +type GetSyntheticMonitorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// Status returns HTTPResponse.Status +func (r GetSyntheticMonitorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetSyntheticMonitorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputElasticsearchSecretsSslKey0 to handle AdditionalProperties -func (a OutputElasticsearchSecretsSslKey0) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type PostSyntheticMonitorsResponse struct { + Body []byte + HTTPResponse *http.Response +} - object["id"], err = json.Marshal(a.Id) - if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) +// Status returns HTTPResponse.Status +func (r PostSyntheticMonitorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - for fieldName, field := range 
a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r PostSyntheticMonitorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return json.Marshal(object) + return 0 } -// Getter for additional properties for OutputElasticsearch_Secrets_Ssl. Returns the specified -// element and whether it was found -func (a OutputElasticsearch_Secrets_Ssl) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +type DeleteSyntheticMonitorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + // Deleted If it is `true`, the monitor was successfully deleted If it is `false`, the monitor was not deleted. + Deleted *bool `json:"deleted,omitempty"` + + // Ids The unique identifier of the deleted monitor. + Ids *string `json:"ids,omitempty"` } - return } -// Setter for additional properties for OutputElasticsearch_Secrets_Ssl -func (a *OutputElasticsearch_Secrets_Ssl) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// Status returns HTTPResponse.Status +func (r DeleteSyntheticMonitorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - a.AdditionalProperties[fieldName] = value + return http.StatusText(0) } -// Override default JSON handling for OutputElasticsearch_Secrets_Ssl to handle AdditionalProperties -func (a *OutputElasticsearch_Secrets_Ssl) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteSyntheticMonitorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["key"]; found { - err = json.Unmarshal(raw, &a.Key) - if err != nil { - return fmt.Errorf("error reading 'key': %w", err) - } - delete(object, "key") +type DeleteSyntheticMonitorResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r DeleteSyntheticMonitorResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteSyntheticMonitorResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputElasticsearch_Secrets_Ssl to handle AdditionalProperties -func (a OutputElasticsearch_Secrets_Ssl) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type GetSyntheticMonitorResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} +} - if a.Key != nil { - object["key"], err = json.Marshal(a.Key) - if err != nil { - return nil, fmt.Errorf("error marshaling 'key': %w", err) - } +// 
Status returns HTTPResponse.Status +func (r GetSyntheticMonitorResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetSyntheticMonitorResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return json.Marshal(object) + return 0 } -// Getter for additional properties for OutputElasticsearch_Secrets. Returns the specified -// element and whether it was found -func (a OutputElasticsearch_Secrets) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] - } - return +type PutSyntheticMonitorResponse struct { + Body []byte + HTTPResponse *http.Response } -// Setter for additional properties for OutputElasticsearch_Secrets -func (a *OutputElasticsearch_Secrets) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// Status returns HTTPResponse.Status +func (r PutSyntheticMonitorResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - a.AdditionalProperties[fieldName] = value + return http.StatusText(0) } -// Override default JSON handling for OutputElasticsearch_Secrets to handle AdditionalProperties -func (a *OutputElasticsearch_Secrets) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err +// StatusCode returns HTTPResponse.StatusCode +func (r PutSyntheticMonitorResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["ssl"]; found { - err = json.Unmarshal(raw, &a.Ssl) - if err != nil { - return fmt.Errorf("error reading 'ssl': %w", err) - } - delete(object, "ssl") +type GetParametersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]SyntheticsGetParameterResponse +} + +// Status returns HTTPResponse.Status +func (r GetParametersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetParametersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputElasticsearch_Secrets to handle AdditionalProperties -func (a OutputElasticsearch_Secrets) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type PostParametersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *CreateParamResponse +} - if a.Ssl != nil { - object["ssl"], err = json.Marshal(a.Ssl) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ssl': %w", err) - } +// Status returns HTTPResponse.Status +func (r PostParametersResponse) Status() string { + if 
r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r PostParametersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return json.Marshal(object) + return 0 } -// Getter for additional properties for OutputKafka. Returns the specified -// element and whether it was found -func (a OutputKafka) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +type DeleteParametersResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]struct { + // Deleted Indicates whether the parameter was successfully deleted. It is `true` if it was deleted. It is `false` if it was not deleted. + Deleted *bool `json:"deleted,omitempty"` + + // Id The unique identifier for the deleted parameter. + Id *string `json:"id,omitempty"` } - return } -// Setter for additional properties for OutputKafka -func (a *OutputKafka) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// Status returns HTTPResponse.Status +func (r DeleteParametersResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - a.AdditionalProperties[fieldName] = value + return http.StatusText(0) } -// Override default JSON handling for OutputKafka to handle AdditionalProperties -func (a *OutputKafka) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteParametersResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["allow_edit"]; found { - err = json.Unmarshal(raw, &a.AllowEdit) - if err != nil { - return fmt.Errorf("error reading 'allow_edit': %w", err) - } - delete(object, "allow_edit") - } +type DeleteParameterResponse struct { + Body []byte + HTTPResponse *http.Response +} - if raw, found := object["auth_type"]; found { - err = json.Unmarshal(raw, &a.AuthType) - if err != nil { - return fmt.Errorf("error reading 'auth_type': %w", err) - } - delete(object, "auth_type") +// Status returns HTTPResponse.Status +func (r DeleteParameterResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["broker_timeout"]; found { - err = json.Unmarshal(raw, &a.BrokerTimeout) - if err != nil { - return fmt.Errorf("error reading 'broker_timeout': %w", err) - } - delete(object, "broker_timeout") +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteParameterResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["ca_sha256"]; found { - err = json.Unmarshal(raw, &a.CaSha256) - if err != nil { - return fmt.Errorf("error reading 'ca_sha256': %w", err) - } - delete(object, "ca_sha256") - } +type GetParameterResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SyntheticsGetParameterResponse +} - if raw, found := object["ca_trusted_fingerprint"]; found { - err = json.Unmarshal(raw, &a.CaTrustedFingerprint) 
- if err != nil { - return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) - } - delete(object, "ca_trusted_fingerprint") +// Status returns HTTPResponse.Status +func (r GetParameterResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["client_id"]; found { - err = json.Unmarshal(raw, &a.ClientId) - if err != nil { - return fmt.Errorf("error reading 'client_id': %w", err) - } - delete(object, "client_id") +// StatusCode returns HTTPResponse.StatusCode +func (r GetParameterResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["compression"]; found { - err = json.Unmarshal(raw, &a.Compression) - if err != nil { - return fmt.Errorf("error reading 'compression': %w", err) - } - delete(object, "compression") - } +type PutParameterResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} +} - if raw, found := object["compression_level"]; found { - err = json.Unmarshal(raw, &a.CompressionLevel) - if err != nil { - return fmt.Errorf("error reading 'compression_level': %w", err) - } - delete(object, "compression_level") +// Status returns HTTPResponse.Status +func (r PutParameterResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["config_yaml"]; found { - err = json.Unmarshal(raw, &a.ConfigYaml) - if err != nil { - return fmt.Errorf("error reading 'config_yaml': %w", err) - } - delete(object, "config_yaml") +// StatusCode returns HTTPResponse.StatusCode +func (r PutParameterResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["connection_type"]; found { - err = json.Unmarshal(raw, &a.ConnectionType) - if err != nil { - return fmt.Errorf("error reading 'connection_type': %w", err) - } - delete(object, "connection_type") - } +type GetPrivateLocationsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]SyntheticsGetPrivateLocation +} - if raw, found := object["hash"]; found { - err = json.Unmarshal(raw, &a.Hash) - if err != nil { - return fmt.Errorf("error reading 'hash': %w", err) - } - delete(object, "hash") +// Status returns HTTPResponse.Status +func (r GetPrivateLocationsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["headers"]; found { - err = json.Unmarshal(raw, &a.Headers) - if err != nil { - return fmt.Errorf("error reading 'headers': %w", err) - } - delete(object, "headers") +// StatusCode returns HTTPResponse.StatusCode +func (r GetPrivateLocationsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["hosts"]; found { - err = json.Unmarshal(raw, &a.Hosts) - if err != nil { - return fmt.Errorf("error reading 'hosts': %w", err) - } - delete(object, "hosts") - } +type PostPrivateLocationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// Status returns HTTPResponse.Status +func (r PostPrivateLocationResponse) Status() string { + if r.HTTPResponse != nil { + return 
r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["is_default"]; found { - err = json.Unmarshal(raw, &a.IsDefault) - if err != nil { - return fmt.Errorf("error reading 'is_default': %w", err) - } - delete(object, "is_default") +// StatusCode returns HTTPResponse.StatusCode +func (r PostPrivateLocationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["is_default_monitoring"]; found { - err = json.Unmarshal(raw, &a.IsDefaultMonitoring) - if err != nil { - return fmt.Errorf("error reading 'is_default_monitoring': %w", err) - } - delete(object, "is_default_monitoring") - } +type DeletePrivateLocationResponse struct { + Body []byte + HTTPResponse *http.Response +} - if raw, found := object["is_internal"]; found { - err = json.Unmarshal(raw, &a.IsInternal) - if err != nil { - return fmt.Errorf("error reading 'is_internal': %w", err) - } - delete(object, "is_internal") +// Status returns HTTPResponse.Status +func (r DeletePrivateLocationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["is_preconfigured"]; found { - err = json.Unmarshal(raw, &a.IsPreconfigured) - if err != nil { - return fmt.Errorf("error reading 'is_preconfigured': %w", err) - } - delete(object, "is_preconfigured") +// StatusCode returns HTTPResponse.StatusCode +func (r DeletePrivateLocationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["key"]; found { - err = json.Unmarshal(raw, &a.Key) - if err != nil { - return fmt.Errorf("error reading 'key': %w", err) - } - delete(object, "key") - } +type GetPrivateLocationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SyntheticsGetPrivateLocation +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// Status returns HTTPResponse.Status +func (r GetPrivateLocationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["partition"]; found { - err = json.Unmarshal(raw, &a.Partition) - if err != nil { - return fmt.Errorf("error reading 'partition': %w", err) - } - delete(object, "partition") +// StatusCode returns HTTPResponse.StatusCode +func (r GetPrivateLocationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["password"]; found { - err = json.Unmarshal(raw, &a.Password) - if err != nil { - return fmt.Errorf("error reading 'password': %w", err) - } - delete(object, "password") - } +type PutPrivateLocationResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SyntheticsGetPrivateLocation +} - if raw, found := object["proxy_id"]; found { - err = json.Unmarshal(raw, &a.ProxyId) - if err != nil { - return fmt.Errorf("error reading 'proxy_id': %w", err) - } - delete(object, "proxy_id") +// Status returns HTTPResponse.Status +func (r PutPrivateLocationResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["random"]; found { - err = json.Unmarshal(raw, &a.Random) - if err != nil { - return fmt.Errorf("error reading 'random': %w", err) - } - delete(object, "random") 
+// StatusCode returns HTTPResponse.StatusCode +func (r PutPrivateLocationResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["required_acks"]; found { - err = json.Unmarshal(raw, &a.RequiredAcks) - if err != nil { - return fmt.Errorf("error reading 'required_acks': %w", err) - } - delete(object, "required_acks") +type TaskManagerHealthResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *TaskManagerHealthAPIsHealthResponse +} + +// Status returns HTTPResponse.Status +func (r TaskManagerHealthResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["round_robin"]; found { - err = json.Unmarshal(raw, &a.RoundRobin) - if err != nil { - return fmt.Errorf("error reading 'round_robin': %w", err) - } - delete(object, "round_robin") +// StatusCode returns HTTPResponse.StatusCode +func (r TaskManagerHealthResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["sasl"]; found { - err = json.Unmarshal(raw, &a.Sasl) - if err != nil { - return fmt.Errorf("error reading 'sasl': %w", err) - } - delete(object, "sasl") +type DeleteTimelinesResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r DeleteTimelinesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["secrets"]; found { - err = json.Unmarshal(raw, &a.Secrets) - if err != nil { - return fmt.Errorf("error reading 'secrets': %w", err) - } - delete(object, "secrets") +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteTimelinesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["shipper"]; found { - err = json.Unmarshal(raw, &a.Shipper) - if err != nil { - return fmt.Errorf("error reading 'shipper': %w", err) - } - delete(object, "shipper") +type GetTimelineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPITimelineResponse +} + +// Status returns HTTPResponse.Status +func (r GetTimelineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["ssl"]; found { - err = json.Unmarshal(raw, &a.Ssl) - if err != nil { - return fmt.Errorf("error reading 'ssl': %w", err) - } - delete(object, "ssl") +// StatusCode returns HTTPResponse.StatusCode +func (r GetTimelineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["timeout"]; found { - err = json.Unmarshal(raw, &a.Timeout) - if err != nil { - return fmt.Errorf("error reading 'timeout': %w", err) - } - delete(object, "timeout") +type PatchTimelineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIPersistTimelineResponse + JSON405 *struct { + // Body The error message + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` } +} - if raw, found := object["topic"]; found { - err = json.Unmarshal(raw, &a.Topic) - if err != nil { - return fmt.Errorf("error reading 'topic': %w", err) - } - delete(object, "topic") +// Status returns HTTPResponse.Status +func (r PatchTimelineResponse) Status() string { + if r.HTTPResponse 
!= nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +// StatusCode returns HTTPResponse.StatusCode +func (r PatchTimelineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["username"]; found { - err = json.Unmarshal(raw, &a.Username) - if err != nil { - return fmt.Errorf("error reading 'username': %w", err) - } - delete(object, "username") +type CreateTimelinesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIPersistTimelineResponse + JSON405 *struct { + // Body The error message + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` } +} - if raw, found := object["version"]; found { - err = json.Unmarshal(raw, &a.Version) - if err != nil { - return fmt.Errorf("error reading 'version': %w", err) - } - delete(object, "version") +// Status returns HTTPResponse.Status +func (r CreateTimelinesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r CreateTimelinesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputKafka to handle AdditionalProperties -func (a OutputKafka) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type CopyTimelineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIPersistTimelineResponse +} - if a.AllowEdit != nil { - object["allow_edit"], err = json.Marshal(a.AllowEdit) - if err != nil { - return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) - } +// Status returns HTTPResponse.Status +func (r CopyTimelineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - object["auth_type"], err = json.Marshal(a.AuthType) - if err != nil { - return nil, fmt.Errorf("error marshaling 'auth_type': %w", err) +// StatusCode returns HTTPResponse.StatusCode +func (r CopyTimelineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.BrokerTimeout != nil { - object["broker_timeout"], err = json.Marshal(a.BrokerTimeout) - if err != nil { - return nil, fmt.Errorf("error marshaling 'broker_timeout': %w", err) - } - } +type GetDraftTimelinesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIPersistTimelineResponse +} - if a.CaSha256 != nil { - object["ca_sha256"], err = json.Marshal(a.CaSha256) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) - } +// Status returns HTTPResponse.Status +func (r GetDraftTimelinesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.CaTrustedFingerprint != nil { - 
object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetDraftTimelinesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.ClientId != nil { - object["client_id"], err = json.Marshal(a.ClientId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'client_id': %w", err) - } - } +type CleanDraftTimelinesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIPersistTimelineResponse +} - if a.Compression != nil { - object["compression"], err = json.Marshal(a.Compression) - if err != nil { - return nil, fmt.Errorf("error marshaling 'compression': %w", err) - } +// Status returns HTTPResponse.Status +func (r CleanDraftTimelinesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - object["compression_level"], err = json.Marshal(a.CompressionLevel) - if err != nil { - return nil, fmt.Errorf("error marshaling 'compression_level': %w", err) +// StatusCode returns HTTPResponse.StatusCode +func (r CleanDraftTimelinesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.ConfigYaml != nil { - object["config_yaml"], err = json.Marshal(a.ConfigYaml) - if err != nil { - return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) - } - } +type ExportTimelinesResponse struct { + Body []byte + HTTPResponse *http.Response +} - object["connection_type"], err = json.Marshal(a.ConnectionType) - if err != nil { - return nil, fmt.Errorf("error marshaling 'connection_type': %w", err) +// Status returns HTTPResponse.Status +func (r ExportTimelinesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.Hash != nil { - object["hash"], err = json.Marshal(a.Hash) - if err != nil { - return nil, fmt.Errorf("error marshaling 'hash': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r ExportTimelinesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.Headers != nil { - object["headers"], err = json.Marshal(a.Headers) - if err != nil { - return nil, fmt.Errorf("error marshaling 'headers': %w", err) - } - } +type PersistFavoriteRouteResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIFavoriteTimelineResponse +} - object["hosts"], err = json.Marshal(a.Hosts) - if err != nil { - return nil, fmt.Errorf("error marshaling 'hosts': %w", err) +// Status returns HTTPResponse.Status +func (r PersistFavoriteRouteResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.Id != nil { - object["id"], err = json.Marshal(a.Id) - if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r PersistFavoriteRouteResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.IsDefault != nil { - object["is_default"], err = json.Marshal(a.IsDefault) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_default': %w", err) - } +type ImportTimelinesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 
*SecurityTimelineAPIImportTimelineResult + JSON400 *struct { + // Body The error message + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` } - - if a.IsDefaultMonitoring != nil { - object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) - } + JSON404 *struct { + // Body The error message + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + JSON409 *struct { + // Body The error message + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` } +} - if a.IsInternal != nil { - object["is_internal"], err = json.Marshal(a.IsInternal) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) - } +// Status returns HTTPResponse.Status +func (r ImportTimelinesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.IsPreconfigured != nil { - object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r ImportTimelinesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.Key != nil { - object["key"], err = json.Marshal(a.Key) - if err != nil { - return nil, fmt.Errorf("error marshaling 'key': %w", err) - } +type InstallPrepackedTimelinesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIImportTimelineResult +} + +// Status returns HTTPResponse.Status +func (r InstallPrepackedTimelinesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - object["name"], err = json.Marshal(a.Name) - if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) +// StatusCode returns HTTPResponse.StatusCode +func (r InstallPrepackedTimelinesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.Partition != nil { - object["partition"], err = json.Marshal(a.Partition) - if err != nil { - return nil, fmt.Errorf("error marshaling 'partition': %w", err) - } +type ResolveTimelineResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SecurityTimelineAPIResolvedTimeline +} + +// Status returns HTTPResponse.Status +func (r ResolveTimelineResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - object["password"], err = json.Marshal(a.Password) - if err != nil { - return nil, fmt.Errorf("error marshaling 'password': %w", err) +// StatusCode returns HTTPResponse.StatusCode +func (r ResolveTimelineResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.ProxyId != nil { - object["proxy_id"], err = json.Marshal(a.ProxyId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) - } +type GetTimelinesResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // CustomTemplateTimelineCount The amount of custom Timeline templates in the results + CustomTemplateTimelineCount *float32 `json:"customTemplateTimelineCount,omitempty"` + + // DefaultTimelineCount The amount of `default` type Timelines in the results + 
DefaultTimelineCount *float32 `json:"defaultTimelineCount,omitempty"` + + // ElasticTemplateTimelineCount The amount of Elastic's Timeline templates in the results + ElasticTemplateTimelineCount *float32 `json:"elasticTemplateTimelineCount,omitempty"` + + // FavoriteCount The amount of favorited Timelines + FavoriteCount *float32 `json:"favoriteCount,omitempty"` + + // TemplateTimelineCount The amount of Timeline templates in the results + TemplateTimelineCount *float32 `json:"templateTimelineCount,omitempty"` + Timeline []SecurityTimelineAPITimelineResponse `json:"timeline"` + + // TotalCount The total amount of results + TotalCount float32 `json:"totalCount"` } +} - if a.Random != nil { - object["random"], err = json.Marshal(a.Random) - if err != nil { - return nil, fmt.Errorf("error marshaling 'random': %w", err) - } +// Status returns HTTPResponse.Status +func (r GetTimelinesResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.RequiredAcks != nil { - object["required_acks"], err = json.Marshal(a.RequiredAcks) - if err != nil { - return nil, fmt.Errorf("error marshaling 'required_acks': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetTimelinesResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.RoundRobin != nil { - object["round_robin"], err = json.Marshal(a.RoundRobin) - if err != nil { - return nil, fmt.Errorf("error marshaling 'round_robin': %w", err) - } +type GetUpgradeStatusResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r GetUpgradeStatusResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.Sasl != nil { - object["sasl"], err = json.Marshal(a.Sasl) - if err != nil { - return nil, fmt.Errorf("error marshaling 'sasl': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetUpgradeStatusResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.Secrets != nil { - object["secrets"], err = json.Marshal(a.Secrets) - if err != nil { - return nil, fmt.Errorf("error marshaling 'secrets': %w", err) - } +type GetUptimeSettingsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} +} + +// Status returns HTTPResponse.Status +func (r GetUptimeSettingsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.Shipper != nil { - object["shipper"], err = json.Marshal(a.Shipper) - if err != nil { - return nil, fmt.Errorf("error marshaling 'shipper': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetUptimeSettingsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.Ssl != nil { - object["ssl"], err = json.Marshal(a.Ssl) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ssl': %w", err) - } +type PutUptimeSettingsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *map[string]interface{} +} + +// Status returns HTTPResponse.Status +func (r PutUptimeSettingsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.Timeout != nil { - object["timeout"], err = json.Marshal(a.Timeout) - if err != nil { - return nil, fmt.Errorf("error 
marshaling 'timeout': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r PutUptimeSettingsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if a.Topic != nil { - object["topic"], err = json.Marshal(a.Topic) - if err != nil { - return nil, fmt.Errorf("error marshaling 'topic': %w", err) - } +type DeleteActionsConnectorIdResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r DeleteActionsConnectorIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - object["type"], err = json.Marshal(a.Type) - if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteActionsConnectorIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - object["username"], err = json.Marshal(a.Username) - if err != nil { - return nil, fmt.Errorf("error marshaling 'username': %w", err) +type GetActionsConnectorIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *ConnectorResponse +} + +// Status returns HTTPResponse.Status +func (r GetActionsConnectorIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if a.Version != nil { - object["version"], err = json.Marshal(a.Version) - if err != nil { - return nil, fmt.Errorf("error marshaling 'version': %w", err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetActionsConnectorIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +type PostActionsConnectorIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Config *map[string]interface{} `json:"config,omitempty"` + + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` + + // Id The identifier for the connector. + Id string `json:"id"` + + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` + + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. + IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. + Name string `json:"name"` } - return json.Marshal(object) } -// Getter for additional properties for OutputKafka_Hash. 
Returns the specified -// element and whether it was found -func (a OutputKafka_Hash) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// Status returns HTTPResponse.Status +func (r PostActionsConnectorIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - return + return http.StatusText(0) } -// Setter for additional properties for OutputKafka_Hash -func (a *OutputKafka_Hash) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// StatusCode returns HTTPResponse.StatusCode +func (r PostActionsConnectorIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - a.AdditionalProperties[fieldName] = value + return 0 } -// Override default JSON handling for OutputKafka_Hash to handle AdditionalProperties -func (a *OutputKafka_Hash) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err - } +type PutActionsConnectorIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + Config *map[string]interface{} `json:"config,omitempty"` - if raw, found := object["hash"]; found { - err = json.Unmarshal(raw, &a.Hash) - if err != nil { - return fmt.Errorf("error reading 'hash': %w", err) - } - delete(object, "hash") - } + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` - if raw, found := object["random"]; found { - err = json.Unmarshal(raw, &a.Random) - if err != nil { - return fmt.Errorf("error reading 'random': %w", err) - } - delete(object, "random") - } + // Id The identifier for the connector. + Id string `json:"id"` - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } - } - return nil -} + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` -// Override default JSON handling for OutputKafka_Hash to handle AdditionalProperties -func (a OutputKafka_Hash) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` - if a.Hash != nil { - object["hash"], err = json.Marshal(a.Hash) - if err != nil { - return nil, fmt.Errorf("error marshaling 'hash': %w", err) - } - } + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` - if a.Random != nil { - object["random"], err = json.Marshal(a.Random) - if err != nil { - return nil, fmt.Errorf("error marshaling 'random': %w", err) - } + // IsSystemAction Indicates whether the connector is used for system actions. + IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. 
+ Name string `json:"name"` } +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// Status returns HTTPResponse.Status +func (r PutActionsConnectorIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - return json.Marshal(object) + return http.StatusText(0) } -// Getter for additional properties for OutputKafka_Headers_Item. Returns the specified -// element and whether it was found -func (a OutputKafka_Headers_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// StatusCode returns HTTPResponse.StatusCode +func (r PutActionsConnectorIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return + return 0 } -// Setter for additional properties for OutputKafka_Headers_Item -func (a *OutputKafka_Headers_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +type GetActionsConnectorsResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *[]ConnectorResponse +} + +// Status returns HTTPResponse.Status +func (r GetActionsConnectorsResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - a.AdditionalProperties[fieldName] = value + return http.StatusText(0) } -// Override default JSON handling for OutputKafka_Headers_Item to handle AdditionalProperties -func (a *OutputKafka_Headers_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err +// StatusCode returns HTTPResponse.StatusCode +func (r GetActionsConnectorsResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["key"]; found { - err = json.Unmarshal(raw, &a.Key) - if err != nil { - return fmt.Errorf("error reading 'key': %w", err) - } - delete(object, "key") +type GetAllDataViewsDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + DataView *[]GetDataViewsResponseItem `json:"data_view,omitempty"` } + JSON400 *DataViews400Response +} - if raw, found := object["value"]; found { - err = json.Unmarshal(raw, &a.Value) - if err != nil { - return fmt.Errorf("error reading 'value': %w", err) - } - delete(object, "value") +// Status returns HTTPResponse.Status +func (r GetAllDataViewsDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetAllDataViewsDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputKafka_Headers_Item to handle AdditionalProperties -func (a OutputKafka_Headers_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type 
CreateDataViewDefaultwResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *DataViewsDataViewResponseObject + JSON400 *DataViews400Response +} - object["key"], err = json.Marshal(a.Key) - if err != nil { - return nil, fmt.Errorf("error marshaling 'key': %w", err) +// Status returns HTTPResponse.Status +func (r CreateDataViewDefaultwResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - object["value"], err = json.Marshal(a.Value) - if err != nil { - return nil, fmt.Errorf("error marshaling 'value': %w", err) +// StatusCode returns HTTPResponse.StatusCode +func (r CreateDataViewDefaultwResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } - } - return json.Marshal(object) +type DeleteDataViewDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON404 *DataViews404Response } -// Getter for additional properties for OutputKafka_Random. Returns the specified -// element and whether it was found -func (a OutputKafka_Random) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// Status returns HTTPResponse.Status +func (r DeleteDataViewDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - return + return http.StatusText(0) } -// Setter for additional properties for OutputKafka_Random -func (a *OutputKafka_Random) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteDataViewDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - a.AdditionalProperties[fieldName] = value + return 0 } -// Override default JSON handling for OutputKafka_Random to handle AdditionalProperties -func (a *OutputKafka_Random) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err - } +type GetDataViewDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *DataViewsDataViewResponseObject + JSON404 *DataViews404Response +} - if raw, found := object["group_events"]; found { - err = json.Unmarshal(raw, &a.GroupEvents) - if err != nil { - return fmt.Errorf("error reading 'group_events': %w", err) - } - delete(object, "group_events") +// Status returns HTTPResponse.Status +func (r GetDataViewDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetDataViewDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputKafka_Random to handle AdditionalProperties -func (a 
OutputKafka_Random) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type UpdateDataViewDefaultResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *DataViewsDataViewResponseObject + JSON400 *DataViews400Response +} - if a.GroupEvents != nil { - object["group_events"], err = json.Marshal(a.GroupEvents) - if err != nil { - return nil, fmt.Errorf("error marshaling 'group_events': %w", err) - } +// Status returns HTTPResponse.Status +func (r UpdateDataViewDefaultResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateDataViewDefaultResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return json.Marshal(object) + return 0 } -// Getter for additional properties for OutputKafka_RoundRobin. Returns the specified -// element and whether it was found -func (a OutputKafka_RoundRobin) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +type PostMaintenanceWindowResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. 
+ Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status PostMaintenanceWindow200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` + + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` } - return } +type PostMaintenanceWindow200Status string -// Setter for additional properties for OutputKafka_RoundRobin -func (a *OutputKafka_RoundRobin) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// Status returns HTTPResponse.Status +func (r PostMaintenanceWindowResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - a.AdditionalProperties[fieldName] = value + return http.StatusText(0) } -// Override default JSON handling for OutputKafka_RoundRobin to handle AdditionalProperties -func (a *OutputKafka_RoundRobin) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err +// StatusCode returns HTTPResponse.StatusCode +func (r PostMaintenanceWindowResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["group_events"]; found { - err = json.Unmarshal(raw, &a.GroupEvents) - if err != nil { - return fmt.Errorf("error reading 'group_events': %w", err) - } - delete(object, "group_events") +type DeleteMaintenanceWindowIdResponse struct { + Body []byte + HTTPResponse *http.Response +} + +// Status returns HTTPResponse.Status +func (r DeleteMaintenanceWindowIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteMaintenanceWindowIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputKafka_RoundRobin to handle AdditionalProperties -func (a OutputKafka_RoundRobin) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type GetMaintenanceWindowIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` - if a.GroupEvents != nil { - object["group_events"], err = json.Marshal(a.GroupEvents) - if err != nil { - return nil, fmt.Errorf("error marshaling 'group_events': %w", err) - } - } + // CreatedBy The identifier for the user that created the maintenance window. 
+ CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status GetMaintenanceWindowId200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` } - return json.Marshal(object) } +type GetMaintenanceWindowId200Status string -// Getter for additional properties for OutputKafka_Sasl. 
Returns the specified -// element and whether it was found -func (a OutputKafka_Sasl) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// Status returns HTTPResponse.Status +func (r GetMaintenanceWindowIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - return + return http.StatusText(0) } -// Setter for additional properties for OutputKafka_Sasl -func (a *OutputKafka_Sasl) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// StatusCode returns HTTPResponse.StatusCode +func (r GetMaintenanceWindowIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - a.AdditionalProperties[fieldName] = value + return 0 } -// Override default JSON handling for OutputKafka_Sasl to handle AdditionalProperties -func (a *OutputKafka_Sasl) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err - } +type PatchMaintenanceWindowIdResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` - if raw, found := object["mechanism"]; found { - err = json.Unmarshal(raw, &a.Mechanism) - if err != nil { - return fmt.Errorf("error reading 'mechanism': %w", err) - } - delete(object, "mechanism") - } + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } - } - return nil -} + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` -// Override default JSON handling for OutputKafka_Sasl to handle AdditionalProperties -func (a OutputKafka_Sasl) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` - if a.Mechanism != nil { - object["mechanism"], err = json.Marshal(a.Mechanism) - if err != nil { - return nil, fmt.Errorf("error marshaling 'mechanism': %w", err) - } - } + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. 
+ Every *string `json:"every,omitempty"` - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } - } - return json.Marshal(object) -} + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` -// Getter for additional properties for OutputKafkaSecretsPassword0. Returns the specified -// element and whether it was found -func (a OutputKafkaSecretsPassword0) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] - } - return -} + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` -// Setter for additional properties for OutputKafkaSecretsPassword0 -func (a *OutputKafkaSecretsPassword0) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) - } - a.AdditionalProperties[fieldName] = value -} + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` -// Override default JSON handling for OutputKafkaSecretsPassword0 to handle AdditionalProperties -func (a *OutputKafkaSecretsPassword0) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err - } + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") - } + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } - } - return nil -} + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` -// Override default JSON handling for OutputKafkaSecretsPassword0 to handle AdditionalProperties -func (a OutputKafkaSecretsPassword0) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) + // Status The current status of the maintenance window. + Status PatchMaintenanceWindowId200Status `json:"status"` - object["id"], err = json.Marshal(a.Id) - if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) - } + // Title The name of the maintenance window. 
+ Title string `json:"title"` - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` } - return json.Marshal(object) } +type PatchMaintenanceWindowId200Status string -// Getter for additional properties for OutputKafkaSecretsSslKey0. Returns the specified -// element and whether it was found -func (a OutputKafkaSecretsSslKey0) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// Status returns HTTPResponse.Status +func (r PatchMaintenanceWindowIdResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - return + return http.StatusText(0) } -// Setter for additional properties for OutputKafkaSecretsSslKey0 -func (a *OutputKafkaSecretsSslKey0) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// StatusCode returns HTTPResponse.StatusCode +func (r PatchMaintenanceWindowIdResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - a.AdditionalProperties[fieldName] = value + return 0 } -// Override default JSON handling for OutputKafkaSecretsSslKey0 to handle AdditionalProperties -func (a *OutputKafkaSecretsSslKey0) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err - } +type FindSlosOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SLOsFindSloResponse + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response + JSON404 *SLOs404Response +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// Status returns HTTPResponse.Status +func (r FindSlosOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r FindSlosOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputKafkaSecretsSslKey0 to handle AdditionalProperties -func (a OutputKafkaSecretsSslKey0) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type CreateSloOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SLOsCreateSloResponse + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response + JSON409 *SLOs409Response +} - object["id"], err = json.Marshal(a.Id) - if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) +// Status returns HTTPResponse.Status +func (r CreateSloOpResponse) 
Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r CreateSloOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return json.Marshal(object) + return 0 } -// Getter for additional properties for OutputKafka_Secrets_Ssl. Returns the specified -// element and whether it was found -func (a OutputKafka_Secrets_Ssl) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] - } - return +type BulkDeleteOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SLOsBulkDeleteResponse + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response } -// Setter for additional properties for OutputKafka_Secrets_Ssl -func (a *OutputKafka_Secrets_Ssl) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// Status returns HTTPResponse.Status +func (r BulkDeleteOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - a.AdditionalProperties[fieldName] = value + return http.StatusText(0) } -// Override default JSON handling for OutputKafka_Secrets_Ssl to handle AdditionalProperties -func (a *OutputKafka_Secrets_Ssl) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err +// StatusCode returns HTTPResponse.StatusCode +func (r BulkDeleteOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["key"]; found { - err = json.Unmarshal(raw, &a.Key) - if err != nil { - return fmt.Errorf("error reading 'key': %w", err) - } - delete(object, "key") +type BulkDeleteStatusOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SLOsBulkDeleteStatusResponse + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response +} + +// Status returns HTTPResponse.Status +func (r BulkDeleteStatusOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r BulkDeleteStatusOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputKafka_Secrets_Ssl to handle AdditionalProperties -func (a OutputKafka_Secrets_Ssl) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +type DeleteRollupDataOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SLOsBulkPurgeRollupResponse + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response +} - object["key"], err = json.Marshal(a.Key) - if err != nil { - return nil, 
fmt.Errorf("error marshaling 'key': %w", err) +// Status returns HTTPResponse.Status +func (r DeleteRollupDataOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteRollupDataOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return json.Marshal(object) + return 0 } -// Getter for additional properties for OutputKafka_Secrets. Returns the specified -// element and whether it was found -func (a OutputKafka_Secrets) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] - } - return +type DeleteSloInstancesOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response } -// Setter for additional properties for OutputKafka_Secrets -func (a *OutputKafka_Secrets) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// Status returns HTTPResponse.Status +func (r DeleteSloInstancesOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - a.AdditionalProperties[fieldName] = value + return http.StatusText(0) } -// Override default JSON handling for OutputKafka_Secrets to handle AdditionalProperties -func (a *OutputKafka_Secrets) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteSloInstancesOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["password"]; found { - err = json.Unmarshal(raw, &a.Password) - if err != nil { - return fmt.Errorf("error reading 'password': %w", err) - } - delete(object, "password") - } +type DeleteSloOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response + JSON404 *SLOs404Response +} - if raw, found := object["ssl"]; found { - err = json.Unmarshal(raw, &a.Ssl) - if err != nil { - return fmt.Errorf("error reading 'ssl': %w", err) - } - delete(object, "ssl") +// Status returns HTTPResponse.Status +func (r DeleteSloOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// StatusCode returns HTTPResponse.StatusCode +func (r DeleteSloOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return nil + return 0 } -// Override default JSON handling for OutputKafka_Secrets to handle AdditionalProperties -func (a OutputKafka_Secrets) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - if a.Password != nil { - 
object["password"], err = json.Marshal(a.Password) - if err != nil { - return nil, fmt.Errorf("error marshaling 'password': %w", err) - } - } +type GetSloOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SLOsSloWithSummaryResponse + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response + JSON404 *SLOs404Response +} - if a.Ssl != nil { - object["ssl"], err = json.Marshal(a.Ssl) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ssl': %w", err) - } +// Status returns HTTPResponse.Status +func (r GetSloOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// StatusCode returns HTTPResponse.StatusCode +func (r GetSloOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } - return json.Marshal(object) + return 0 } -// Getter for additional properties for OutputLogstash. Returns the specified -// element and whether it was found -func (a OutputLogstash) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] - } - return +type UpdateSloOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SLOsSloDefinitionResponse + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response + JSON404 *SLOs404Response } -// Setter for additional properties for OutputLogstash -func (a *OutputLogstash) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// Status returns HTTPResponse.Status +func (r UpdateSloOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } - a.AdditionalProperties[fieldName] = value + return http.StatusText(0) } -// Override default JSON handling for OutputLogstash to handle AdditionalProperties -func (a *OutputLogstash) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) - if err != nil { - return err +// StatusCode returns HTTPResponse.StatusCode +func (r UpdateSloOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["allow_edit"]; found { - err = json.Unmarshal(raw, &a.AllowEdit) - if err != nil { - return fmt.Errorf("error reading 'allow_edit': %w", err) - } - delete(object, "allow_edit") - } +type ResetSloOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SLOsSloDefinitionResponse + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response + JSON404 *SLOs404Response +} - if raw, found := object["ca_sha256"]; found { - err = json.Unmarshal(raw, &a.CaSha256) - if err != nil { - return fmt.Errorf("error reading 'ca_sha256': %w", err) - } - delete(object, "ca_sha256") +// Status returns HTTPResponse.Status +func (r ResetSloOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["ca_trusted_fingerprint"]; found { - err = json.Unmarshal(raw, &a.CaTrustedFingerprint) - if err != nil { - return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) - } - delete(object, "ca_trusted_fingerprint") +// 
StatusCode returns HTTPResponse.StatusCode +func (r ResetSloOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["config_yaml"]; found { - err = json.Unmarshal(raw, &a.ConfigYaml) - if err != nil { - return fmt.Errorf("error reading 'config_yaml': %w", err) - } - delete(object, "config_yaml") - } +type DisableSloOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response + JSON404 *SLOs404Response +} - if raw, found := object["hosts"]; found { - err = json.Unmarshal(raw, &a.Hosts) - if err != nil { - return fmt.Errorf("error reading 'hosts': %w", err) - } - delete(object, "hosts") +// Status returns HTTPResponse.Status +func (r DisableSloOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// StatusCode returns HTTPResponse.StatusCode +func (r DisableSloOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["is_default"]; found { - err = json.Unmarshal(raw, &a.IsDefault) - if err != nil { - return fmt.Errorf("error reading 'is_default': %w", err) - } - delete(object, "is_default") +type EnableSloOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response + JSON404 *SLOs404Response +} + +// Status returns HTTPResponse.Status +func (r EnableSloOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["is_default_monitoring"]; found { - err = json.Unmarshal(raw, &a.IsDefaultMonitoring) - if err != nil { - return fmt.Errorf("error reading 'is_default_monitoring': %w", err) - } - delete(object, "is_default_monitoring") +// StatusCode returns HTTPResponse.StatusCode +func (r EnableSloOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["is_internal"]; found { - err = json.Unmarshal(raw, &a.IsInternal) - if err != nil { - return fmt.Errorf("error reading 'is_internal': %w", err) - } - delete(object, "is_internal") +type GetDefinitionsOpResponse struct { + Body []byte + HTTPResponse *http.Response + JSON200 *SLOsFindSloDefinitionsResponse + JSON400 *SLOs400Response + JSON401 *SLOs401Response + JSON403 *SLOs403Response +} + +// Status returns HTTPResponse.Status +func (r GetDefinitionsOpResponse) Status() string { + if r.HTTPResponse != nil { + return r.HTTPResponse.Status } + return http.StatusText(0) +} - if raw, found := object["is_preconfigured"]; found { - err = json.Unmarshal(raw, &a.IsPreconfigured) - if err != nil { - return fmt.Errorf("error reading 'is_preconfigured': %w", err) - } - delete(object, "is_preconfigured") +// StatusCode returns HTTPResponse.StatusCode +func (r GetDefinitionsOpResponse) StatusCode() int { + if r.HTTPResponse != nil { + return r.HTTPResponse.StatusCode } + return 0 +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// PostActionsConnectorIdExecuteWithBodyWithResponse request with arbitrary 
body returning *PostActionsConnectorIdExecuteResponse +func (c *ClientWithResponses) PostActionsConnectorIdExecuteWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdExecuteResponse, error) { + rsp, err := c.PostActionsConnectorIdExecuteWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostActionsConnectorIdExecuteResponse(rsp) +} - if raw, found := object["proxy_id"]; found { - err = json.Unmarshal(raw, &a.ProxyId) - if err != nil { - return fmt.Errorf("error reading 'proxy_id': %w", err) - } - delete(object, "proxy_id") +func (c *ClientWithResponses) PostActionsConnectorIdExecuteWithResponse(ctx context.Context, id string, body PostActionsConnectorIdExecuteJSONRequestBody, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdExecuteResponse, error) { + rsp, err := c.PostActionsConnectorIdExecute(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostActionsConnectorIdExecuteResponse(rsp) +} - if raw, found := object["secrets"]; found { - err = json.Unmarshal(raw, &a.Secrets) - if err != nil { - return fmt.Errorf("error reading 'secrets': %w", err) - } - delete(object, "secrets") +// GetActionsConnectorTypesWithResponse request returning *GetActionsConnectorTypesResponse +func (c *ClientWithResponses) GetActionsConnectorTypesWithResponse(ctx context.Context, params *GetActionsConnectorTypesParams, reqEditors ...RequestEditorFn) (*GetActionsConnectorTypesResponse, error) { + rsp, err := c.GetActionsConnectorTypes(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetActionsConnectorTypesResponse(rsp) +} - if raw, found := object["shipper"]; found { - err = json.Unmarshal(raw, &a.Shipper) - if err != nil { - return fmt.Errorf("error reading 'shipper': %w", err) - } - delete(object, "shipper") +// GetAlertingHealthWithResponse request returning *GetAlertingHealthResponse +func (c *ClientWithResponses) GetAlertingHealthWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetAlertingHealthResponse, error) { + rsp, err := c.GetAlertingHealth(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseGetAlertingHealthResponse(rsp) +} - if raw, found := object["ssl"]; found { - err = json.Unmarshal(raw, &a.Ssl) - if err != nil { - return fmt.Errorf("error reading 'ssl': %w", err) - } - delete(object, "ssl") +// DeleteAlertingRuleIdWithResponse request returning *DeleteAlertingRuleIdResponse +func (c *ClientWithResponses) DeleteAlertingRuleIdWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteAlertingRuleIdResponse, error) { + rsp, err := c.DeleteAlertingRuleId(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteAlertingRuleIdResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +// GetAlertingRuleIdWithResponse request returning *GetAlertingRuleIdResponse +func (c *ClientWithResponses) GetAlertingRuleIdWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetAlertingRuleIdResponse, error) { + rsp, err := c.GetAlertingRuleId(ctx, id, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetAlertingRuleIdResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PostAlertingRuleIdWithBodyWithResponse request with arbitrary body returning *PostAlertingRuleIdResponse +func (c *ClientWithResponses) PostAlertingRuleIdWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdResponse, error) { + rsp, err := c.PostAlertingRuleIdWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePostAlertingRuleIdResponse(rsp) } -// Override default JSON handling for OutputLogstash to handle AdditionalProperties -func (a OutputLogstash) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) PostAlertingRuleIdWithResponse(ctx context.Context, id string, body PostAlertingRuleIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdResponse, error) { + rsp, err := c.PostAlertingRuleId(ctx, id, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostAlertingRuleIdResponse(rsp) +} - if a.AllowEdit != nil { - object["allow_edit"], err = json.Marshal(a.AllowEdit) - if err != nil { - return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) - } +// PutAlertingRuleIdWithBodyWithResponse request with arbitrary body returning *PutAlertingRuleIdResponse +func (c *ClientWithResponses) PutAlertingRuleIdWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutAlertingRuleIdResponse, error) { + rsp, err := c.PutAlertingRuleIdWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutAlertingRuleIdResponse(rsp) +} - if a.CaSha256 != nil { - object["ca_sha256"], err = json.Marshal(a.CaSha256) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) - } +func (c *ClientWithResponses) PutAlertingRuleIdWithResponse(ctx context.Context, id string, body PutAlertingRuleIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PutAlertingRuleIdResponse, error) { + rsp, err := c.PutAlertingRuleId(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutAlertingRuleIdResponse(rsp) +} - if a.CaTrustedFingerprint != nil { - object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) - } +// PostAlertingRuleIdDisableWithBodyWithResponse request with arbitrary body returning *PostAlertingRuleIdDisableResponse +func (c *ClientWithResponses) PostAlertingRuleIdDisableWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdDisableResponse, error) { + rsp, err := c.PostAlertingRuleIdDisableWithBody(ctx, id, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostAlertingRuleIdDisableResponse(rsp) +} - if a.ConfigYaml != nil { - object["config_yaml"], err = json.Marshal(a.ConfigYaml) - if err != nil { - return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) - } +func (c *ClientWithResponses) PostAlertingRuleIdDisableWithResponse(ctx context.Context, id string, body PostAlertingRuleIdDisableJSONRequestBody, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdDisableResponse, error) { + rsp, err := c.PostAlertingRuleIdDisable(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostAlertingRuleIdDisableResponse(rsp) +} - object["hosts"], err = json.Marshal(a.Hosts) +// PostAlertingRuleIdEnableWithResponse request returning *PostAlertingRuleIdEnableResponse +func (c *ClientWithResponses) PostAlertingRuleIdEnableWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdEnableResponse, error) { + rsp, err := c.PostAlertingRuleIdEnable(ctx, id, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'hosts': %w", err) + return nil, err } + return ParsePostAlertingRuleIdEnableResponse(rsp) +} - if a.Id != nil { - object["id"], err = json.Marshal(a.Id) - if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) - } +// PostAlertingRuleIdMuteAllWithResponse request returning *PostAlertingRuleIdMuteAllResponse +func (c *ClientWithResponses) PostAlertingRuleIdMuteAllWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdMuteAllResponse, error) { + rsp, err := c.PostAlertingRuleIdMuteAll(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParsePostAlertingRuleIdMuteAllResponse(rsp) +} - if a.IsDefault != nil { - object["is_default"], err = json.Marshal(a.IsDefault) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_default': %w", err) - } +// PostAlertingRuleIdUnmuteAllWithResponse request returning *PostAlertingRuleIdUnmuteAllResponse +func (c *ClientWithResponses) PostAlertingRuleIdUnmuteAllWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdUnmuteAllResponse, error) { + rsp, err := c.PostAlertingRuleIdUnmuteAll(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParsePostAlertingRuleIdUnmuteAllResponse(rsp) +} - if a.IsDefaultMonitoring != nil { - object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) - } +// PostAlertingRuleIdUpdateApiKeyWithResponse request returning *PostAlertingRuleIdUpdateApiKeyResponse +func (c *ClientWithResponses) PostAlertingRuleIdUpdateApiKeyWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdUpdateApiKeyResponse, error) { + rsp, err := c.PostAlertingRuleIdUpdateApiKey(ctx, id, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostAlertingRuleIdUpdateApiKeyResponse(rsp) +} - if a.IsInternal != nil { - object["is_internal"], err = json.Marshal(a.IsInternal) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) - } +// PostAlertingRuleIdSnoozeScheduleWithBodyWithResponse request with arbitrary body returning *PostAlertingRuleIdSnoozeScheduleResponse +func (c *ClientWithResponses) PostAlertingRuleIdSnoozeScheduleWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdSnoozeScheduleResponse, error) { + rsp, err := c.PostAlertingRuleIdSnoozeScheduleWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostAlertingRuleIdSnoozeScheduleResponse(rsp) +} - if a.IsPreconfigured != nil { - object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) - } +func (c *ClientWithResponses) PostAlertingRuleIdSnoozeScheduleWithResponse(ctx context.Context, id string, body PostAlertingRuleIdSnoozeScheduleJSONRequestBody, reqEditors ...RequestEditorFn) (*PostAlertingRuleIdSnoozeScheduleResponse, error) { + rsp, err := c.PostAlertingRuleIdSnoozeSchedule(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostAlertingRuleIdSnoozeScheduleResponse(rsp) +} - object["name"], err = json.Marshal(a.Name) +// DeleteAlertingRuleRuleidSnoozeScheduleScheduleidWithResponse request returning *DeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse +func (c *ClientWithResponses) DeleteAlertingRuleRuleidSnoozeScheduleScheduleidWithResponse(ctx context.Context, ruleId string, scheduleId string, reqEditors ...RequestEditorFn) (*DeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse, error) { + rsp, err := c.DeleteAlertingRuleRuleidSnoozeScheduleScheduleid(ctx, ruleId, scheduleId, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) + return nil, err } + return ParseDeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse(rsp) +} - if a.ProxyId != nil { - object["proxy_id"], err = json.Marshal(a.ProxyId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) - } +// PostAlertingRuleRuleIdAlertAlertIdMuteWithResponse request returning *PostAlertingRuleRuleIdAlertAlertIdMuteResponse +func (c *ClientWithResponses) PostAlertingRuleRuleIdAlertAlertIdMuteWithResponse(ctx context.Context, ruleId string, alertId string, reqEditors ...RequestEditorFn) (*PostAlertingRuleRuleIdAlertAlertIdMuteResponse, error) { + rsp, err := c.PostAlertingRuleRuleIdAlertAlertIdMute(ctx, ruleId, alertId, reqEditors...) + if err != nil { + return nil, err } + return ParsePostAlertingRuleRuleIdAlertAlertIdMuteResponse(rsp) +} - if a.Secrets != nil { - object["secrets"], err = json.Marshal(a.Secrets) - if err != nil { - return nil, fmt.Errorf("error marshaling 'secrets': %w", err) - } +// PostAlertingRuleRuleIdAlertAlertIdUnmuteWithResponse request returning *PostAlertingRuleRuleIdAlertAlertIdUnmuteResponse +func (c *ClientWithResponses) PostAlertingRuleRuleIdAlertAlertIdUnmuteWithResponse(ctx context.Context, ruleId string, alertId string, reqEditors ...RequestEditorFn) (*PostAlertingRuleRuleIdAlertAlertIdUnmuteResponse, error) { + rsp, err := c.PostAlertingRuleRuleIdAlertAlertIdUnmute(ctx, ruleId, alertId, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostAlertingRuleRuleIdAlertAlertIdUnmuteResponse(rsp) +} - if a.Shipper != nil { - object["shipper"], err = json.Marshal(a.Shipper) - if err != nil { - return nil, fmt.Errorf("error marshaling 'shipper': %w", err) - } +// GetRuleTypesWithResponse request returning *GetRuleTypesResponse +func (c *ClientWithResponses) GetRuleTypesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetRuleTypesResponse, error) { + rsp, err := c.GetRuleTypes(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseGetRuleTypesResponse(rsp) +} - if a.Ssl != nil { - object["ssl"], err = json.Marshal(a.Ssl) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ssl': %w", err) - } +// GetAlertingRulesFindWithResponse request returning *GetAlertingRulesFindResponse +func (c *ClientWithResponses) GetAlertingRulesFindWithResponse(ctx context.Context, params *GetAlertingRulesFindParams, reqEditors ...RequestEditorFn) (*GetAlertingRulesFindResponse, error) { + rsp, err := c.GetAlertingRulesFind(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetAlertingRulesFindResponse(rsp) +} - object["type"], err = json.Marshal(a.Type) +// CreateAgentKeyWithBodyWithResponse request with arbitrary body returning *CreateAgentKeyResponse +func (c *ClientWithResponses) CreateAgentKeyWithBodyWithResponse(ctx context.Context, params *CreateAgentKeyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAgentKeyResponse, error) { + rsp, err := c.CreateAgentKeyWithBody(ctx, params, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) + return nil, err } + return ParseCreateAgentKeyResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) CreateAgentKeyWithResponse(ctx context.Context, params *CreateAgentKeyParams, body CreateAgentKeyJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAgentKeyResponse, error) { + rsp, err := c.CreateAgentKey(ctx, params, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseCreateAgentKeyResponse(rsp) } -// Getter for additional properties for OutputLogstashSecretsSslKey0. Returns the specified -// element and whether it was found -func (a OutputLogstashSecretsSslKey0) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// SaveApmServerSchemaWithBodyWithResponse request with arbitrary body returning *SaveApmServerSchemaResponse +func (c *ClientWithResponses) SaveApmServerSchemaWithBodyWithResponse(ctx context.Context, params *SaveApmServerSchemaParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SaveApmServerSchemaResponse, error) { + rsp, err := c.SaveApmServerSchemaWithBody(ctx, params, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - return + return ParseSaveApmServerSchemaResponse(rsp) } -// Setter for additional properties for OutputLogstashSecretsSslKey0 -func (a *OutputLogstashSecretsSslKey0) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) SaveApmServerSchemaWithResponse(ctx context.Context, params *SaveApmServerSchemaParams, body SaveApmServerSchemaJSONRequestBody, reqEditors ...RequestEditorFn) (*SaveApmServerSchemaResponse, error) { + rsp, err := c.SaveApmServerSchema(ctx, params, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseSaveApmServerSchemaResponse(rsp) } -// Override default JSON handling for OutputLogstashSecretsSslKey0 to handle AdditionalProperties -func (a *OutputLogstashSecretsSslKey0) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// CreateAnnotationWithBodyWithResponse request with arbitrary body returning *CreateAnnotationResponse +func (c *ClientWithResponses) CreateAnnotationWithBodyWithResponse(ctx context.Context, serviceName string, params *CreateAnnotationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAnnotationResponse, error) { + rsp, err := c.CreateAnnotationWithBody(ctx, serviceName, params, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseCreateAnnotationResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +func (c *ClientWithResponses) CreateAnnotationWithResponse(ctx context.Context, serviceName string, params *CreateAnnotationParams, body CreateAnnotationJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAnnotationResponse, error) { + rsp, err := c.CreateAnnotation(ctx, serviceName, params, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateAnnotationResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// GetAnnotationWithResponse request returning *GetAnnotationResponse +func (c *ClientWithResponses) GetAnnotationWithResponse(ctx context.Context, serviceName string, params *GetAnnotationParams, reqEditors ...RequestEditorFn) (*GetAnnotationResponse, error) { + rsp, err := c.GetAnnotation(ctx, serviceName, params, reqEditors...) 
+ if err != nil { + return nil, err } - return nil + return ParseGetAnnotationResponse(rsp) } -// Override default JSON handling for OutputLogstashSecretsSslKey0 to handle AdditionalProperties -func (a OutputLogstashSecretsSslKey0) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// DeleteAgentConfigurationWithBodyWithResponse request with arbitrary body returning *DeleteAgentConfigurationResponse +func (c *ClientWithResponses) DeleteAgentConfigurationWithBodyWithResponse(ctx context.Context, params *DeleteAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteAgentConfigurationResponse, error) { + rsp, err := c.DeleteAgentConfigurationWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteAgentConfigurationResponse(rsp) +} - object["id"], err = json.Marshal(a.Id) +func (c *ClientWithResponses) DeleteAgentConfigurationWithResponse(ctx context.Context, params *DeleteAgentConfigurationParams, body DeleteAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteAgentConfigurationResponse, error) { + rsp, err := c.DeleteAgentConfiguration(ctx, params, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) + return nil, err } + return ParseDeleteAgentConfigurationResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// GetAgentConfigurationsWithResponse request returning *GetAgentConfigurationsResponse +func (c *ClientWithResponses) GetAgentConfigurationsWithResponse(ctx context.Context, params *GetAgentConfigurationsParams, reqEditors ...RequestEditorFn) (*GetAgentConfigurationsResponse, error) { + rsp, err := c.GetAgentConfigurations(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseGetAgentConfigurationsResponse(rsp) } -// Getter for additional properties for OutputLogstash_Secrets_Ssl. Returns the specified -// element and whether it was found -func (a OutputLogstash_Secrets_Ssl) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// CreateUpdateAgentConfigurationWithBodyWithResponse request with arbitrary body returning *CreateUpdateAgentConfigurationResponse +func (c *ClientWithResponses) CreateUpdateAgentConfigurationWithBodyWithResponse(ctx context.Context, params *CreateUpdateAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateUpdateAgentConfigurationResponse, error) { + rsp, err := c.CreateUpdateAgentConfigurationWithBody(ctx, params, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - return + return ParseCreateUpdateAgentConfigurationResponse(rsp) } -// Setter for additional properties for OutputLogstash_Secrets_Ssl -func (a *OutputLogstash_Secrets_Ssl) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) CreateUpdateAgentConfigurationWithResponse(ctx context.Context, params *CreateUpdateAgentConfigurationParams, body CreateUpdateAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateUpdateAgentConfigurationResponse, error) { + rsp, err := c.CreateUpdateAgentConfiguration(ctx, params, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseCreateUpdateAgentConfigurationResponse(rsp) } -// Override default JSON handling for OutputLogstash_Secrets_Ssl to handle AdditionalProperties -func (a *OutputLogstash_Secrets_Ssl) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// GetAgentNameForServiceWithResponse request returning *GetAgentNameForServiceResponse +func (c *ClientWithResponses) GetAgentNameForServiceWithResponse(ctx context.Context, params *GetAgentNameForServiceParams, reqEditors ...RequestEditorFn) (*GetAgentNameForServiceResponse, error) { + rsp, err := c.GetAgentNameForService(ctx, params, reqEditors...) if err != nil { - return err + return nil, err } + return ParseGetAgentNameForServiceResponse(rsp) +} - if raw, found := object["key"]; found { - err = json.Unmarshal(raw, &a.Key) - if err != nil { - return fmt.Errorf("error reading 'key': %w", err) - } - delete(object, "key") +// GetEnvironmentsForServiceWithResponse request returning *GetEnvironmentsForServiceResponse +func (c *ClientWithResponses) GetEnvironmentsForServiceWithResponse(ctx context.Context, params *GetEnvironmentsForServiceParams, reqEditors ...RequestEditorFn) (*GetEnvironmentsForServiceResponse, error) { + rsp, err := c.GetEnvironmentsForService(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetEnvironmentsForServiceResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// SearchSingleConfigurationWithBodyWithResponse request with arbitrary body returning *SearchSingleConfigurationResponse +func (c *ClientWithResponses) SearchSingleConfigurationWithBodyWithResponse(ctx context.Context, params *SearchSingleConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SearchSingleConfigurationResponse, error) { + rsp, err := c.SearchSingleConfigurationWithBody(ctx, params, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - return nil + return ParseSearchSingleConfigurationResponse(rsp) } -// Override default JSON handling for OutputLogstash_Secrets_Ssl to handle AdditionalProperties -func (a OutputLogstash_Secrets_Ssl) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) SearchSingleConfigurationWithResponse(ctx context.Context, params *SearchSingleConfigurationParams, body SearchSingleConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*SearchSingleConfigurationResponse, error) { + rsp, err := c.SearchSingleConfiguration(ctx, params, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseSearchSingleConfigurationResponse(rsp) +} - if a.Key != nil { - object["key"], err = json.Marshal(a.Key) - if err != nil { - return nil, fmt.Errorf("error marshaling 'key': %w", err) - } +// GetSingleAgentConfigurationWithResponse request returning *GetSingleAgentConfigurationResponse +func (c *ClientWithResponses) GetSingleAgentConfigurationWithResponse(ctx context.Context, params *GetSingleAgentConfigurationParams, reqEditors ...RequestEditorFn) (*GetSingleAgentConfigurationResponse, error) { + rsp, err := c.GetSingleAgentConfiguration(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetSingleAgentConfigurationResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// GetSourceMapsWithResponse request returning *GetSourceMapsResponse +func (c *ClientWithResponses) GetSourceMapsWithResponse(ctx context.Context, params *GetSourceMapsParams, reqEditors ...RequestEditorFn) (*GetSourceMapsResponse, error) { + rsp, err := c.GetSourceMaps(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseGetSourceMapsResponse(rsp) } -// Getter for additional properties for OutputLogstash_Secrets. Returns the specified -// element and whether it was found -func (a OutputLogstash_Secrets) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// UploadSourceMapWithBodyWithResponse request with arbitrary body returning *UploadSourceMapResponse +func (c *ClientWithResponses) UploadSourceMapWithBodyWithResponse(ctx context.Context, params *UploadSourceMapParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UploadSourceMapResponse, error) { + rsp, err := c.UploadSourceMapWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParseUploadSourceMapResponse(rsp) } -// Setter for additional properties for OutputLogstash_Secrets -func (a *OutputLogstash_Secrets) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// DeleteSourceMapWithResponse request returning *DeleteSourceMapResponse +func (c *ClientWithResponses) DeleteSourceMapWithResponse(ctx context.Context, id string, params *DeleteSourceMapParams, reqEditors ...RequestEditorFn) (*DeleteSourceMapResponse, error) { + rsp, err := c.DeleteSourceMap(ctx, id, params, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseDeleteSourceMapResponse(rsp) } -// Override default JSON handling for OutputLogstash_Secrets to handle AdditionalProperties -func (a *OutputLogstash_Secrets) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// DeleteAssetCriticalityRecordWithResponse request returning *DeleteAssetCriticalityRecordResponse +func (c *ClientWithResponses) DeleteAssetCriticalityRecordWithResponse(ctx context.Context, params *DeleteAssetCriticalityRecordParams, reqEditors ...RequestEditorFn) (*DeleteAssetCriticalityRecordResponse, error) { + rsp, err := c.DeleteAssetCriticalityRecord(ctx, params, reqEditors...) if err != nil { - return err + return nil, err } + return ParseDeleteAssetCriticalityRecordResponse(rsp) +} - if raw, found := object["ssl"]; found { - err = json.Unmarshal(raw, &a.Ssl) - if err != nil { - return fmt.Errorf("error reading 'ssl': %w", err) - } - delete(object, "ssl") +// GetAssetCriticalityRecordWithResponse request returning *GetAssetCriticalityRecordResponse +func (c *ClientWithResponses) GetAssetCriticalityRecordWithResponse(ctx context.Context, params *GetAssetCriticalityRecordParams, reqEditors ...RequestEditorFn) (*GetAssetCriticalityRecordResponse, error) { + rsp, err := c.GetAssetCriticalityRecord(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetAssetCriticalityRecordResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// CreateAssetCriticalityRecordWithBodyWithResponse request with arbitrary body returning *CreateAssetCriticalityRecordResponse +func (c *ClientWithResponses) CreateAssetCriticalityRecordWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAssetCriticalityRecordResponse, error) { + rsp, err := c.CreateAssetCriticalityRecordWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseCreateAssetCriticalityRecordResponse(rsp) } -// Override default JSON handling for OutputLogstash_Secrets to handle AdditionalProperties -func (a OutputLogstash_Secrets) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) CreateAssetCriticalityRecordWithResponse(ctx context.Context, body CreateAssetCriticalityRecordJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAssetCriticalityRecordResponse, error) { + rsp, err := c.CreateAssetCriticalityRecord(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseCreateAssetCriticalityRecordResponse(rsp) +} - if a.Ssl != nil { - object["ssl"], err = json.Marshal(a.Ssl) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ssl': %w", err) - } +// BulkUpsertAssetCriticalityRecordsWithBodyWithResponse request with arbitrary body returning *BulkUpsertAssetCriticalityRecordsResponse +func (c *ClientWithResponses) BulkUpsertAssetCriticalityRecordsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkUpsertAssetCriticalityRecordsResponse, error) { + rsp, err := c.BulkUpsertAssetCriticalityRecordsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseBulkUpsertAssetCriticalityRecordsResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) BulkUpsertAssetCriticalityRecordsWithResponse(ctx context.Context, body BulkUpsertAssetCriticalityRecordsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkUpsertAssetCriticalityRecordsResponse, error) { + rsp, err := c.BulkUpsertAssetCriticalityRecords(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseBulkUpsertAssetCriticalityRecordsResponse(rsp) } -// Getter for additional properties for OutputRemoteElasticsearch. Returns the specified -// element and whether it was found -func (a OutputRemoteElasticsearch) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// FindAssetCriticalityRecordsWithResponse request returning *FindAssetCriticalityRecordsResponse +func (c *ClientWithResponses) FindAssetCriticalityRecordsWithResponse(ctx context.Context, params *FindAssetCriticalityRecordsParams, reqEditors ...RequestEditorFn) (*FindAssetCriticalityRecordsResponse, error) { + rsp, err := c.FindAssetCriticalityRecords(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return + return ParseFindAssetCriticalityRecordsResponse(rsp) } -// Setter for additional properties for OutputRemoteElasticsearch -func (a *OutputRemoteElasticsearch) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// DeleteCaseDefaultSpaceWithResponse request returning *DeleteCaseDefaultSpaceResponse +func (c *ClientWithResponses) DeleteCaseDefaultSpaceWithResponse(ctx context.Context, params *DeleteCaseDefaultSpaceParams, reqEditors ...RequestEditorFn) (*DeleteCaseDefaultSpaceResponse, error) { + rsp, err := c.DeleteCaseDefaultSpace(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseDeleteCaseDefaultSpaceResponse(rsp) } -// Override default JSON handling for OutputRemoteElasticsearch to handle AdditionalProperties -func (a *OutputRemoteElasticsearch) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// UpdateCaseDefaultSpaceWithBodyWithResponse request with arbitrary body returning *UpdateCaseDefaultSpaceResponse +func (c *ClientWithResponses) UpdateCaseDefaultSpaceWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateCaseDefaultSpaceResponse, error) { + rsp, err := c.UpdateCaseDefaultSpaceWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseUpdateCaseDefaultSpaceResponse(rsp) +} - if raw, found := object["allow_edit"]; found { - err = json.Unmarshal(raw, &a.AllowEdit) - if err != nil { - return fmt.Errorf("error reading 'allow_edit': %w", err) - } - delete(object, "allow_edit") +func (c *ClientWithResponses) UpdateCaseDefaultSpaceWithResponse(ctx context.Context, body UpdateCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateCaseDefaultSpaceResponse, error) { + rsp, err := c.UpdateCaseDefaultSpace(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateCaseDefaultSpaceResponse(rsp) +} - if raw, found := object["ca_sha256"]; found { - err = json.Unmarshal(raw, &a.CaSha256) - if err != nil { - return fmt.Errorf("error reading 'ca_sha256': %w", err) - } - delete(object, "ca_sha256") +// CreateCaseDefaultSpaceWithBodyWithResponse request with arbitrary body returning *CreateCaseDefaultSpaceResponse +func (c *ClientWithResponses) CreateCaseDefaultSpaceWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateCaseDefaultSpaceResponse, error) { + rsp, err := c.CreateCaseDefaultSpaceWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateCaseDefaultSpaceResponse(rsp) +} - if raw, found := object["ca_trusted_fingerprint"]; found { - err = json.Unmarshal(raw, &a.CaTrustedFingerprint) - if err != nil { - return fmt.Errorf("error reading 'ca_trusted_fingerprint': %w", err) - } - delete(object, "ca_trusted_fingerprint") +func (c *ClientWithResponses) CreateCaseDefaultSpaceWithResponse(ctx context.Context, body CreateCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateCaseDefaultSpaceResponse, error) { + rsp, err := c.CreateCaseDefaultSpace(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateCaseDefaultSpaceResponse(rsp) +} - if raw, found := object["config_yaml"]; found { - err = json.Unmarshal(raw, &a.ConfigYaml) - if err != nil { - return fmt.Errorf("error reading 'config_yaml': %w", err) - } - delete(object, "config_yaml") +// FindCasesDefaultSpaceWithResponse request returning *FindCasesDefaultSpaceResponse +func (c *ClientWithResponses) FindCasesDefaultSpaceWithResponse(ctx context.Context, params *FindCasesDefaultSpaceParams, reqEditors ...RequestEditorFn) (*FindCasesDefaultSpaceResponse, error) { + rsp, err := c.FindCasesDefaultSpace(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseFindCasesDefaultSpaceResponse(rsp) +} - if raw, found := object["hosts"]; found { - err = json.Unmarshal(raw, &a.Hosts) - if err != nil { - return fmt.Errorf("error reading 'hosts': %w", err) - } - delete(object, "hosts") +// GetCasesByAlertDefaultSpaceWithResponse request returning *GetCasesByAlertDefaultSpaceResponse +func (c *ClientWithResponses) GetCasesByAlertDefaultSpaceWithResponse(ctx context.Context, alertId CasesAlertId, params *GetCasesByAlertDefaultSpaceParams, reqEditors ...RequestEditorFn) (*GetCasesByAlertDefaultSpaceResponse, error) { + rsp, err := c.GetCasesByAlertDefaultSpace(ctx, alertId, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetCasesByAlertDefaultSpaceResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// GetCaseConfigurationDefaultSpaceWithResponse request returning *GetCaseConfigurationDefaultSpaceResponse +func (c *ClientWithResponses) GetCaseConfigurationDefaultSpaceWithResponse(ctx context.Context, params *GetCaseConfigurationDefaultSpaceParams, reqEditors ...RequestEditorFn) (*GetCaseConfigurationDefaultSpaceResponse, error) { + rsp, err := c.GetCaseConfigurationDefaultSpace(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetCaseConfigurationDefaultSpaceResponse(rsp) +} - if raw, found := object["is_default"]; found { - err = json.Unmarshal(raw, &a.IsDefault) - if err != nil { - return fmt.Errorf("error reading 'is_default': %w", err) - } - delete(object, "is_default") +// SetCaseConfigurationDefaultSpaceWithBodyWithResponse request with arbitrary body returning *SetCaseConfigurationDefaultSpaceResponse +func (c *ClientWithResponses) SetCaseConfigurationDefaultSpaceWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SetCaseConfigurationDefaultSpaceResponse, error) { + rsp, err := c.SetCaseConfigurationDefaultSpaceWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseSetCaseConfigurationDefaultSpaceResponse(rsp) +} - if raw, found := object["is_default_monitoring"]; found { - err = json.Unmarshal(raw, &a.IsDefaultMonitoring) - if err != nil { - return fmt.Errorf("error reading 'is_default_monitoring': %w", err) - } - delete(object, "is_default_monitoring") +func (c *ClientWithResponses) SetCaseConfigurationDefaultSpaceWithResponse(ctx context.Context, body SetCaseConfigurationDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*SetCaseConfigurationDefaultSpaceResponse, error) { + rsp, err := c.SetCaseConfigurationDefaultSpace(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseSetCaseConfigurationDefaultSpaceResponse(rsp) +} - if raw, found := object["is_internal"]; found { - err = json.Unmarshal(raw, &a.IsInternal) - if err != nil { - return fmt.Errorf("error reading 'is_internal': %w", err) - } - delete(object, "is_internal") +// FindCaseConnectorsDefaultSpaceWithResponse request returning *FindCaseConnectorsDefaultSpaceResponse +func (c *ClientWithResponses) FindCaseConnectorsDefaultSpaceWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*FindCaseConnectorsDefaultSpaceResponse, error) { + rsp, err := c.FindCaseConnectorsDefaultSpace(ctx, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseFindCaseConnectorsDefaultSpaceResponse(rsp) +} - if raw, found := object["is_preconfigured"]; found { - err = json.Unmarshal(raw, &a.IsPreconfigured) - if err != nil { - return fmt.Errorf("error reading 'is_preconfigured': %w", err) - } - delete(object, "is_preconfigured") +// UpdateCaseConfigurationDefaultSpaceWithBodyWithResponse request with arbitrary body returning *UpdateCaseConfigurationDefaultSpaceResponse +func (c *ClientWithResponses) UpdateCaseConfigurationDefaultSpaceWithBodyWithResponse(ctx context.Context, configurationId CasesConfigurationId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateCaseConfigurationDefaultSpaceResponse, error) { + rsp, err := c.UpdateCaseConfigurationDefaultSpaceWithBody(ctx, configurationId, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateCaseConfigurationDefaultSpaceResponse(rsp) +} - if raw, found := object["kibana_api_key"]; found { - err = json.Unmarshal(raw, &a.KibanaApiKey) - if err != nil { - return fmt.Errorf("error reading 'kibana_api_key': %w", err) - } - delete(object, "kibana_api_key") +func (c *ClientWithResponses) UpdateCaseConfigurationDefaultSpaceWithResponse(ctx context.Context, configurationId CasesConfigurationId, body UpdateCaseConfigurationDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateCaseConfigurationDefaultSpaceResponse, error) { + rsp, err := c.UpdateCaseConfigurationDefaultSpace(ctx, configurationId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateCaseConfigurationDefaultSpaceResponse(rsp) +} - if raw, found := object["kibana_url"]; found { - err = json.Unmarshal(raw, &a.KibanaUrl) - if err != nil { - return fmt.Errorf("error reading 'kibana_url': %w", err) - } - delete(object, "kibana_url") +// GetCaseReportersDefaultSpaceWithResponse request returning *GetCaseReportersDefaultSpaceResponse +func (c *ClientWithResponses) GetCaseReportersDefaultSpaceWithResponse(ctx context.Context, params *GetCaseReportersDefaultSpaceParams, reqEditors ...RequestEditorFn) (*GetCaseReportersDefaultSpaceResponse, error) { + rsp, err := c.GetCaseReportersDefaultSpace(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetCaseReportersDefaultSpaceResponse(rsp) +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// GetCaseTagsDefaultSpaceWithResponse request returning *GetCaseTagsDefaultSpaceResponse +func (c *ClientWithResponses) GetCaseTagsDefaultSpaceWithResponse(ctx context.Context, params *GetCaseTagsDefaultSpaceParams, reqEditors ...RequestEditorFn) (*GetCaseTagsDefaultSpaceResponse, error) { + rsp, err := c.GetCaseTagsDefaultSpace(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetCaseTagsDefaultSpaceResponse(rsp) +} - if raw, found := object["preset"]; found { - err = json.Unmarshal(raw, &a.Preset) - if err != nil { - return fmt.Errorf("error reading 'preset': %w", err) - } - delete(object, "preset") +// GetCaseDefaultSpaceWithResponse request returning *GetCaseDefaultSpaceResponse +func (c *ClientWithResponses) GetCaseDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*GetCaseDefaultSpaceResponse, error) { + rsp, err := c.GetCaseDefaultSpace(ctx, caseId, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetCaseDefaultSpaceResponse(rsp) +} - if raw, found := object["proxy_id"]; found { - err = json.Unmarshal(raw, &a.ProxyId) - if err != nil { - return fmt.Errorf("error reading 'proxy_id': %w", err) - } - delete(object, "proxy_id") +// GetCaseAlertsDefaultSpaceWithResponse request returning *GetCaseAlertsDefaultSpaceResponse +func (c *ClientWithResponses) GetCaseAlertsDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*GetCaseAlertsDefaultSpaceResponse, error) { + rsp, err := c.GetCaseAlertsDefaultSpace(ctx, caseId, reqEditors...) + if err != nil { + return nil, err } + return ParseGetCaseAlertsDefaultSpaceResponse(rsp) +} - if raw, found := object["secrets"]; found { - err = json.Unmarshal(raw, &a.Secrets) - if err != nil { - return fmt.Errorf("error reading 'secrets': %w", err) - } - delete(object, "secrets") +// DeleteCaseCommentsDefaultSpaceWithResponse request returning *DeleteCaseCommentsDefaultSpaceResponse +func (c *ClientWithResponses) DeleteCaseCommentsDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, reqEditors ...RequestEditorFn) (*DeleteCaseCommentsDefaultSpaceResponse, error) { + rsp, err := c.DeleteCaseCommentsDefaultSpace(ctx, caseId, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteCaseCommentsDefaultSpaceResponse(rsp) +} - if raw, found := object["service_token"]; found { - err = json.Unmarshal(raw, &a.ServiceToken) - if err != nil { - return fmt.Errorf("error reading 'service_token': %w", err) - } - delete(object, "service_token") +// UpdateCaseCommentDefaultSpaceWithBodyWithResponse request with arbitrary body returning *UpdateCaseCommentDefaultSpaceResponse +func (c *ClientWithResponses) UpdateCaseCommentDefaultSpaceWithBodyWithResponse(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateCaseCommentDefaultSpaceResponse, error) { + rsp, err := c.UpdateCaseCommentDefaultSpaceWithBody(ctx, caseId, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateCaseCommentDefaultSpaceResponse(rsp) +} - if raw, found := object["shipper"]; found { - err = json.Unmarshal(raw, &a.Shipper) - if err != nil { - return fmt.Errorf("error reading 'shipper': %w", err) - } - delete(object, "shipper") +func (c *ClientWithResponses) UpdateCaseCommentDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, body UpdateCaseCommentDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateCaseCommentDefaultSpaceResponse, error) { + rsp, err := c.UpdateCaseCommentDefaultSpace(ctx, caseId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateCaseCommentDefaultSpaceResponse(rsp) +} - if raw, found := object["ssl"]; found { - err = json.Unmarshal(raw, &a.Ssl) - if err != nil { - return fmt.Errorf("error reading 'ssl': %w", err) - } - delete(object, "ssl") +// AddCaseCommentDefaultSpaceWithBodyWithResponse request with arbitrary body returning *AddCaseCommentDefaultSpaceResponse +func (c *ClientWithResponses) AddCaseCommentDefaultSpaceWithBodyWithResponse(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*AddCaseCommentDefaultSpaceResponse, error) { + rsp, err := c.AddCaseCommentDefaultSpaceWithBody(ctx, caseId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseAddCaseCommentDefaultSpaceResponse(rsp) +} - if raw, found := object["sync_integrations"]; found { - err = json.Unmarshal(raw, &a.SyncIntegrations) - if err != nil { - return fmt.Errorf("error reading 'sync_integrations': %w", err) - } - delete(object, "sync_integrations") +func (c *ClientWithResponses) AddCaseCommentDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, body AddCaseCommentDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*AddCaseCommentDefaultSpaceResponse, error) { + rsp, err := c.AddCaseCommentDefaultSpace(ctx, caseId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseAddCaseCommentDefaultSpaceResponse(rsp) +} - if raw, found := object["sync_uninstalled_integrations"]; found { - err = json.Unmarshal(raw, &a.SyncUninstalledIntegrations) - if err != nil { - return fmt.Errorf("error reading 'sync_uninstalled_integrations': %w", err) - } - delete(object, "sync_uninstalled_integrations") +// FindCaseCommentsDefaultSpaceWithResponse request returning *FindCaseCommentsDefaultSpaceResponse +func (c *ClientWithResponses) FindCaseCommentsDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, params *FindCaseCommentsDefaultSpaceParams, reqEditors ...RequestEditorFn) (*FindCaseCommentsDefaultSpaceResponse, error) { + rsp, err := c.FindCaseCommentsDefaultSpace(ctx, caseId, params, reqEditors...) + if err != nil { + return nil, err } + return ParseFindCaseCommentsDefaultSpaceResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +// DeleteCaseCommentDefaultSpaceWithResponse request returning *DeleteCaseCommentDefaultSpaceResponse +func (c *ClientWithResponses) DeleteCaseCommentDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, commentId CasesCommentId, reqEditors ...RequestEditorFn) (*DeleteCaseCommentDefaultSpaceResponse, error) { + rsp, err := c.DeleteCaseCommentDefaultSpace(ctx, caseId, commentId, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteCaseCommentDefaultSpaceResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// GetCaseCommentDefaultSpaceWithResponse request returning *GetCaseCommentDefaultSpaceResponse +func (c *ClientWithResponses) GetCaseCommentDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, commentId CasesCommentId, reqEditors ...RequestEditorFn) (*GetCaseCommentDefaultSpaceResponse, error) { + rsp, err := c.GetCaseCommentDefaultSpace(ctx, caseId, commentId, reqEditors...) 
+ if err != nil { + return nil, err } - return nil + return ParseGetCaseCommentDefaultSpaceResponse(rsp) } -// Override default JSON handling for OutputRemoteElasticsearch to handle AdditionalProperties -func (a OutputRemoteElasticsearch) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// PushCaseDefaultSpaceWithBodyWithResponse request with arbitrary body returning *PushCaseDefaultSpaceResponse +func (c *ClientWithResponses) PushCaseDefaultSpaceWithBodyWithResponse(ctx context.Context, caseId CasesCaseId, connectorId CasesConnectorId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PushCaseDefaultSpaceResponse, error) { + rsp, err := c.PushCaseDefaultSpaceWithBody(ctx, caseId, connectorId, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePushCaseDefaultSpaceResponse(rsp) +} - if a.AllowEdit != nil { - object["allow_edit"], err = json.Marshal(a.AllowEdit) - if err != nil { - return nil, fmt.Errorf("error marshaling 'allow_edit': %w", err) - } +func (c *ClientWithResponses) PushCaseDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, connectorId CasesConnectorId, body PushCaseDefaultSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*PushCaseDefaultSpaceResponse, error) { + rsp, err := c.PushCaseDefaultSpace(ctx, caseId, connectorId, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePushCaseDefaultSpaceResponse(rsp) +} - if a.CaSha256 != nil { - object["ca_sha256"], err = json.Marshal(a.CaSha256) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ca_sha256': %w", err) - } +// AddCaseFileDefaultSpaceWithBodyWithResponse request with arbitrary body returning *AddCaseFileDefaultSpaceResponse +func (c *ClientWithResponses) AddCaseFileDefaultSpaceWithBodyWithResponse(ctx context.Context, caseId CasesCaseId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*AddCaseFileDefaultSpaceResponse, error) { + rsp, err := c.AddCaseFileDefaultSpaceWithBody(ctx, caseId, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseAddCaseFileDefaultSpaceResponse(rsp) +} - if a.CaTrustedFingerprint != nil { - object["ca_trusted_fingerprint"], err = json.Marshal(a.CaTrustedFingerprint) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ca_trusted_fingerprint': %w", err) - } +// FindCaseActivityDefaultSpaceWithResponse request returning *FindCaseActivityDefaultSpaceResponse +func (c *ClientWithResponses) FindCaseActivityDefaultSpaceWithResponse(ctx context.Context, caseId CasesCaseId, params *FindCaseActivityDefaultSpaceParams, reqEditors ...RequestEditorFn) (*FindCaseActivityDefaultSpaceResponse, error) { + rsp, err := c.FindCaseActivityDefaultSpace(ctx, caseId, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseFindCaseActivityDefaultSpaceResponse(rsp) +} - if a.ConfigYaml != nil { - object["config_yaml"], err = json.Marshal(a.ConfigYaml) - if err != nil { - return nil, fmt.Errorf("error marshaling 'config_yaml': %w", err) - } +// UpdateFieldsMetadataDefaultWithBodyWithResponse request with arbitrary body returning *UpdateFieldsMetadataDefaultResponse +func (c *ClientWithResponses) UpdateFieldsMetadataDefaultWithBodyWithResponse(ctx context.Context, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateFieldsMetadataDefaultResponse, error) { + rsp, err := c.UpdateFieldsMetadataDefaultWithBody(ctx, viewId, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateFieldsMetadataDefaultResponse(rsp) +} - object["hosts"], err = json.Marshal(a.Hosts) +func (c *ClientWithResponses) UpdateFieldsMetadataDefaultWithResponse(ctx context.Context, viewId DataViewsViewId, body UpdateFieldsMetadataDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateFieldsMetadataDefaultResponse, error) { + rsp, err := c.UpdateFieldsMetadataDefault(ctx, viewId, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'hosts': %w", err) + return nil, err } + return ParseUpdateFieldsMetadataDefaultResponse(rsp) +} - if a.Id != nil { - object["id"], err = json.Marshal(a.Id) - if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) - } +// CreateRuntimeFieldDefaultWithBodyWithResponse request with arbitrary body returning *CreateRuntimeFieldDefaultResponse +func (c *ClientWithResponses) CreateRuntimeFieldDefaultWithBodyWithResponse(ctx context.Context, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateRuntimeFieldDefaultResponse, error) { + rsp, err := c.CreateRuntimeFieldDefaultWithBody(ctx, viewId, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateRuntimeFieldDefaultResponse(rsp) +} - if a.IsDefault != nil { - object["is_default"], err = json.Marshal(a.IsDefault) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_default': %w", err) - } +func (c *ClientWithResponses) CreateRuntimeFieldDefaultWithResponse(ctx context.Context, viewId DataViewsViewId, body CreateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateRuntimeFieldDefaultResponse, error) { + rsp, err := c.CreateRuntimeFieldDefault(ctx, viewId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateRuntimeFieldDefaultResponse(rsp) +} - if a.IsDefaultMonitoring != nil { - object["is_default_monitoring"], err = json.Marshal(a.IsDefaultMonitoring) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_default_monitoring': %w", err) - } +// CreateUpdateRuntimeFieldDefaultWithBodyWithResponse request with arbitrary body returning *CreateUpdateRuntimeFieldDefaultResponse +func (c *ClientWithResponses) CreateUpdateRuntimeFieldDefaultWithBodyWithResponse(ctx context.Context, viewId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateUpdateRuntimeFieldDefaultResponse, error) { + rsp, err := c.CreateUpdateRuntimeFieldDefaultWithBody(ctx, viewId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseCreateUpdateRuntimeFieldDefaultResponse(rsp) +} - if a.IsInternal != nil { - object["is_internal"], err = json.Marshal(a.IsInternal) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_internal': %w", err) - } +func (c *ClientWithResponses) CreateUpdateRuntimeFieldDefaultWithResponse(ctx context.Context, viewId string, body CreateUpdateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateUpdateRuntimeFieldDefaultResponse, error) { + rsp, err := c.CreateUpdateRuntimeFieldDefault(ctx, viewId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateUpdateRuntimeFieldDefaultResponse(rsp) +} - if a.IsPreconfigured != nil { - object["is_preconfigured"], err = json.Marshal(a.IsPreconfigured) - if err != nil { - return nil, fmt.Errorf("error marshaling 'is_preconfigured': %w", err) - } +// DeleteRuntimeFieldDefaultWithResponse request returning *DeleteRuntimeFieldDefaultResponse +func (c *ClientWithResponses) DeleteRuntimeFieldDefaultWithResponse(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, reqEditors ...RequestEditorFn) (*DeleteRuntimeFieldDefaultResponse, error) { + rsp, err := c.DeleteRuntimeFieldDefault(ctx, viewId, fieldName, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteRuntimeFieldDefaultResponse(rsp) +} - if a.KibanaApiKey != nil { - object["kibana_api_key"], err = json.Marshal(a.KibanaApiKey) - if err != nil { - return nil, fmt.Errorf("error marshaling 'kibana_api_key': %w", err) - } +// GetRuntimeFieldDefaultWithResponse request returning *GetRuntimeFieldDefaultResponse +func (c *ClientWithResponses) GetRuntimeFieldDefaultWithResponse(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, reqEditors ...RequestEditorFn) (*GetRuntimeFieldDefaultResponse, error) { + rsp, err := c.GetRuntimeFieldDefault(ctx, viewId, fieldName, reqEditors...) + if err != nil { + return nil, err } + return ParseGetRuntimeFieldDefaultResponse(rsp) +} - if a.KibanaUrl != nil { - object["kibana_url"], err = json.Marshal(a.KibanaUrl) - if err != nil { - return nil, fmt.Errorf("error marshaling 'kibana_url': %w", err) - } +// UpdateRuntimeFieldDefaultWithBodyWithResponse request with arbitrary body returning *UpdateRuntimeFieldDefaultResponse +func (c *ClientWithResponses) UpdateRuntimeFieldDefaultWithBodyWithResponse(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateRuntimeFieldDefaultResponse, error) { + rsp, err := c.UpdateRuntimeFieldDefaultWithBody(ctx, viewId, fieldName, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateRuntimeFieldDefaultResponse(rsp) +} - object["name"], err = json.Marshal(a.Name) +func (c *ClientWithResponses) UpdateRuntimeFieldDefaultWithResponse(ctx context.Context, viewId DataViewsViewId, fieldName DataViewsFieldName, body UpdateRuntimeFieldDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateRuntimeFieldDefaultResponse, error) { + rsp, err := c.UpdateRuntimeFieldDefault(ctx, viewId, fieldName, body, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) + return nil, err } + return ParseUpdateRuntimeFieldDefaultResponse(rsp) +} - if a.Preset != nil { - object["preset"], err = json.Marshal(a.Preset) - if err != nil { - return nil, fmt.Errorf("error marshaling 'preset': %w", err) - } +// GetDefaultDataViewDefaultWithResponse request returning *GetDefaultDataViewDefaultResponse +func (c *ClientWithResponses) GetDefaultDataViewDefaultWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetDefaultDataViewDefaultResponse, error) { + rsp, err := c.GetDefaultDataViewDefault(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseGetDefaultDataViewDefaultResponse(rsp) +} - if a.ProxyId != nil { - object["proxy_id"], err = json.Marshal(a.ProxyId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'proxy_id': %w", err) - } +// SetDefaultDatailViewDefaultWithBodyWithResponse request with arbitrary body returning *SetDefaultDatailViewDefaultResponse +func (c *ClientWithResponses) SetDefaultDatailViewDefaultWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SetDefaultDatailViewDefaultResponse, error) { + rsp, err := c.SetDefaultDatailViewDefaultWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseSetDefaultDatailViewDefaultResponse(rsp) +} - if a.Secrets != nil { - object["secrets"], err = json.Marshal(a.Secrets) - if err != nil { - return nil, fmt.Errorf("error marshaling 'secrets': %w", err) - } +func (c *ClientWithResponses) SetDefaultDatailViewDefaultWithResponse(ctx context.Context, body SetDefaultDatailViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*SetDefaultDatailViewDefaultResponse, error) { + rsp, err := c.SetDefaultDatailViewDefault(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseSetDefaultDatailViewDefaultResponse(rsp) +} - if a.ServiceToken != nil { - object["service_token"], err = json.Marshal(a.ServiceToken) - if err != nil { - return nil, fmt.Errorf("error marshaling 'service_token': %w", err) - } +// SwapDataViewsDefaultWithBodyWithResponse request with arbitrary body returning *SwapDataViewsDefaultResponse +func (c *ClientWithResponses) SwapDataViewsDefaultWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SwapDataViewsDefaultResponse, error) { + rsp, err := c.SwapDataViewsDefaultWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseSwapDataViewsDefaultResponse(rsp) +} - if a.Shipper != nil { - object["shipper"], err = json.Marshal(a.Shipper) - if err != nil { - return nil, fmt.Errorf("error marshaling 'shipper': %w", err) - } +func (c *ClientWithResponses) SwapDataViewsDefaultWithResponse(ctx context.Context, body SwapDataViewsDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*SwapDataViewsDefaultResponse, error) { + rsp, err := c.SwapDataViewsDefault(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseSwapDataViewsDefaultResponse(rsp) +} - if a.Ssl != nil { - object["ssl"], err = json.Marshal(a.Ssl) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ssl': %w", err) - } +// PreviewSwapDataViewsDefaultWithBodyWithResponse request with arbitrary body returning *PreviewSwapDataViewsDefaultResponse +func (c *ClientWithResponses) PreviewSwapDataViewsDefaultWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PreviewSwapDataViewsDefaultResponse, error) { + rsp, err := c.PreviewSwapDataViewsDefaultWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePreviewSwapDataViewsDefaultResponse(rsp) +} - if a.SyncIntegrations != nil { - object["sync_integrations"], err = json.Marshal(a.SyncIntegrations) - if err != nil { - return nil, fmt.Errorf("error marshaling 'sync_integrations': %w", err) - } +func (c *ClientWithResponses) PreviewSwapDataViewsDefaultWithResponse(ctx context.Context, body PreviewSwapDataViewsDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*PreviewSwapDataViewsDefaultResponse, error) { + rsp, err := c.PreviewSwapDataViewsDefault(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePreviewSwapDataViewsDefaultResponse(rsp) +} - if a.SyncUninstalledIntegrations != nil { - object["sync_uninstalled_integrations"], err = json.Marshal(a.SyncUninstalledIntegrations) - if err != nil { - return nil, fmt.Errorf("error marshaling 'sync_uninstalled_integrations': %w", err) - } +// DeleteAlertsIndexWithResponse request returning *DeleteAlertsIndexResponse +func (c *ClientWithResponses) DeleteAlertsIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*DeleteAlertsIndexResponse, error) { + rsp, err := c.DeleteAlertsIndex(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteAlertsIndexResponse(rsp) +} - object["type"], err = json.Marshal(a.Type) +// ReadAlertsIndexWithResponse request returning *ReadAlertsIndexResponse +func (c *ClientWithResponses) ReadAlertsIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadAlertsIndexResponse, error) { + rsp, err := c.ReadAlertsIndex(ctx, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) + return nil, err } + return ParseReadAlertsIndexResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// CreateAlertsIndexWithResponse request returning *CreateAlertsIndexResponse +func (c *ClientWithResponses) CreateAlertsIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*CreateAlertsIndexResponse, error) { + rsp, err := c.CreateAlertsIndex(ctx, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseCreateAlertsIndexResponse(rsp) } -// Getter for additional properties for OutputRemoteElasticsearchSecretsServiceToken0. 
Returns the specified -// element and whether it was found -func (a OutputRemoteElasticsearchSecretsServiceToken0) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// ReadPrivilegesWithResponse request returning *ReadPrivilegesResponse +func (c *ClientWithResponses) ReadPrivilegesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadPrivilegesResponse, error) { + rsp, err := c.ReadPrivileges(ctx, reqEditors...) + if err != nil { + return nil, err } - return + return ParseReadPrivilegesResponse(rsp) } -// Setter for additional properties for OutputRemoteElasticsearchSecretsServiceToken0 -func (a *OutputRemoteElasticsearchSecretsServiceToken0) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// DeleteRuleWithResponse request returning *DeleteRuleResponse +func (c *ClientWithResponses) DeleteRuleWithResponse(ctx context.Context, params *DeleteRuleParams, reqEditors ...RequestEditorFn) (*DeleteRuleResponse, error) { + rsp, err := c.DeleteRule(ctx, params, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseDeleteRuleResponse(rsp) } -// Override default JSON handling for OutputRemoteElasticsearchSecretsServiceToken0 to handle AdditionalProperties -func (a *OutputRemoteElasticsearchSecretsServiceToken0) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// ReadRuleWithResponse request returning *ReadRuleResponse +func (c *ClientWithResponses) ReadRuleWithResponse(ctx context.Context, params *ReadRuleParams, reqEditors ...RequestEditorFn) (*ReadRuleResponse, error) { + rsp, err := c.ReadRule(ctx, params, reqEditors...) if err != nil { - return err + return nil, err } + return ParseReadRuleResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// PatchRuleWithBodyWithResponse request with arbitrary body returning *PatchRuleResponse +func (c *ClientWithResponses) PatchRuleWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchRuleResponse, error) { + rsp, err := c.PatchRuleWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePatchRuleResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PatchRuleWithResponse(ctx context.Context, body PatchRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchRuleResponse, error) { + rsp, err := c.PatchRule(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } - return nil + return ParsePatchRuleResponse(rsp) } -// Override default JSON handling for OutputRemoteElasticsearchSecretsServiceToken0 to handle AdditionalProperties -func (a OutputRemoteElasticsearchSecretsServiceToken0) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// CreateRuleWithBodyWithResponse request with arbitrary body returning *CreateRuleResponse +func (c *ClientWithResponses) CreateRuleWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateRuleResponse, error) { + rsp, err := c.CreateRuleWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseCreateRuleResponse(rsp) +} - object["id"], err = json.Marshal(a.Id) +func (c *ClientWithResponses) CreateRuleWithResponse(ctx context.Context, body CreateRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateRuleResponse, error) { + rsp, err := c.CreateRule(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) + return nil, err } + return ParseCreateRuleResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// UpdateRuleWithBodyWithResponse request with arbitrary body returning *UpdateRuleResponse +func (c *ClientWithResponses) UpdateRuleWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateRuleResponse, error) { + rsp, err := c.UpdateRuleWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseUpdateRuleResponse(rsp) } -// Getter for additional properties for OutputRemoteElasticsearchSecretsSslKey0. Returns the specified -// element and whether it was found -func (a OutputRemoteElasticsearchSecretsSslKey0) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) UpdateRuleWithResponse(ctx context.Context, body UpdateRuleJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateRuleResponse, error) { + rsp, err := c.UpdateRule(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParseUpdateRuleResponse(rsp) } -// Setter for additional properties for OutputRemoteElasticsearchSecretsSslKey0 -func (a *OutputRemoteElasticsearchSecretsSslKey0) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// PerformRulesBulkActionWithBodyWithResponse request with arbitrary body returning *PerformRulesBulkActionResponse +func (c *ClientWithResponses) PerformRulesBulkActionWithBodyWithResponse(ctx context.Context, params *PerformRulesBulkActionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PerformRulesBulkActionResponse, error) { + rsp, err := c.PerformRulesBulkActionWithBody(ctx, params, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePerformRulesBulkActionResponse(rsp) } -// Override default JSON handling for OutputRemoteElasticsearchSecretsSslKey0 to handle AdditionalProperties -func (a *OutputRemoteElasticsearchSecretsSslKey0) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +func (c *ClientWithResponses) PerformRulesBulkActionWithResponse(ctx context.Context, params *PerformRulesBulkActionParams, body PerformRulesBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*PerformRulesBulkActionResponse, error) { + rsp, err := c.PerformRulesBulkAction(ctx, params, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePerformRulesBulkActionResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// ExportRulesWithBodyWithResponse request with arbitrary body returning *ExportRulesResponse +func (c *ClientWithResponses) ExportRulesWithBodyWithResponse(ctx context.Context, params *ExportRulesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ExportRulesResponse, error) { + rsp, err := c.ExportRulesWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseExportRulesResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) ExportRulesWithResponse(ctx context.Context, params *ExportRulesParams, body ExportRulesJSONRequestBody, reqEditors ...RequestEditorFn) (*ExportRulesResponse, error) { + rsp, err := c.ExportRules(ctx, params, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseExportRulesResponse(rsp) } -// Override default JSON handling for OutputRemoteElasticsearchSecretsSslKey0 to handle AdditionalProperties -func (a OutputRemoteElasticsearchSecretsSslKey0) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - object["id"], err = json.Marshal(a.Id) +// FindRulesWithResponse request returning *FindRulesResponse +func (c *ClientWithResponses) FindRulesWithResponse(ctx context.Context, params *FindRulesParams, reqEditors ...RequestEditorFn) (*FindRulesResponse, error) { + rsp, err := c.FindRules(ctx, params, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) + return nil, err } + return ParseFindRulesResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// ImportRulesWithBodyWithResponse request with arbitrary body returning *ImportRulesResponse +func (c *ClientWithResponses) ImportRulesWithBodyWithResponse(ctx context.Context, params *ImportRulesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ImportRulesResponse, error) { + rsp, err := c.ImportRulesWithBody(ctx, params, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - return json.Marshal(object) + return ParseImportRulesResponse(rsp) } -// Getter for additional properties for OutputRemoteElasticsearch_Secrets_Ssl. Returns the specified -// element and whether it was found -func (a OutputRemoteElasticsearch_Secrets_Ssl) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// InstallPrebuiltRulesAndTimelinesWithResponse request returning *InstallPrebuiltRulesAndTimelinesResponse +func (c *ClientWithResponses) InstallPrebuiltRulesAndTimelinesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*InstallPrebuiltRulesAndTimelinesResponse, error) { + rsp, err := c.InstallPrebuiltRulesAndTimelines(ctx, reqEditors...) + if err != nil { + return nil, err } - return + return ParseInstallPrebuiltRulesAndTimelinesResponse(rsp) } -// Setter for additional properties for OutputRemoteElasticsearch_Secrets_Ssl -func (a *OutputRemoteElasticsearch_Secrets_Ssl) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// ReadPrebuiltRulesAndTimelinesStatusWithResponse request returning *ReadPrebuiltRulesAndTimelinesStatusResponse +func (c *ClientWithResponses) ReadPrebuiltRulesAndTimelinesStatusWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadPrebuiltRulesAndTimelinesStatusResponse, error) { + rsp, err := c.ReadPrebuiltRulesAndTimelinesStatus(ctx, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseReadPrebuiltRulesAndTimelinesStatusResponse(rsp) } -// Override default JSON handling for OutputRemoteElasticsearch_Secrets_Ssl to handle AdditionalProperties -func (a *OutputRemoteElasticsearch_Secrets_Ssl) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// RulePreviewWithBodyWithResponse request with arbitrary body returning *RulePreviewResponse +func (c *ClientWithResponses) RulePreviewWithBodyWithResponse(ctx context.Context, params *RulePreviewParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RulePreviewResponse, error) { + rsp, err := c.RulePreviewWithBody(ctx, params, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseRulePreviewResponse(rsp) +} - if raw, found := object["key"]; found { - err = json.Unmarshal(raw, &a.Key) - if err != nil { - return fmt.Errorf("error reading 'key': %w", err) - } - delete(object, "key") +func (c *ClientWithResponses) RulePreviewWithResponse(ctx context.Context, params *RulePreviewParams, body RulePreviewJSONRequestBody, reqEditors ...RequestEditorFn) (*RulePreviewResponse, error) { + rsp, err := c.RulePreview(ctx, params, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseRulePreviewResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// CreateRuleExceptionListItemsWithBodyWithResponse request with arbitrary body returning *CreateRuleExceptionListItemsResponse +func (c *ClientWithResponses) CreateRuleExceptionListItemsWithBodyWithResponse(ctx context.Context, id SecurityExceptionsAPIRuleId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateRuleExceptionListItemsResponse, error) { + rsp, err := c.CreateRuleExceptionListItemsWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseCreateRuleExceptionListItemsResponse(rsp) } -// Override default JSON handling for OutputRemoteElasticsearch_Secrets_Ssl to handle AdditionalProperties -func (a OutputRemoteElasticsearch_Secrets_Ssl) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) CreateRuleExceptionListItemsWithResponse(ctx context.Context, id SecurityExceptionsAPIRuleId, body CreateRuleExceptionListItemsJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateRuleExceptionListItemsResponse, error) { + rsp, err := c.CreateRuleExceptionListItems(ctx, id, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseCreateRuleExceptionListItemsResponse(rsp) +} - if a.Key != nil { - object["key"], err = json.Marshal(a.Key) - if err != nil { - return nil, fmt.Errorf("error marshaling 'key': %w", err) - } +// SetAlertAssigneesWithBodyWithResponse request with arbitrary body returning *SetAlertAssigneesResponse +func (c *ClientWithResponses) SetAlertAssigneesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SetAlertAssigneesResponse, error) { + rsp, err := c.SetAlertAssigneesWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseSetAlertAssigneesResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) SetAlertAssigneesWithResponse(ctx context.Context, body SetAlertAssigneesJSONRequestBody, reqEditors ...RequestEditorFn) (*SetAlertAssigneesResponse, error) { + rsp, err := c.SetAlertAssignees(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseSetAlertAssigneesResponse(rsp) } -// Getter for additional properties for OutputRemoteElasticsearch_Secrets. 
Returns the specified -// element and whether it was found -func (a OutputRemoteElasticsearch_Secrets) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// FinalizeAlertsMigrationWithBodyWithResponse request with arbitrary body returning *FinalizeAlertsMigrationResponse +func (c *ClientWithResponses) FinalizeAlertsMigrationWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*FinalizeAlertsMigrationResponse, error) { + rsp, err := c.FinalizeAlertsMigrationWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParseFinalizeAlertsMigrationResponse(rsp) } -// Setter for additional properties for OutputRemoteElasticsearch_Secrets -func (a *OutputRemoteElasticsearch_Secrets) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) FinalizeAlertsMigrationWithResponse(ctx context.Context, body FinalizeAlertsMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*FinalizeAlertsMigrationResponse, error) { + rsp, err := c.FinalizeAlertsMigration(ctx, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseFinalizeAlertsMigrationResponse(rsp) } -// Override default JSON handling for OutputRemoteElasticsearch_Secrets to handle AdditionalProperties -func (a *OutputRemoteElasticsearch_Secrets) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// AlertsMigrationCleanupWithBodyWithResponse request with arbitrary body returning *AlertsMigrationCleanupResponse +func (c *ClientWithResponses) AlertsMigrationCleanupWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*AlertsMigrationCleanupResponse, error) { + rsp, err := c.AlertsMigrationCleanupWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseAlertsMigrationCleanupResponse(rsp) +} - if raw, found := object["service_token"]; found { - err = json.Unmarshal(raw, &a.ServiceToken) - if err != nil { - return fmt.Errorf("error reading 'service_token': %w", err) - } - delete(object, "service_token") +func (c *ClientWithResponses) AlertsMigrationCleanupWithResponse(ctx context.Context, body AlertsMigrationCleanupJSONRequestBody, reqEditors ...RequestEditorFn) (*AlertsMigrationCleanupResponse, error) { + rsp, err := c.AlertsMigrationCleanup(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseAlertsMigrationCleanupResponse(rsp) +} - if raw, found := object["ssl"]; found { - err = json.Unmarshal(raw, &a.Ssl) - if err != nil { - return fmt.Errorf("error reading 'ssl': %w", err) - } - delete(object, "ssl") +// CreateAlertsMigrationWithBodyWithResponse request with arbitrary body returning *CreateAlertsMigrationResponse +func (c *ClientWithResponses) CreateAlertsMigrationWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateAlertsMigrationResponse, error) { + rsp, err := c.CreateAlertsMigrationWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseCreateAlertsMigrationResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) CreateAlertsMigrationWithResponse(ctx context.Context, body CreateAlertsMigrationJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateAlertsMigrationResponse, error) { + rsp, err := c.CreateAlertsMigration(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseCreateAlertsMigrationResponse(rsp) } -// Override default JSON handling for OutputRemoteElasticsearch_Secrets to handle AdditionalProperties -func (a OutputRemoteElasticsearch_Secrets) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// ReadAlertsMigrationStatusWithResponse request returning *ReadAlertsMigrationStatusResponse +func (c *ClientWithResponses) ReadAlertsMigrationStatusWithResponse(ctx context.Context, params *ReadAlertsMigrationStatusParams, reqEditors ...RequestEditorFn) (*ReadAlertsMigrationStatusResponse, error) { + rsp, err := c.ReadAlertsMigrationStatus(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseReadAlertsMigrationStatusResponse(rsp) +} - if a.ServiceToken != nil { - object["service_token"], err = json.Marshal(a.ServiceToken) - if err != nil { - return nil, fmt.Errorf("error marshaling 'service_token': %w", err) - } +// SearchAlertsWithBodyWithResponse request with arbitrary body returning *SearchAlertsResponse +func (c *ClientWithResponses) SearchAlertsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SearchAlertsResponse, error) { + rsp, err := c.SearchAlertsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseSearchAlertsResponse(rsp) +} - if a.Ssl != nil { - object["ssl"], err = json.Marshal(a.Ssl) - if err != nil { - return nil, fmt.Errorf("error marshaling 'ssl': %w", err) - } +func (c *ClientWithResponses) SearchAlertsWithResponse(ctx context.Context, body SearchAlertsJSONRequestBody, reqEditors ...RequestEditorFn) (*SearchAlertsResponse, error) { + rsp, err := c.SearchAlerts(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseSearchAlertsResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// SetAlertsStatusWithBodyWithResponse request with arbitrary body returning *SetAlertsStatusResponse +func (c *ClientWithResponses) SetAlertsStatusWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SetAlertsStatusResponse, error) { + rsp, err := c.SetAlertsStatusWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseSetAlertsStatusResponse(rsp) } -// Getter for additional properties for OutputShipper. 
Returns the specified -// element and whether it was found -func (a OutputShipper) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) SetAlertsStatusWithResponse(ctx context.Context, body SetAlertsStatusJSONRequestBody, reqEditors ...RequestEditorFn) (*SetAlertsStatusResponse, error) { + rsp, err := c.SetAlertsStatus(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParseSetAlertsStatusResponse(rsp) } -// Setter for additional properties for OutputShipper -func (a *OutputShipper) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// SetAlertTagsWithBodyWithResponse request with arbitrary body returning *SetAlertTagsResponse +func (c *ClientWithResponses) SetAlertTagsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*SetAlertTagsResponse, error) { + rsp, err := c.SetAlertTagsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseSetAlertTagsResponse(rsp) } -// Override default JSON handling for OutputShipper to handle AdditionalProperties -func (a *OutputShipper) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +func (c *ClientWithResponses) SetAlertTagsWithResponse(ctx context.Context, body SetAlertTagsJSONRequestBody, reqEditors ...RequestEditorFn) (*SetAlertTagsResponse, error) { + rsp, err := c.SetAlertTags(ctx, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseSetAlertTagsResponse(rsp) +} - if raw, found := object["compression_level"]; found { - err = json.Unmarshal(raw, &a.CompressionLevel) - if err != nil { - return fmt.Errorf("error reading 'compression_level': %w", err) - } - delete(object, "compression_level") +// ReadTagsWithResponse request returning *ReadTagsResponse +func (c *ClientWithResponses) ReadTagsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadTagsResponse, error) { + rsp, err := c.ReadTags(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseReadTagsResponse(rsp) +} - if raw, found := object["disk_queue_compression_enabled"]; found { - err = json.Unmarshal(raw, &a.DiskQueueCompressionEnabled) - if err != nil { - return fmt.Errorf("error reading 'disk_queue_compression_enabled': %w", err) - } - delete(object, "disk_queue_compression_enabled") +// RotateEncryptionKeyWithResponse request returning *RotateEncryptionKeyResponse +func (c *ClientWithResponses) RotateEncryptionKeyWithResponse(ctx context.Context, params *RotateEncryptionKeyParams, reqEditors ...RequestEditorFn) (*RotateEncryptionKeyResponse, error) { + rsp, err := c.RotateEncryptionKey(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseRotateEncryptionKeyResponse(rsp) +} - if raw, found := object["disk_queue_enabled"]; found { - err = json.Unmarshal(raw, &a.DiskQueueEnabled) - if err != nil { - return fmt.Errorf("error reading 'disk_queue_enabled': %w", err) - } - delete(object, "disk_queue_enabled") +// EndpointGetActionsListWithResponse request returning *EndpointGetActionsListResponse +func (c *ClientWithResponses) EndpointGetActionsListWithResponse(ctx context.Context, params *EndpointGetActionsListParams, reqEditors ...RequestEditorFn) (*EndpointGetActionsListResponse, error) { + rsp, err := c.EndpointGetActionsList(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseEndpointGetActionsListResponse(rsp) +} - if raw, found := object["disk_queue_encryption_enabled"]; found { - err = json.Unmarshal(raw, &a.DiskQueueEncryptionEnabled) - if err != nil { - return fmt.Errorf("error reading 'disk_queue_encryption_enabled': %w", err) - } - delete(object, "disk_queue_encryption_enabled") +// EndpointExecuteActionWithBodyWithResponse request with arbitrary body returning *EndpointExecuteActionResponse +func (c *ClientWithResponses) EndpointExecuteActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointExecuteActionResponse, error) { + rsp, err := c.EndpointExecuteActionWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseEndpointExecuteActionResponse(rsp) +} - if raw, found := object["disk_queue_max_size"]; found { - err = json.Unmarshal(raw, &a.DiskQueueMaxSize) - if err != nil { - return fmt.Errorf("error reading 'disk_queue_max_size': %w", err) - } - delete(object, "disk_queue_max_size") +func (c *ClientWithResponses) EndpointExecuteActionWithResponse(ctx context.Context, body EndpointExecuteActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointExecuteActionResponse, error) { + rsp, err := c.EndpointExecuteAction(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseEndpointExecuteActionResponse(rsp) +} - if raw, found := object["disk_queue_path"]; found { - err = json.Unmarshal(raw, &a.DiskQueuePath) - if err != nil { - return fmt.Errorf("error reading 'disk_queue_path': %w", err) - } - delete(object, "disk_queue_path") +// EndpointGetFileActionWithBodyWithResponse request with arbitrary body returning *EndpointGetFileActionResponse +func (c *ClientWithResponses) EndpointGetFileActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointGetFileActionResponse, error) { + rsp, err := c.EndpointGetFileActionWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseEndpointGetFileActionResponse(rsp) +} - if raw, found := object["loadbalance"]; found { - err = json.Unmarshal(raw, &a.Loadbalance) - if err != nil { - return fmt.Errorf("error reading 'loadbalance': %w", err) - } - delete(object, "loadbalance") +func (c *ClientWithResponses) EndpointGetFileActionWithResponse(ctx context.Context, body EndpointGetFileActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointGetFileActionResponse, error) { + rsp, err := c.EndpointGetFileAction(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseEndpointGetFileActionResponse(rsp) +} - if raw, found := object["max_batch_bytes"]; found { - err = json.Unmarshal(raw, &a.MaxBatchBytes) - if err != nil { - return fmt.Errorf("error reading 'max_batch_bytes': %w", err) - } - delete(object, "max_batch_bytes") +// EndpointIsolateActionWithBodyWithResponse request with arbitrary body returning *EndpointIsolateActionResponse +func (c *ClientWithResponses) EndpointIsolateActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointIsolateActionResponse, error) { + rsp, err := c.EndpointIsolateActionWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseEndpointIsolateActionResponse(rsp) +} - if raw, found := object["mem_queue_events"]; found { - err = json.Unmarshal(raw, &a.MemQueueEvents) - if err != nil { - return fmt.Errorf("error reading 'mem_queue_events': %w", err) - } - delete(object, "mem_queue_events") +func (c *ClientWithResponses) EndpointIsolateActionWithResponse(ctx context.Context, body EndpointIsolateActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointIsolateActionResponse, error) { + rsp, err := c.EndpointIsolateAction(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseEndpointIsolateActionResponse(rsp) +} - if raw, found := object["queue_flush_timeout"]; found { - err = json.Unmarshal(raw, &a.QueueFlushTimeout) - if err != nil { - return fmt.Errorf("error reading 'queue_flush_timeout': %w", err) - } - delete(object, "queue_flush_timeout") +// EndpointKillProcessActionWithBodyWithResponse request with arbitrary body returning *EndpointKillProcessActionResponse +func (c *ClientWithResponses) EndpointKillProcessActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointKillProcessActionResponse, error) { + rsp, err := c.EndpointKillProcessActionWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseEndpointKillProcessActionResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) EndpointKillProcessActionWithResponse(ctx context.Context, body EndpointKillProcessActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointKillProcessActionResponse, error) { + rsp, err := c.EndpointKillProcessAction(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseEndpointKillProcessActionResponse(rsp) } -// Override default JSON handling for OutputShipper to handle AdditionalProperties -func (a OutputShipper) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// EndpointGetProcessesActionWithBodyWithResponse request with arbitrary body returning *EndpointGetProcessesActionResponse +func (c *ClientWithResponses) EndpointGetProcessesActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointGetProcessesActionResponse, error) { + rsp, err := c.EndpointGetProcessesActionWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseEndpointGetProcessesActionResponse(rsp) +} - object["compression_level"], err = json.Marshal(a.CompressionLevel) +func (c *ClientWithResponses) EndpointGetProcessesActionWithResponse(ctx context.Context, body EndpointGetProcessesActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointGetProcessesActionResponse, error) { + rsp, err := c.EndpointGetProcessesAction(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'compression_level': %w", err) + return nil, err } + return ParseEndpointGetProcessesActionResponse(rsp) +} - object["disk_queue_compression_enabled"], err = json.Marshal(a.DiskQueueCompressionEnabled) +// RunScriptActionWithBodyWithResponse request with arbitrary body returning *RunScriptActionResponse +func (c *ClientWithResponses) RunScriptActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*RunScriptActionResponse, error) { + rsp, err := c.RunScriptActionWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'disk_queue_compression_enabled': %w", err) + return nil, err } + return ParseRunScriptActionResponse(rsp) +} - if a.DiskQueueEnabled != nil { - object["disk_queue_enabled"], err = json.Marshal(a.DiskQueueEnabled) - if err != nil { - return nil, fmt.Errorf("error marshaling 'disk_queue_enabled': %w", err) - } +func (c *ClientWithResponses) RunScriptActionWithResponse(ctx context.Context, body RunScriptActionJSONRequestBody, reqEditors ...RequestEditorFn) (*RunScriptActionResponse, error) { + rsp, err := c.RunScriptAction(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseRunScriptActionResponse(rsp) +} - object["disk_queue_encryption_enabled"], err = json.Marshal(a.DiskQueueEncryptionEnabled) +// EndpointScanActionWithBodyWithResponse request with arbitrary body returning *EndpointScanActionResponse +func (c *ClientWithResponses) EndpointScanActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointScanActionResponse, error) { + rsp, err := c.EndpointScanActionWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'disk_queue_encryption_enabled': %w", err) + return nil, err } + return ParseEndpointScanActionResponse(rsp) +} - object["disk_queue_max_size"], err = json.Marshal(a.DiskQueueMaxSize) +func (c *ClientWithResponses) EndpointScanActionWithResponse(ctx context.Context, body EndpointScanActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointScanActionResponse, error) { + rsp, err := c.EndpointScanAction(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'disk_queue_max_size': %w", err) + return nil, err } + return ParseEndpointScanActionResponse(rsp) +} - object["disk_queue_path"], err = json.Marshal(a.DiskQueuePath) +// EndpointGetActionsStateWithResponse request returning *EndpointGetActionsStateResponse +func (c *ClientWithResponses) EndpointGetActionsStateWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*EndpointGetActionsStateResponse, error) { + rsp, err := c.EndpointGetActionsState(ctx, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'disk_queue_path': %w", err) + return nil, err } + return ParseEndpointGetActionsStateResponse(rsp) +} - object["loadbalance"], err = json.Marshal(a.Loadbalance) +// EndpointSuspendProcessActionWithBodyWithResponse request with arbitrary body returning *EndpointSuspendProcessActionResponse +func (c *ClientWithResponses) EndpointSuspendProcessActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointSuspendProcessActionResponse, error) { + rsp, err := c.EndpointSuspendProcessActionWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'loadbalance': %w", err) + return nil, err } + return ParseEndpointSuspendProcessActionResponse(rsp) +} - object["max_batch_bytes"], err = json.Marshal(a.MaxBatchBytes) +func (c *ClientWithResponses) EndpointSuspendProcessActionWithResponse(ctx context.Context, body EndpointSuspendProcessActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointSuspendProcessActionResponse, error) { + rsp, err := c.EndpointSuspendProcessAction(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'max_batch_bytes': %w", err) + return nil, err } + return ParseEndpointSuspendProcessActionResponse(rsp) +} - object["mem_queue_events"], err = json.Marshal(a.MemQueueEvents) +// EndpointUnisolateActionWithBodyWithResponse request with arbitrary body returning *EndpointUnisolateActionResponse +func (c *ClientWithResponses) EndpointUnisolateActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointUnisolateActionResponse, error) { + rsp, err := c.EndpointUnisolateActionWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'mem_queue_events': %w", err) + return nil, err } + return ParseEndpointUnisolateActionResponse(rsp) +} - object["queue_flush_timeout"], err = json.Marshal(a.QueueFlushTimeout) +func (c *ClientWithResponses) EndpointUnisolateActionWithResponse(ctx context.Context, body EndpointUnisolateActionJSONRequestBody, reqEditors ...RequestEditorFn) (*EndpointUnisolateActionResponse, error) { + rsp, err := c.EndpointUnisolateAction(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'queue_flush_timeout': %w", err) + return nil, err } + return ParseEndpointUnisolateActionResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// EndpointUploadActionWithBodyWithResponse request with arbitrary body returning *EndpointUploadActionResponse +func (c *ClientWithResponses) EndpointUploadActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*EndpointUploadActionResponse, error) { + rsp, err := c.EndpointUploadActionWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseEndpointUploadActionResponse(rsp) } -// Getter for additional properties for OutputSsl. 
Returns the specified -// element and whether it was found -func (a OutputSsl) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// EndpointGetActionsDetailsWithResponse request returning *EndpointGetActionsDetailsResponse +func (c *ClientWithResponses) EndpointGetActionsDetailsWithResponse(ctx context.Context, actionId string, reqEditors ...RequestEditorFn) (*EndpointGetActionsDetailsResponse, error) { + rsp, err := c.EndpointGetActionsDetails(ctx, actionId, reqEditors...) + if err != nil { + return nil, err } - return + return ParseEndpointGetActionsDetailsResponse(rsp) } -// Setter for additional properties for OutputSsl -func (a *OutputSsl) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// EndpointFileInfoWithResponse request returning *EndpointFileInfoResponse +func (c *ClientWithResponses) EndpointFileInfoWithResponse(ctx context.Context, actionId string, fileId string, reqEditors ...RequestEditorFn) (*EndpointFileInfoResponse, error) { + rsp, err := c.EndpointFileInfo(ctx, actionId, fileId, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseEndpointFileInfoResponse(rsp) } -// Override default JSON handling for OutputSsl to handle AdditionalProperties -func (a *OutputSsl) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// EndpointFileDownloadWithResponse request returning *EndpointFileDownloadResponse +func (c *ClientWithResponses) EndpointFileDownloadWithResponse(ctx context.Context, actionId string, fileId string, reqEditors ...RequestEditorFn) (*EndpointFileDownloadResponse, error) { + rsp, err := c.EndpointFileDownload(ctx, actionId, fileId, reqEditors...) if err != nil { - return err + return nil, err } + return ParseEndpointFileDownloadResponse(rsp) +} - if raw, found := object["certificate"]; found { - err = json.Unmarshal(raw, &a.Certificate) - if err != nil { - return fmt.Errorf("error reading 'certificate': %w", err) - } - delete(object, "certificate") +// EndpointGetActionsStatusWithResponse request returning *EndpointGetActionsStatusResponse +func (c *ClientWithResponses) EndpointGetActionsStatusWithResponse(ctx context.Context, params *EndpointGetActionsStatusParams, reqEditors ...RequestEditorFn) (*EndpointGetActionsStatusResponse, error) { + rsp, err := c.EndpointGetActionsStatus(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseEndpointGetActionsStatusResponse(rsp) +} - if raw, found := object["certificate_authorities"]; found { - err = json.Unmarshal(raw, &a.CertificateAuthorities) - if err != nil { - return fmt.Errorf("error reading 'certificate_authorities': %w", err) - } - delete(object, "certificate_authorities") +// GetEndpointMetadataListWithResponse request returning *GetEndpointMetadataListResponse +func (c *ClientWithResponses) GetEndpointMetadataListWithResponse(ctx context.Context, params *GetEndpointMetadataListParams, reqEditors ...RequestEditorFn) (*GetEndpointMetadataListResponse, error) { + rsp, err := c.GetEndpointMetadataList(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetEndpointMetadataListResponse(rsp) +} - if raw, found := object["key"]; found { - err = json.Unmarshal(raw, &a.Key) - if err != nil { - return fmt.Errorf("error reading 'key': %w", err) - } - delete(object, "key") +// GetEndpointMetadataWithResponse request returning *GetEndpointMetadataResponse +func (c *ClientWithResponses) GetEndpointMetadataWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetEndpointMetadataResponse, error) { + rsp, err := c.GetEndpointMetadata(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseGetEndpointMetadataResponse(rsp) +} - if raw, found := object["verification_mode"]; found { - err = json.Unmarshal(raw, &a.VerificationMode) - if err != nil { - return fmt.Errorf("error reading 'verification_mode': %w", err) - } - delete(object, "verification_mode") +// GetPolicyResponseWithResponse request returning *GetPolicyResponseResponse +func (c *ClientWithResponses) GetPolicyResponseWithResponse(ctx context.Context, params *GetPolicyResponseParams, reqEditors ...RequestEditorFn) (*GetPolicyResponseResponse, error) { + rsp, err := c.GetPolicyResponse(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetPolicyResponseResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// GetProtectionUpdatesNoteWithResponse request returning *GetProtectionUpdatesNoteResponse +func (c *ClientWithResponses) GetProtectionUpdatesNoteWithResponse(ctx context.Context, packagePolicyId string, reqEditors ...RequestEditorFn) (*GetProtectionUpdatesNoteResponse, error) { + rsp, err := c.GetProtectionUpdatesNote(ctx, packagePolicyId, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseGetProtectionUpdatesNoteResponse(rsp) } -// Override default JSON handling for OutputSsl to handle AdditionalProperties -func (a OutputSsl) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// CreateUpdateProtectionUpdatesNoteWithBodyWithResponse request with arbitrary body returning *CreateUpdateProtectionUpdatesNoteResponse +func (c *ClientWithResponses) CreateUpdateProtectionUpdatesNoteWithBodyWithResponse(ctx context.Context, packagePolicyId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateUpdateProtectionUpdatesNoteResponse, error) { + rsp, err := c.CreateUpdateProtectionUpdatesNoteWithBody(ctx, packagePolicyId, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseCreateUpdateProtectionUpdatesNoteResponse(rsp) +} - if a.Certificate != nil { - object["certificate"], err = json.Marshal(a.Certificate) - if err != nil { - return nil, fmt.Errorf("error marshaling 'certificate': %w", err) - } +func (c *ClientWithResponses) CreateUpdateProtectionUpdatesNoteWithResponse(ctx context.Context, packagePolicyId string, body CreateUpdateProtectionUpdatesNoteJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateUpdateProtectionUpdatesNoteResponse, error) { + rsp, err := c.CreateUpdateProtectionUpdatesNote(ctx, packagePolicyId, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseCreateUpdateProtectionUpdatesNoteResponse(rsp) +} - if a.CertificateAuthorities != nil { - object["certificate_authorities"], err = json.Marshal(a.CertificateAuthorities) - if err != nil { - return nil, fmt.Errorf("error marshaling 'certificate_authorities': %w", err) - } +// CreateEndpointListWithResponse request returning *CreateEndpointListResponse +func (c *ClientWithResponses) CreateEndpointListWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*CreateEndpointListResponse, error) { + rsp, err := c.CreateEndpointList(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateEndpointListResponse(rsp) +} - if a.Key != nil { - object["key"], err = json.Marshal(a.Key) - if err != nil { - return nil, fmt.Errorf("error marshaling 'key': %w", err) - } +// DeleteEndpointListItemWithResponse request returning *DeleteEndpointListItemResponse +func (c *ClientWithResponses) DeleteEndpointListItemWithResponse(ctx context.Context, params *DeleteEndpointListItemParams, reqEditors ...RequestEditorFn) (*DeleteEndpointListItemResponse, error) { + rsp, err := c.DeleteEndpointListItem(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteEndpointListItemResponse(rsp) +} - if a.VerificationMode != nil { - object["verification_mode"], err = json.Marshal(a.VerificationMode) - if err != nil { - return nil, fmt.Errorf("error marshaling 'verification_mode': %w", err) - } +// ReadEndpointListItemWithResponse request returning *ReadEndpointListItemResponse +func (c *ClientWithResponses) ReadEndpointListItemWithResponse(ctx context.Context, params *ReadEndpointListItemParams, reqEditors ...RequestEditorFn) (*ReadEndpointListItemResponse, error) { + rsp, err := c.ReadEndpointListItem(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseReadEndpointListItemResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// CreateEndpointListItemWithBodyWithResponse request with arbitrary body returning *CreateEndpointListItemResponse +func (c *ClientWithResponses) CreateEndpointListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateEndpointListItemResponse, error) { + rsp, err := c.CreateEndpointListItemWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseCreateEndpointListItemResponse(rsp) } -// Getter for additional properties for PackageInfo. Returns the specified -// element and whether it was found -func (a PackageInfo) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) CreateEndpointListItemWithResponse(ctx context.Context, body CreateEndpointListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateEndpointListItemResponse, error) { + rsp, err := c.CreateEndpointListItem(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } - return + return ParseCreateEndpointListItemResponse(rsp) } -// Setter for additional properties for PackageInfo -func (a *PackageInfo) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// UpdateEndpointListItemWithBodyWithResponse request with arbitrary body returning *UpdateEndpointListItemResponse +func (c *ClientWithResponses) UpdateEndpointListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateEndpointListItemResponse, error) { + rsp, err := c.UpdateEndpointListItemWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseUpdateEndpointListItemResponse(rsp) } -// Override default JSON handling for PackageInfo to handle AdditionalProperties -func (a *PackageInfo) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +func (c *ClientWithResponses) UpdateEndpointListItemWithResponse(ctx context.Context, body UpdateEndpointListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateEndpointListItemResponse, error) { + rsp, err := c.UpdateEndpointListItem(ctx, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseUpdateEndpointListItemResponse(rsp) +} - if raw, found := object["agent"]; found { - err = json.Unmarshal(raw, &a.Agent) - if err != nil { - return fmt.Errorf("error reading 'agent': %w", err) - } - delete(object, "agent") +// FindEndpointListItemsWithResponse request returning *FindEndpointListItemsResponse +func (c *ClientWithResponses) FindEndpointListItemsWithResponse(ctx context.Context, params *FindEndpointListItemsParams, reqEditors ...RequestEditorFn) (*FindEndpointListItemsResponse, error) { + rsp, err := c.FindEndpointListItems(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseFindEndpointListItemsResponse(rsp) +} - if raw, found := object["asset_tags"]; found { - err = json.Unmarshal(raw, &a.AssetTags) - if err != nil { - return fmt.Errorf("error reading 'asset_tags': %w", err) - } - delete(object, "asset_tags") +// DeleteMonitoringEngineWithResponse request returning *DeleteMonitoringEngineResponse +func (c *ClientWithResponses) DeleteMonitoringEngineWithResponse(ctx context.Context, params *DeleteMonitoringEngineParams, reqEditors ...RequestEditorFn) (*DeleteMonitoringEngineResponse, error) { + rsp, err := c.DeleteMonitoringEngine(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteMonitoringEngineResponse(rsp) +} - if raw, found := object["assets"]; found { - err = json.Unmarshal(raw, &a.Assets) - if err != nil { - return fmt.Errorf("error reading 'assets': %w", err) - } - delete(object, "assets") +// DisableMonitoringEngineWithResponse request returning *DisableMonitoringEngineResponse +func (c *ClientWithResponses) DisableMonitoringEngineWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*DisableMonitoringEngineResponse, error) { + rsp, err := c.DisableMonitoringEngine(ctx, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseDisableMonitoringEngineResponse(rsp) +} - if raw, found := object["categories"]; found { - err = json.Unmarshal(raw, &a.Categories) - if err != nil { - return fmt.Errorf("error reading 'categories': %w", err) - } - delete(object, "categories") +// InitMonitoringEngineWithResponse request returning *InitMonitoringEngineResponse +func (c *ClientWithResponses) InitMonitoringEngineWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*InitMonitoringEngineResponse, error) { + rsp, err := c.InitMonitoringEngine(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseInitMonitoringEngineResponse(rsp) +} - if raw, found := object["conditions"]; found { - err = json.Unmarshal(raw, &a.Conditions) - if err != nil { - return fmt.Errorf("error reading 'conditions': %w", err) - } - delete(object, "conditions") +// ScheduleMonitoringEngineWithResponse request returning *ScheduleMonitoringEngineResponse +func (c *ClientWithResponses) ScheduleMonitoringEngineWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ScheduleMonitoringEngineResponse, error) { + rsp, err := c.ScheduleMonitoringEngine(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseScheduleMonitoringEngineResponse(rsp) +} - if raw, found := object["data_streams"]; found { - err = json.Unmarshal(raw, &a.DataStreams) - if err != nil { - return fmt.Errorf("error reading 'data_streams': %w", err) - } - delete(object, "data_streams") +// PrivMonHealthWithResponse request returning *PrivMonHealthResponse +func (c *ClientWithResponses) PrivMonHealthWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*PrivMonHealthResponse, error) { + rsp, err := c.PrivMonHealth(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParsePrivMonHealthResponse(rsp) +} - if raw, found := object["description"]; found { - err = json.Unmarshal(raw, &a.Description) - if err != nil { - return fmt.Errorf("error reading 'description': %w", err) - } - delete(object, "description") +// PrivMonPrivilegesWithResponse request returning *PrivMonPrivilegesResponse +func (c *ClientWithResponses) PrivMonPrivilegesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*PrivMonPrivilegesResponse, error) { + rsp, err := c.PrivMonPrivileges(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParsePrivMonPrivilegesResponse(rsp) +} - if raw, found := object["discovery"]; found { - err = json.Unmarshal(raw, &a.Discovery) - if err != nil { - return fmt.Errorf("error reading 'discovery': %w", err) - } - delete(object, "discovery") +// CreatePrivMonUserWithBodyWithResponse request with arbitrary body returning *CreatePrivMonUserResponse +func (c *ClientWithResponses) CreatePrivMonUserWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreatePrivMonUserResponse, error) { + rsp, err := c.CreatePrivMonUserWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreatePrivMonUserResponse(rsp) +} - if raw, found := object["download"]; found { - err = json.Unmarshal(raw, &a.Download) - if err != nil { - return fmt.Errorf("error reading 'download': %w", err) - } - delete(object, "download") +func (c *ClientWithResponses) CreatePrivMonUserWithResponse(ctx context.Context, body CreatePrivMonUserJSONRequestBody, reqEditors ...RequestEditorFn) (*CreatePrivMonUserResponse, error) { + rsp, err := c.CreatePrivMonUser(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseCreatePrivMonUserResponse(rsp) +} - if raw, found := object["elasticsearch"]; found { - err = json.Unmarshal(raw, &a.Elasticsearch) - if err != nil { - return fmt.Errorf("error reading 'elasticsearch': %w", err) - } - delete(object, "elasticsearch") +// PrivmonBulkUploadUsersCSVWithBodyWithResponse request with arbitrary body returning *PrivmonBulkUploadUsersCSVResponse +func (c *ClientWithResponses) PrivmonBulkUploadUsersCSVWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PrivmonBulkUploadUsersCSVResponse, error) { + rsp, err := c.PrivmonBulkUploadUsersCSVWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePrivmonBulkUploadUsersCSVResponse(rsp) +} - if raw, found := object["format_version"]; found { - err = json.Unmarshal(raw, &a.FormatVersion) - if err != nil { - return fmt.Errorf("error reading 'format_version': %w", err) - } - delete(object, "format_version") +// ListPrivMonUsersWithResponse request returning *ListPrivMonUsersResponse +func (c *ClientWithResponses) ListPrivMonUsersWithResponse(ctx context.Context, params *ListPrivMonUsersParams, reqEditors ...RequestEditorFn) (*ListPrivMonUsersResponse, error) { + rsp, err := c.ListPrivMonUsers(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseListPrivMonUsersResponse(rsp) +} - if raw, found := object["icons"]; found { - err = json.Unmarshal(raw, &a.Icons) - if err != nil { - return fmt.Errorf("error reading 'icons': %w", err) - } - delete(object, "icons") +// DeletePrivMonUserWithResponse request returning *DeletePrivMonUserResponse +func (c *ClientWithResponses) DeletePrivMonUserWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeletePrivMonUserResponse, error) { + rsp, err := c.DeletePrivMonUser(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseDeletePrivMonUserResponse(rsp) +} - if raw, found := object["installationInfo"]; found { - err = json.Unmarshal(raw, &a.InstallationInfo) - if err != nil { - return fmt.Errorf("error reading 'installationInfo': %w", err) - } - delete(object, "installationInfo") +// UpdatePrivMonUserWithBodyWithResponse request with arbitrary body returning *UpdatePrivMonUserResponse +func (c *ClientWithResponses) UpdatePrivMonUserWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdatePrivMonUserResponse, error) { + rsp, err := c.UpdatePrivMonUserWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdatePrivMonUserResponse(rsp) +} - if raw, found := object["internal"]; found { - err = json.Unmarshal(raw, &a.Internal) - if err != nil { - return fmt.Errorf("error reading 'internal': %w", err) - } - delete(object, "internal") +func (c *ClientWithResponses) UpdatePrivMonUserWithResponse(ctx context.Context, id string, body UpdatePrivMonUserJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdatePrivMonUserResponse, error) { + rsp, err := c.UpdatePrivMonUser(ctx, id, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseUpdatePrivMonUserResponse(rsp) +} - if raw, found := object["keepPoliciesUpToDate"]; found { - err = json.Unmarshal(raw, &a.KeepPoliciesUpToDate) - if err != nil { - return fmt.Errorf("error reading 'keepPoliciesUpToDate': %w", err) - } - delete(object, "keepPoliciesUpToDate") +// InstallPrivilegedAccessDetectionPackageWithResponse request returning *InstallPrivilegedAccessDetectionPackageResponse +func (c *ClientWithResponses) InstallPrivilegedAccessDetectionPackageWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*InstallPrivilegedAccessDetectionPackageResponse, error) { + rsp, err := c.InstallPrivilegedAccessDetectionPackage(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseInstallPrivilegedAccessDetectionPackageResponse(rsp) +} - if raw, found := object["latestVersion"]; found { - err = json.Unmarshal(raw, &a.LatestVersion) - if err != nil { - return fmt.Errorf("error reading 'latestVersion': %w", err) - } - delete(object, "latestVersion") +// GetPrivilegedAccessDetectionPackageStatusWithResponse request returning *GetPrivilegedAccessDetectionPackageStatusResponse +func (c *ClientWithResponses) GetPrivilegedAccessDetectionPackageStatusWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetPrivilegedAccessDetectionPackageStatusResponse, error) { + rsp, err := c.GetPrivilegedAccessDetectionPackageStatus(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseGetPrivilegedAccessDetectionPackageStatusResponse(rsp) +} - if raw, found := object["license"]; found { - err = json.Unmarshal(raw, &a.License) - if err != nil { - return fmt.Errorf("error reading 'license': %w", err) - } - delete(object, "license") +// InitEntityStoreWithBodyWithResponse request with arbitrary body returning *InitEntityStoreResponse +func (c *ClientWithResponses) InitEntityStoreWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*InitEntityStoreResponse, error) { + rsp, err := c.InitEntityStoreWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseInitEntityStoreResponse(rsp) +} - if raw, found := object["licensePath"]; found { - err = json.Unmarshal(raw, &a.LicensePath) - if err != nil { - return fmt.Errorf("error reading 'licensePath': %w", err) - } - delete(object, "licensePath") +func (c *ClientWithResponses) InitEntityStoreWithResponse(ctx context.Context, body InitEntityStoreJSONRequestBody, reqEditors ...RequestEditorFn) (*InitEntityStoreResponse, error) { + rsp, err := c.InitEntityStore(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseInitEntityStoreResponse(rsp) +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// ListEntityEnginesWithResponse request returning *ListEntityEnginesResponse +func (c *ClientWithResponses) ListEntityEnginesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ListEntityEnginesResponse, error) { + rsp, err := c.ListEntityEngines(ctx, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseListEntityEnginesResponse(rsp) +} - if raw, found := object["notice"]; found { - err = json.Unmarshal(raw, &a.Notice) - if err != nil { - return fmt.Errorf("error reading 'notice': %w", err) - } - delete(object, "notice") +// ApplyEntityEngineDataviewIndicesWithResponse request returning *ApplyEntityEngineDataviewIndicesResponse +func (c *ClientWithResponses) ApplyEntityEngineDataviewIndicesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ApplyEntityEngineDataviewIndicesResponse, error) { + rsp, err := c.ApplyEntityEngineDataviewIndices(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseApplyEntityEngineDataviewIndicesResponse(rsp) +} - if raw, found := object["owner"]; found { - err = json.Unmarshal(raw, &a.Owner) - if err != nil { - return fmt.Errorf("error reading 'owner': %w", err) - } - delete(object, "owner") +// DeleteEntityEngineWithResponse request returning *DeleteEntityEngineResponse +func (c *ClientWithResponses) DeleteEntityEngineWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, params *DeleteEntityEngineParams, reqEditors ...RequestEditorFn) (*DeleteEntityEngineResponse, error) { + rsp, err := c.DeleteEntityEngine(ctx, entityType, params, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteEntityEngineResponse(rsp) +} - if raw, found := object["path"]; found { - err = json.Unmarshal(raw, &a.Path) - if err != nil { - return fmt.Errorf("error reading 'path': %w", err) - } - delete(object, "path") +// GetEntityEngineWithResponse request returning *GetEntityEngineResponse +func (c *ClientWithResponses) GetEntityEngineWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*GetEntityEngineResponse, error) { + rsp, err := c.GetEntityEngine(ctx, entityType, reqEditors...) + if err != nil { + return nil, err } + return ParseGetEntityEngineResponse(rsp) +} - if raw, found := object["policy_templates"]; found { - err = json.Unmarshal(raw, &a.PolicyTemplates) - if err != nil { - return fmt.Errorf("error reading 'policy_templates': %w", err) - } - delete(object, "policy_templates") +// InitEntityEngineWithBodyWithResponse request with arbitrary body returning *InitEntityEngineResponse +func (c *ClientWithResponses) InitEntityEngineWithBodyWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*InitEntityEngineResponse, error) { + rsp, err := c.InitEntityEngineWithBody(ctx, entityType, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseInitEntityEngineResponse(rsp) +} - if raw, found := object["readme"]; found { - err = json.Unmarshal(raw, &a.Readme) - if err != nil { - return fmt.Errorf("error reading 'readme': %w", err) - } - delete(object, "readme") +func (c *ClientWithResponses) InitEntityEngineWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, body InitEntityEngineJSONRequestBody, reqEditors ...RequestEditorFn) (*InitEntityEngineResponse, error) { + rsp, err := c.InitEntityEngine(ctx, entityType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseInitEntityEngineResponse(rsp) +} - if raw, found := object["release"]; found { - err = json.Unmarshal(raw, &a.Release) - if err != nil { - return fmt.Errorf("error reading 'release': %w", err) - } - delete(object, "release") +// StartEntityEngineWithResponse request returning *StartEntityEngineResponse +func (c *ClientWithResponses) StartEntityEngineWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*StartEntityEngineResponse, error) { + rsp, err := c.StartEntityEngine(ctx, entityType, reqEditors...) + if err != nil { + return nil, err } + return ParseStartEntityEngineResponse(rsp) +} - if raw, found := object["screenshots"]; found { - err = json.Unmarshal(raw, &a.Screenshots) - if err != nil { - return fmt.Errorf("error reading 'screenshots': %w", err) - } - delete(object, "screenshots") +// StopEntityEngineWithResponse request returning *StopEntityEngineResponse +func (c *ClientWithResponses) StopEntityEngineWithResponse(ctx context.Context, entityType SecurityEntityAnalyticsAPIEntityType, reqEditors ...RequestEditorFn) (*StopEntityEngineResponse, error) { + rsp, err := c.StopEntityEngine(ctx, entityType, reqEditors...) + if err != nil { + return nil, err } + return ParseStopEntityEngineResponse(rsp) +} - if raw, found := object["signature_path"]; found { - err = json.Unmarshal(raw, &a.SignaturePath) - if err != nil { - return fmt.Errorf("error reading 'signature_path': %w", err) - } - delete(object, "signature_path") +// ListEntitiesWithResponse request returning *ListEntitiesResponse +func (c *ClientWithResponses) ListEntitiesWithResponse(ctx context.Context, params *ListEntitiesParams, reqEditors ...RequestEditorFn) (*ListEntitiesResponse, error) { + rsp, err := c.ListEntities(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseListEntitiesResponse(rsp) +} - if raw, found := object["source"]; found { - err = json.Unmarshal(raw, &a.Source) - if err != nil { - return fmt.Errorf("error reading 'source': %w", err) - } - delete(object, "source") +// GetEntityStoreStatusWithResponse request returning *GetEntityStoreStatusResponse +func (c *ClientWithResponses) GetEntityStoreStatusWithResponse(ctx context.Context, params *GetEntityStoreStatusParams, reqEditors ...RequestEditorFn) (*GetEntityStoreStatusResponse, error) { + rsp, err := c.GetEntityStoreStatus(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetEntityStoreStatusResponse(rsp) +} - if raw, found := object["status"]; found { - err = json.Unmarshal(raw, &a.Status) - if err != nil { - return fmt.Errorf("error reading 'status': %w", err) - } - delete(object, "status") +// DeleteExceptionListWithResponse request returning *DeleteExceptionListResponse +func (c *ClientWithResponses) DeleteExceptionListWithResponse(ctx context.Context, params *DeleteExceptionListParams, reqEditors ...RequestEditorFn) (*DeleteExceptionListResponse, error) { + rsp, err := c.DeleteExceptionList(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseDeleteExceptionListResponse(rsp) +} - if raw, found := object["title"]; found { - err = json.Unmarshal(raw, &a.Title) - if err != nil { - return fmt.Errorf("error reading 'title': %w", err) - } - delete(object, "title") +// ReadExceptionListWithResponse request returning *ReadExceptionListResponse +func (c *ClientWithResponses) ReadExceptionListWithResponse(ctx context.Context, params *ReadExceptionListParams, reqEditors ...RequestEditorFn) (*ReadExceptionListResponse, error) { + rsp, err := c.ReadExceptionList(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseReadExceptionListResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +// CreateExceptionListWithBodyWithResponse request with arbitrary body returning *CreateExceptionListResponse +func (c *ClientWithResponses) CreateExceptionListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateExceptionListResponse, error) { + rsp, err := c.CreateExceptionListWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateExceptionListResponse(rsp) +} - if raw, found := object["vars"]; found { - err = json.Unmarshal(raw, &a.Vars) - if err != nil { - return fmt.Errorf("error reading 'vars': %w", err) - } - delete(object, "vars") +func (c *ClientWithResponses) CreateExceptionListWithResponse(ctx context.Context, body CreateExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateExceptionListResponse, error) { + rsp, err := c.CreateExceptionList(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateExceptionListResponse(rsp) +} - if raw, found := object["version"]; found { - err = json.Unmarshal(raw, &a.Version) - if err != nil { - return fmt.Errorf("error reading 'version': %w", err) - } - delete(object, "version") +// UpdateExceptionListWithBodyWithResponse request with arbitrary body returning *UpdateExceptionListResponse +func (c *ClientWithResponses) UpdateExceptionListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateExceptionListResponse, error) { + rsp, err := c.UpdateExceptionListWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateExceptionListResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) UpdateExceptionListWithResponse(ctx context.Context, body UpdateExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateExceptionListResponse, error) { + rsp, err := c.UpdateExceptionList(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } - return nil + return ParseUpdateExceptionListResponse(rsp) } -// Override default JSON handling for PackageInfo to handle AdditionalProperties -func (a PackageInfo) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// DuplicateExceptionListWithResponse request returning *DuplicateExceptionListResponse +func (c *ClientWithResponses) DuplicateExceptionListWithResponse(ctx context.Context, params *DuplicateExceptionListParams, reqEditors ...RequestEditorFn) (*DuplicateExceptionListResponse, error) { + rsp, err := c.DuplicateExceptionList(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseDuplicateExceptionListResponse(rsp) +} - if a.Agent != nil { - object["agent"], err = json.Marshal(a.Agent) - if err != nil { - return nil, fmt.Errorf("error marshaling 'agent': %w", err) - } +// ExportExceptionListWithResponse request returning *ExportExceptionListResponse +func (c *ClientWithResponses) ExportExceptionListWithResponse(ctx context.Context, params *ExportExceptionListParams, reqEditors ...RequestEditorFn) (*ExportExceptionListResponse, error) { + rsp, err := c.ExportExceptionList(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseExportExceptionListResponse(rsp) +} - if a.AssetTags != nil { - object["asset_tags"], err = json.Marshal(a.AssetTags) - if err != nil { - return nil, fmt.Errorf("error marshaling 'asset_tags': %w", err) - } +// FindExceptionListsWithResponse request returning *FindExceptionListsResponse +func (c *ClientWithResponses) FindExceptionListsWithResponse(ctx context.Context, params *FindExceptionListsParams, reqEditors ...RequestEditorFn) (*FindExceptionListsResponse, error) { + rsp, err := c.FindExceptionLists(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseFindExceptionListsResponse(rsp) +} - object["assets"], err = json.Marshal(a.Assets) +// ImportExceptionListWithBodyWithResponse request with arbitrary body returning *ImportExceptionListResponse +func (c *ClientWithResponses) ImportExceptionListWithBodyWithResponse(ctx context.Context, params *ImportExceptionListParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ImportExceptionListResponse, error) { + rsp, err := c.ImportExceptionListWithBody(ctx, params, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'assets': %w", err) + return nil, err } + return ParseImportExceptionListResponse(rsp) +} - if a.Categories != nil { - object["categories"], err = json.Marshal(a.Categories) - if err != nil { - return nil, fmt.Errorf("error marshaling 'categories': %w", err) - } +// DeleteExceptionListItemWithResponse request returning *DeleteExceptionListItemResponse +func (c *ClientWithResponses) DeleteExceptionListItemWithResponse(ctx context.Context, params *DeleteExceptionListItemParams, reqEditors ...RequestEditorFn) (*DeleteExceptionListItemResponse, error) { + rsp, err := c.DeleteExceptionListItem(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseDeleteExceptionListItemResponse(rsp) +} - if a.Conditions != nil { - object["conditions"], err = json.Marshal(a.Conditions) - if err != nil { - return nil, fmt.Errorf("error marshaling 'conditions': %w", err) - } +// ReadExceptionListItemWithResponse request returning *ReadExceptionListItemResponse +func (c *ClientWithResponses) ReadExceptionListItemWithResponse(ctx context.Context, params *ReadExceptionListItemParams, reqEditors ...RequestEditorFn) (*ReadExceptionListItemResponse, error) { + rsp, err := c.ReadExceptionListItem(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseReadExceptionListItemResponse(rsp) +} - if a.DataStreams != nil { - object["data_streams"], err = json.Marshal(a.DataStreams) - if err != nil { - return nil, fmt.Errorf("error marshaling 'data_streams': %w", err) - } +// CreateExceptionListItemWithBodyWithResponse request with arbitrary body returning *CreateExceptionListItemResponse +func (c *ClientWithResponses) CreateExceptionListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateExceptionListItemResponse, error) { + rsp, err := c.CreateExceptionListItemWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateExceptionListItemResponse(rsp) +} - if a.Description != nil { - object["description"], err = json.Marshal(a.Description) - if err != nil { - return nil, fmt.Errorf("error marshaling 'description': %w", err) - } +func (c *ClientWithResponses) CreateExceptionListItemWithResponse(ctx context.Context, body CreateExceptionListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateExceptionListItemResponse, error) { + rsp, err := c.CreateExceptionListItem(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateExceptionListItemResponse(rsp) +} - if a.Discovery != nil { - object["discovery"], err = json.Marshal(a.Discovery) - if err != nil { - return nil, fmt.Errorf("error marshaling 'discovery': %w", err) - } +// UpdateExceptionListItemWithBodyWithResponse request with arbitrary body returning *UpdateExceptionListItemResponse +func (c *ClientWithResponses) UpdateExceptionListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateExceptionListItemResponse, error) { + rsp, err := c.UpdateExceptionListItemWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateExceptionListItemResponse(rsp) +} - if a.Download != nil { - object["download"], err = json.Marshal(a.Download) - if err != nil { - return nil, fmt.Errorf("error marshaling 'download': %w", err) - } +func (c *ClientWithResponses) UpdateExceptionListItemWithResponse(ctx context.Context, body UpdateExceptionListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateExceptionListItemResponse, error) { + rsp, err := c.UpdateExceptionListItem(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseUpdateExceptionListItemResponse(rsp) +} - if a.Elasticsearch != nil { - object["elasticsearch"], err = json.Marshal(a.Elasticsearch) - if err != nil { - return nil, fmt.Errorf("error marshaling 'elasticsearch': %w", err) - } +// FindExceptionListItemsWithResponse request returning *FindExceptionListItemsResponse +func (c *ClientWithResponses) FindExceptionListItemsWithResponse(ctx context.Context, params *FindExceptionListItemsParams, reqEditors ...RequestEditorFn) (*FindExceptionListItemsResponse, error) { + rsp, err := c.FindExceptionListItems(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseFindExceptionListItemsResponse(rsp) +} - if a.FormatVersion != nil { - object["format_version"], err = json.Marshal(a.FormatVersion) - if err != nil { - return nil, fmt.Errorf("error marshaling 'format_version': %w", err) - } +// ReadExceptionListSummaryWithResponse request returning *ReadExceptionListSummaryResponse +func (c *ClientWithResponses) ReadExceptionListSummaryWithResponse(ctx context.Context, params *ReadExceptionListSummaryParams, reqEditors ...RequestEditorFn) (*ReadExceptionListSummaryResponse, error) { + rsp, err := c.ReadExceptionListSummary(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseReadExceptionListSummaryResponse(rsp) +} - if a.Icons != nil { - object["icons"], err = json.Marshal(a.Icons) - if err != nil { - return nil, fmt.Errorf("error marshaling 'icons': %w", err) - } +// CreateSharedExceptionListWithBodyWithResponse request with arbitrary body returning *CreateSharedExceptionListResponse +func (c *ClientWithResponses) CreateSharedExceptionListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateSharedExceptionListResponse, error) { + rsp, err := c.CreateSharedExceptionListWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateSharedExceptionListResponse(rsp) +} - if a.InstallationInfo != nil { - object["installationInfo"], err = json.Marshal(a.InstallationInfo) - if err != nil { - return nil, fmt.Errorf("error marshaling 'installationInfo': %w", err) - } +func (c *ClientWithResponses) CreateSharedExceptionListWithResponse(ctx context.Context, body CreateSharedExceptionListJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateSharedExceptionListResponse, error) { + rsp, err := c.CreateSharedExceptionList(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateSharedExceptionListResponse(rsp) +} - if a.Internal != nil { - object["internal"], err = json.Marshal(a.Internal) - if err != nil { - return nil, fmt.Errorf("error marshaling 'internal': %w", err) - } +// GetFeaturesWithResponse request returning *GetFeaturesResponse +func (c *ClientWithResponses) GetFeaturesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFeaturesResponse, error) { + rsp, err := c.GetFeatures(ctx, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetFeaturesResponse(rsp) +} - if a.KeepPoliciesUpToDate != nil { - object["keepPoliciesUpToDate"], err = json.Marshal(a.KeepPoliciesUpToDate) - if err != nil { - return nil, fmt.Errorf("error marshaling 'keepPoliciesUpToDate': %w", err) - } +// GetFleetAgentDownloadSourcesWithResponse request returning *GetFleetAgentDownloadSourcesResponse +func (c *ClientWithResponses) GetFleetAgentDownloadSourcesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetAgentDownloadSourcesResponse, error) { + rsp, err := c.GetFleetAgentDownloadSources(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetAgentDownloadSourcesResponse(rsp) +} - if a.LatestVersion != nil { - object["latestVersion"], err = json.Marshal(a.LatestVersion) - if err != nil { - return nil, fmt.Errorf("error marshaling 'latestVersion': %w", err) - } +// PostFleetAgentDownloadSourcesWithBodyWithResponse request with arbitrary body returning *PostFleetAgentDownloadSourcesResponse +func (c *ClientWithResponses) PostFleetAgentDownloadSourcesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentDownloadSourcesResponse, error) { + rsp, err := c.PostFleetAgentDownloadSourcesWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetAgentDownloadSourcesResponse(rsp) +} - if a.License != nil { - object["license"], err = json.Marshal(a.License) - if err != nil { - return nil, fmt.Errorf("error marshaling 'license': %w", err) - } +func (c *ClientWithResponses) PostFleetAgentDownloadSourcesWithResponse(ctx context.Context, body PostFleetAgentDownloadSourcesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentDownloadSourcesResponse, error) { + rsp, err := c.PostFleetAgentDownloadSources(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetAgentDownloadSourcesResponse(rsp) +} - if a.LicensePath != nil { - object["licensePath"], err = json.Marshal(a.LicensePath) - if err != nil { - return nil, fmt.Errorf("error marshaling 'licensePath': %w", err) - } +// DeleteFleetAgentDownloadSourcesSourceidWithResponse request returning *DeleteFleetAgentDownloadSourcesSourceidResponse +func (c *ClientWithResponses) DeleteFleetAgentDownloadSourcesSourceidWithResponse(ctx context.Context, sourceId string, reqEditors ...RequestEditorFn) (*DeleteFleetAgentDownloadSourcesSourceidResponse, error) { + rsp, err := c.DeleteFleetAgentDownloadSourcesSourceid(ctx, sourceId, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteFleetAgentDownloadSourcesSourceidResponse(rsp) +} - object["name"], err = json.Marshal(a.Name) +// GetFleetAgentDownloadSourcesSourceidWithResponse request returning *GetFleetAgentDownloadSourcesSourceidResponse +func (c *ClientWithResponses) GetFleetAgentDownloadSourcesSourceidWithResponse(ctx context.Context, sourceId string, reqEditors ...RequestEditorFn) (*GetFleetAgentDownloadSourcesSourceidResponse, error) { + rsp, err := c.GetFleetAgentDownloadSourcesSourceid(ctx, sourceId, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) + return nil, err } + return ParseGetFleetAgentDownloadSourcesSourceidResponse(rsp) +} - if a.Notice != nil { - object["notice"], err = json.Marshal(a.Notice) - if err != nil { - return nil, fmt.Errorf("error marshaling 'notice': %w", err) - } +// PutFleetAgentDownloadSourcesSourceidWithBodyWithResponse request with arbitrary body returning *PutFleetAgentDownloadSourcesSourceidResponse +func (c *ClientWithResponses) PutFleetAgentDownloadSourcesSourceidWithBodyWithResponse(ctx context.Context, sourceId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetAgentDownloadSourcesSourceidResponse, error) { + rsp, err := c.PutFleetAgentDownloadSourcesSourceidWithBody(ctx, sourceId, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutFleetAgentDownloadSourcesSourceidResponse(rsp) +} - if a.Owner != nil { - object["owner"], err = json.Marshal(a.Owner) - if err != nil { - return nil, fmt.Errorf("error marshaling 'owner': %w", err) - } +func (c *ClientWithResponses) PutFleetAgentDownloadSourcesSourceidWithResponse(ctx context.Context, sourceId string, body PutFleetAgentDownloadSourcesSourceidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetAgentDownloadSourcesSourceidResponse, error) { + rsp, err := c.PutFleetAgentDownloadSourcesSourceid(ctx, sourceId, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutFleetAgentDownloadSourcesSourceidResponse(rsp) +} - if a.Path != nil { - object["path"], err = json.Marshal(a.Path) - if err != nil { - return nil, fmt.Errorf("error marshaling 'path': %w", err) - } +// GetFleetAgentPoliciesWithResponse request returning *GetFleetAgentPoliciesResponse +func (c *ClientWithResponses) GetFleetAgentPoliciesWithResponse(ctx context.Context, params *GetFleetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesResponse, error) { + rsp, err := c.GetFleetAgentPolicies(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetAgentPoliciesResponse(rsp) +} - if a.PolicyTemplates != nil { - object["policy_templates"], err = json.Marshal(a.PolicyTemplates) - if err != nil { - return nil, fmt.Errorf("error marshaling 'policy_templates': %w", err) - } +// PostFleetAgentPoliciesWithBodyWithResponse request with arbitrary body returning *PostFleetAgentPoliciesResponse +func (c *ClientWithResponses) PostFleetAgentPoliciesWithBodyWithResponse(ctx context.Context, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesResponse, error) { + rsp, err := c.PostFleetAgentPoliciesWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetAgentPoliciesResponse(rsp) +} - if a.Readme != nil { - object["readme"], err = json.Marshal(a.Readme) - if err != nil { - return nil, fmt.Errorf("error marshaling 'readme': %w", err) - } +func (c *ClientWithResponses) PostFleetAgentPoliciesWithResponse(ctx context.Context, params *PostFleetAgentPoliciesParams, body PostFleetAgentPoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesResponse, error) { + rsp, err := c.PostFleetAgentPolicies(ctx, params, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostFleetAgentPoliciesResponse(rsp) +} - if a.Release != nil { - object["release"], err = json.Marshal(a.Release) - if err != nil { - return nil, fmt.Errorf("error marshaling 'release': %w", err) - } +// PostFleetAgentPoliciesBulkGetWithBodyWithResponse request with arbitrary body returning *PostFleetAgentPoliciesBulkGetResponse +func (c *ClientWithResponses) PostFleetAgentPoliciesBulkGetWithBodyWithResponse(ctx context.Context, params *PostFleetAgentPoliciesBulkGetParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesBulkGetResponse, error) { + rsp, err := c.PostFleetAgentPoliciesBulkGetWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetAgentPoliciesBulkGetResponse(rsp) +} - if a.Screenshots != nil { - object["screenshots"], err = json.Marshal(a.Screenshots) - if err != nil { - return nil, fmt.Errorf("error marshaling 'screenshots': %w", err) - } +func (c *ClientWithResponses) PostFleetAgentPoliciesBulkGetWithResponse(ctx context.Context, params *PostFleetAgentPoliciesBulkGetParams, body PostFleetAgentPoliciesBulkGetJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesBulkGetResponse, error) { + rsp, err := c.PostFleetAgentPoliciesBulkGet(ctx, params, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetAgentPoliciesBulkGetResponse(rsp) +} - if a.SignaturePath != nil { - object["signature_path"], err = json.Marshal(a.SignaturePath) - if err != nil { - return nil, fmt.Errorf("error marshaling 'signature_path': %w", err) - } +// PostFleetAgentPoliciesDeleteWithBodyWithResponse request with arbitrary body returning *PostFleetAgentPoliciesDeleteResponse +func (c *ClientWithResponses) PostFleetAgentPoliciesDeleteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesDeleteResponse, error) { + rsp, err := c.PostFleetAgentPoliciesDeleteWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetAgentPoliciesDeleteResponse(rsp) +} - if a.Source != nil { - object["source"], err = json.Marshal(a.Source) - if err != nil { - return nil, fmt.Errorf("error marshaling 'source': %w", err) - } +func (c *ClientWithResponses) PostFleetAgentPoliciesDeleteWithResponse(ctx context.Context, body PostFleetAgentPoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesDeleteResponse, error) { + rsp, err := c.PostFleetAgentPoliciesDelete(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetAgentPoliciesDeleteResponse(rsp) +} - if a.Status != nil { - object["status"], err = json.Marshal(a.Status) - if err != nil { - return nil, fmt.Errorf("error marshaling 'status': %w", err) - } +// PostFleetAgentPoliciesOutputsWithBodyWithResponse request with arbitrary body returning *PostFleetAgentPoliciesOutputsResponse +func (c *ClientWithResponses) PostFleetAgentPoliciesOutputsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesOutputsResponse, error) { + rsp, err := c.PostFleetAgentPoliciesOutputsWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostFleetAgentPoliciesOutputsResponse(rsp) +} - object["title"], err = json.Marshal(a.Title) +func (c *ClientWithResponses) PostFleetAgentPoliciesOutputsWithResponse(ctx context.Context, body PostFleetAgentPoliciesOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesOutputsResponse, error) { + rsp, err := c.PostFleetAgentPoliciesOutputs(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'title': %w", err) + return nil, err } + return ParsePostFleetAgentPoliciesOutputsResponse(rsp) +} - if a.Type != nil { - object["type"], err = json.Marshal(a.Type) - if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) - } +// GetFleetAgentPoliciesAgentpolicyidWithResponse request returning *GetFleetAgentPoliciesAgentpolicyidResponse +func (c *ClientWithResponses) GetFleetAgentPoliciesAgentpolicyidWithResponse(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidResponse, error) { + rsp, err := c.GetFleetAgentPoliciesAgentpolicyid(ctx, agentPolicyId, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetAgentPoliciesAgentpolicyidResponse(rsp) +} - if a.Vars != nil { - object["vars"], err = json.Marshal(a.Vars) - if err != nil { - return nil, fmt.Errorf("error marshaling 'vars': %w", err) - } +// PutFleetAgentPoliciesAgentpolicyidWithBodyWithResponse request with arbitrary body returning *PutFleetAgentPoliciesAgentpolicyidResponse +func (c *ClientWithResponses) PutFleetAgentPoliciesAgentpolicyidWithBodyWithResponse(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetAgentPoliciesAgentpolicyidResponse, error) { + rsp, err := c.PutFleetAgentPoliciesAgentpolicyidWithBody(ctx, agentPolicyId, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutFleetAgentPoliciesAgentpolicyidResponse(rsp) +} - object["version"], err = json.Marshal(a.Version) +func (c *ClientWithResponses) PutFleetAgentPoliciesAgentpolicyidWithResponse(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, body PutFleetAgentPoliciesAgentpolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetAgentPoliciesAgentpolicyidResponse, error) { + rsp, err := c.PutFleetAgentPoliciesAgentpolicyid(ctx, agentPolicyId, params, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'version': %w", err) + return nil, err } + return ParsePutFleetAgentPoliciesAgentpolicyidResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusWithResponse request returning *GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse +func (c *ClientWithResponses) GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusWithResponse(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse, error) { + rsp, err := c.GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatus(ctx, agentPolicyId, reqEditors...) 
+ if err != nil { + return nil, err } - return json.Marshal(object) + return ParseGetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse(rsp) } -// Getter for additional properties for PackageInfo_Conditions_Elastic. Returns the specified -// element and whether it was found -func (a PackageInfo_Conditions_Elastic) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// PostFleetAgentPoliciesAgentpolicyidCopyWithBodyWithResponse request with arbitrary body returning *PostFleetAgentPoliciesAgentpolicyidCopyResponse +func (c *ClientWithResponses) PostFleetAgentPoliciesAgentpolicyidCopyWithBodyWithResponse(ctx context.Context, agentPolicyId string, params *PostFleetAgentPoliciesAgentpolicyidCopyParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesAgentpolicyidCopyResponse, error) { + rsp, err := c.PostFleetAgentPoliciesAgentpolicyidCopyWithBody(ctx, agentPolicyId, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePostFleetAgentPoliciesAgentpolicyidCopyResponse(rsp) } -// Setter for additional properties for PackageInfo_Conditions_Elastic -func (a *PackageInfo_Conditions_Elastic) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) PostFleetAgentPoliciesAgentpolicyidCopyWithResponse(ctx context.Context, agentPolicyId string, params *PostFleetAgentPoliciesAgentpolicyidCopyParams, body PostFleetAgentPoliciesAgentpolicyidCopyJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesAgentpolicyidCopyResponse, error) { + rsp, err := c.PostFleetAgentPoliciesAgentpolicyidCopy(ctx, agentPolicyId, params, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostFleetAgentPoliciesAgentpolicyidCopyResponse(rsp) } -// Override default JSON handling for PackageInfo_Conditions_Elastic to handle AdditionalProperties -func (a *PackageInfo_Conditions_Elastic) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// GetFleetAgentPoliciesAgentpolicyidDownloadWithResponse request returning *GetFleetAgentPoliciesAgentpolicyidDownloadResponse +func (c *ClientWithResponses) GetFleetAgentPoliciesAgentpolicyidDownloadWithResponse(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidDownloadParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidDownloadResponse, error) { + rsp, err := c.GetFleetAgentPoliciesAgentpolicyidDownload(ctx, agentPolicyId, params, reqEditors...) 
if err != nil { - return err + return nil, err } + return ParseGetFleetAgentPoliciesAgentpolicyidDownloadResponse(rsp) +} - if raw, found := object["capabilities"]; found { - err = json.Unmarshal(raw, &a.Capabilities) - if err != nil { - return fmt.Errorf("error reading 'capabilities': %w", err) - } - delete(object, "capabilities") +// GetFleetAgentPoliciesAgentpolicyidFullWithResponse request returning *GetFleetAgentPoliciesAgentpolicyidFullResponse +func (c *ClientWithResponses) GetFleetAgentPoliciesAgentpolicyidFullWithResponse(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidFullParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidFullResponse, error) { + rsp, err := c.GetFleetAgentPoliciesAgentpolicyidFull(ctx, agentPolicyId, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetAgentPoliciesAgentpolicyidFullResponse(rsp) +} - if raw, found := object["subscription"]; found { - err = json.Unmarshal(raw, &a.Subscription) - if err != nil { - return fmt.Errorf("error reading 'subscription': %w", err) - } - delete(object, "subscription") +// GetFleetAgentPoliciesAgentpolicyidOutputsWithResponse request returning *GetFleetAgentPoliciesAgentpolicyidOutputsResponse +func (c *ClientWithResponses) GetFleetAgentPoliciesAgentpolicyidOutputsWithResponse(ctx context.Context, agentPolicyId string, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidOutputsResponse, error) { + rsp, err := c.GetFleetAgentPoliciesAgentpolicyidOutputs(ctx, agentPolicyId, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetAgentPoliciesAgentpolicyidOutputsResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// GetFleetAgentStatusWithResponse request returning *GetFleetAgentStatusResponse +func (c *ClientWithResponses) GetFleetAgentStatusWithResponse(ctx context.Context, params *GetFleetAgentStatusParams, reqEditors ...RequestEditorFn) (*GetFleetAgentStatusResponse, error) { + rsp, err := c.GetFleetAgentStatus(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseGetFleetAgentStatusResponse(rsp) } -// Override default JSON handling for PackageInfo_Conditions_Elastic to handle AdditionalProperties -func (a PackageInfo_Conditions_Elastic) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - if a.Capabilities != nil { - object["capabilities"], err = json.Marshal(a.Capabilities) - if err != nil { - return nil, fmt.Errorf("error marshaling 'capabilities': %w", err) - } +// GetFleetAgentStatusDataWithResponse request returning *GetFleetAgentStatusDataResponse +func (c *ClientWithResponses) GetFleetAgentStatusDataWithResponse(ctx context.Context, params *GetFleetAgentStatusDataParams, reqEditors ...RequestEditorFn) (*GetFleetAgentStatusDataResponse, error) { + rsp, err := c.GetFleetAgentStatusData(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetFleetAgentStatusDataResponse(rsp) +} - if a.Subscription != nil { - object["subscription"], err = json.Marshal(a.Subscription) - if err != nil { - return nil, fmt.Errorf("error marshaling 'subscription': %w", err) - } +// GetFleetAgentsWithResponse request returning *GetFleetAgentsResponse +func (c *ClientWithResponses) GetFleetAgentsWithResponse(ctx context.Context, params *GetFleetAgentsParams, reqEditors ...RequestEditorFn) (*GetFleetAgentsResponse, error) { + rsp, err := c.GetFleetAgents(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetAgentsResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// PostFleetAgentsWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsResponse +func (c *ClientWithResponses) PostFleetAgentsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsResponse, error) { + rsp, err := c.PostFleetAgentsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostFleetAgentsResponse(rsp) } -// Getter for additional properties for PackageInfo_Conditions_Kibana. Returns the specified -// element and whether it was found -func (a PackageInfo_Conditions_Kibana) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) PostFleetAgentsWithResponse(ctx context.Context, body PostFleetAgentsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsResponse, error) { + rsp, err := c.PostFleetAgents(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePostFleetAgentsResponse(rsp) } -// Setter for additional properties for PackageInfo_Conditions_Kibana -func (a *PackageInfo_Conditions_Kibana) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// GetFleetAgentsActionStatusWithResponse request returning *GetFleetAgentsActionStatusResponse +func (c *ClientWithResponses) GetFleetAgentsActionStatusWithResponse(ctx context.Context, params *GetFleetAgentsActionStatusParams, reqEditors ...RequestEditorFn) (*GetFleetAgentsActionStatusResponse, error) { + rsp, err := c.GetFleetAgentsActionStatus(ctx, params, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseGetFleetAgentsActionStatusResponse(rsp) } -// Override default JSON handling for PackageInfo_Conditions_Kibana to handle AdditionalProperties -func (a *PackageInfo_Conditions_Kibana) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PostFleetAgentsActionsActionidCancelWithResponse request returning *PostFleetAgentsActionsActionidCancelResponse +func (c *ClientWithResponses) PostFleetAgentsActionsActionidCancelWithResponse(ctx context.Context, actionId string, reqEditors ...RequestEditorFn) (*PostFleetAgentsActionsActionidCancelResponse, error) { + rsp, err := c.PostFleetAgentsActionsActionidCancel(ctx, actionId, reqEditors...) 
if err != nil { - return err + return nil, err } + return ParsePostFleetAgentsActionsActionidCancelResponse(rsp) +} - if raw, found := object["version"]; found { - err = json.Unmarshal(raw, &a.Version) - if err != nil { - return fmt.Errorf("error reading 'version': %w", err) - } - delete(object, "version") +// GetFleetAgentsAvailableVersionsWithResponse request returning *GetFleetAgentsAvailableVersionsResponse +func (c *ClientWithResponses) GetFleetAgentsAvailableVersionsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetAgentsAvailableVersionsResponse, error) { + rsp, err := c.GetFleetAgentsAvailableVersions(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetAgentsAvailableVersionsResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PostFleetAgentsBulkReassignWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsBulkReassignResponse +func (c *ClientWithResponses) PostFleetAgentsBulkReassignWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkReassignResponse, error) { + rsp, err := c.PostFleetAgentsBulkReassignWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePostFleetAgentsBulkReassignResponse(rsp) } -// Override default JSON handling for PackageInfo_Conditions_Kibana to handle AdditionalProperties -func (a PackageInfo_Conditions_Kibana) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) PostFleetAgentsBulkReassignWithResponse(ctx context.Context, body PostFleetAgentsBulkReassignJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkReassignResponse, error) { + rsp, err := c.PostFleetAgentsBulkReassign(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostFleetAgentsBulkReassignResponse(rsp) +} - if a.Version != nil { - object["version"], err = json.Marshal(a.Version) - if err != nil { - return nil, fmt.Errorf("error marshaling 'version': %w", err) - } +// PostFleetAgentsBulkRequestDiagnosticsWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsBulkRequestDiagnosticsResponse +func (c *ClientWithResponses) PostFleetAgentsBulkRequestDiagnosticsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkRequestDiagnosticsResponse, error) { + rsp, err := c.PostFleetAgentsBulkRequestDiagnosticsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetAgentsBulkRequestDiagnosticsResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) PostFleetAgentsBulkRequestDiagnosticsWithResponse(ctx context.Context, body PostFleetAgentsBulkRequestDiagnosticsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkRequestDiagnosticsResponse, error) { + rsp, err := c.PostFleetAgentsBulkRequestDiagnostics(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostFleetAgentsBulkRequestDiagnosticsResponse(rsp) } -// Getter for additional properties for PackageInfo_Conditions. Returns the specified -// element and whether it was found -func (a PackageInfo_Conditions) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// PostFleetAgentsBulkUnenrollWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsBulkUnenrollResponse +func (c *ClientWithResponses) PostFleetAgentsBulkUnenrollWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUnenrollResponse, error) { + rsp, err := c.PostFleetAgentsBulkUnenrollWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePostFleetAgentsBulkUnenrollResponse(rsp) } -// Setter for additional properties for PackageInfo_Conditions -func (a *PackageInfo_Conditions) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) PostFleetAgentsBulkUnenrollWithResponse(ctx context.Context, body PostFleetAgentsBulkUnenrollJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUnenrollResponse, error) { + rsp, err := c.PostFleetAgentsBulkUnenroll(ctx, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostFleetAgentsBulkUnenrollResponse(rsp) } -// Override default JSON handling for PackageInfo_Conditions to handle AdditionalProperties -func (a *PackageInfo_Conditions) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PostFleetAgentsBulkUpdateAgentTagsWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsBulkUpdateAgentTagsResponse +func (c *ClientWithResponses) PostFleetAgentsBulkUpdateAgentTagsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUpdateAgentTagsResponse, error) { + rsp, err := c.PostFleetAgentsBulkUpdateAgentTagsWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePostFleetAgentsBulkUpdateAgentTagsResponse(rsp) +} - if raw, found := object["elastic"]; found { - err = json.Unmarshal(raw, &a.Elastic) - if err != nil { - return fmt.Errorf("error reading 'elastic': %w", err) - } - delete(object, "elastic") +func (c *ClientWithResponses) PostFleetAgentsBulkUpdateAgentTagsWithResponse(ctx context.Context, body PostFleetAgentsBulkUpdateAgentTagsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUpdateAgentTagsResponse, error) { + rsp, err := c.PostFleetAgentsBulkUpdateAgentTags(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostFleetAgentsBulkUpdateAgentTagsResponse(rsp) +} - if raw, found := object["kibana"]; found { - err = json.Unmarshal(raw, &a.Kibana) - if err != nil { - return fmt.Errorf("error reading 'kibana': %w", err) - } - delete(object, "kibana") +// PostFleetAgentsBulkUpgradeWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsBulkUpgradeResponse +func (c *ClientWithResponses) PostFleetAgentsBulkUpgradeWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUpgradeResponse, error) { + rsp, err := c.PostFleetAgentsBulkUpgradeWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetAgentsBulkUpgradeResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PostFleetAgentsBulkUpgradeWithResponse(ctx context.Context, body PostFleetAgentsBulkUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsBulkUpgradeResponse, error) { + rsp, err := c.PostFleetAgentsBulkUpgrade(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePostFleetAgentsBulkUpgradeResponse(rsp) } -// Override default JSON handling for PackageInfo_Conditions to handle AdditionalProperties -func (a PackageInfo_Conditions) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// DeleteFleetAgentsFilesFileidWithResponse request returning *DeleteFleetAgentsFilesFileidResponse +func (c *ClientWithResponses) DeleteFleetAgentsFilesFileidWithResponse(ctx context.Context, fileId string, reqEditors ...RequestEditorFn) (*DeleteFleetAgentsFilesFileidResponse, error) { + rsp, err := c.DeleteFleetAgentsFilesFileid(ctx, fileId, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteFleetAgentsFilesFileidResponse(rsp) +} - if a.Elastic != nil { - object["elastic"], err = json.Marshal(a.Elastic) - if err != nil { - return nil, fmt.Errorf("error marshaling 'elastic': %w", err) - } +// GetFleetAgentsFilesFileidFilenameWithResponse request returning *GetFleetAgentsFilesFileidFilenameResponse +func (c *ClientWithResponses) GetFleetAgentsFilesFileidFilenameWithResponse(ctx context.Context, fileId string, fileName string, reqEditors ...RequestEditorFn) (*GetFleetAgentsFilesFileidFilenameResponse, error) { + rsp, err := c.GetFleetAgentsFilesFileidFilename(ctx, fileId, fileName, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetAgentsFilesFileidFilenameResponse(rsp) +} - if a.Kibana != nil { - object["kibana"], err = json.Marshal(a.Kibana) - if err != nil { - return nil, fmt.Errorf("error marshaling 'kibana': %w", err) - } +// GetFleetAgentsSetupWithResponse request returning *GetFleetAgentsSetupResponse +func (c *ClientWithResponses) GetFleetAgentsSetupWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetAgentsSetupResponse, error) { + rsp, err := c.GetFleetAgentsSetup(ctx, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetFleetAgentsSetupResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// PostFleetAgentsSetupWithResponse request returning *PostFleetAgentsSetupResponse +func (c *ClientWithResponses) PostFleetAgentsSetupWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*PostFleetAgentsSetupResponse, error) { + rsp, err := c.PostFleetAgentsSetup(ctx, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostFleetAgentsSetupResponse(rsp) } -// Getter for additional properties for PackageInfo_Discovery_Fields_Item. Returns the specified -// element and whether it was found -func (a PackageInfo_Discovery_Fields_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// GetFleetAgentsTagsWithResponse request returning *GetFleetAgentsTagsResponse +func (c *ClientWithResponses) GetFleetAgentsTagsWithResponse(ctx context.Context, params *GetFleetAgentsTagsParams, reqEditors ...RequestEditorFn) (*GetFleetAgentsTagsResponse, error) { + rsp, err := c.GetFleetAgentsTags(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return + return ParseGetFleetAgentsTagsResponse(rsp) } -// Setter for additional properties for PackageInfo_Discovery_Fields_Item -func (a *PackageInfo_Discovery_Fields_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// DeleteFleetAgentsAgentidWithResponse request returning *DeleteFleetAgentsAgentidResponse +func (c *ClientWithResponses) DeleteFleetAgentsAgentidWithResponse(ctx context.Context, agentId string, reqEditors ...RequestEditorFn) (*DeleteFleetAgentsAgentidResponse, error) { + rsp, err := c.DeleteFleetAgentsAgentid(ctx, agentId, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseDeleteFleetAgentsAgentidResponse(rsp) } -// Override default JSON handling for PackageInfo_Discovery_Fields_Item to handle AdditionalProperties -func (a *PackageInfo_Discovery_Fields_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// GetFleetAgentsAgentidWithResponse request returning *GetFleetAgentsAgentidResponse +func (c *ClientWithResponses) GetFleetAgentsAgentidWithResponse(ctx context.Context, agentId string, params *GetFleetAgentsAgentidParams, reqEditors ...RequestEditorFn) (*GetFleetAgentsAgentidResponse, error) { + rsp, err := c.GetFleetAgentsAgentid(ctx, agentId, params, reqEditors...) if err != nil { - return err + return nil, err } + return ParseGetFleetAgentsAgentidResponse(rsp) +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// PutFleetAgentsAgentidWithBodyWithResponse request with arbitrary body returning *PutFleetAgentsAgentidResponse +func (c *ClientWithResponses) PutFleetAgentsAgentidWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetAgentsAgentidResponse, error) { + rsp, err := c.PutFleetAgentsAgentidWithBody(ctx, agentId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePutFleetAgentsAgentidResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PutFleetAgentsAgentidWithResponse(ctx context.Context, agentId string, body PutFleetAgentsAgentidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetAgentsAgentidResponse, error) { + rsp, err := c.PutFleetAgentsAgentid(ctx, agentId, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePutFleetAgentsAgentidResponse(rsp) } -// Override default JSON handling for PackageInfo_Discovery_Fields_Item to handle AdditionalProperties -func (a PackageInfo_Discovery_Fields_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// PostFleetAgentsAgentidActionsWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsAgentidActionsResponse +func (c *ClientWithResponses) PostFleetAgentsAgentidActionsWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidActionsResponse, error) { + rsp, err := c.PostFleetAgentsAgentidActionsWithBody(ctx, agentId, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostFleetAgentsAgentidActionsResponse(rsp) +} - object["name"], err = json.Marshal(a.Name) +func (c *ClientWithResponses) PostFleetAgentsAgentidActionsWithResponse(ctx context.Context, agentId string, body PostFleetAgentsAgentidActionsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidActionsResponse, error) { + rsp, err := c.PostFleetAgentsAgentidActions(ctx, agentId, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) + return nil, err } + return ParsePostFleetAgentsAgentidActionsResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// PostFleetAgentsAgentidReassignWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsAgentidReassignResponse +func (c *ClientWithResponses) PostFleetAgentsAgentidReassignWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidReassignResponse, error) { + rsp, err := c.PostFleetAgentsAgentidReassignWithBody(ctx, agentId, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostFleetAgentsAgentidReassignResponse(rsp) } -// Getter for additional properties for PackageInfo_Discovery. 
Returns the specified -// element and whether it was found -func (a PackageInfo_Discovery) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) PostFleetAgentsAgentidReassignWithResponse(ctx context.Context, agentId string, body PostFleetAgentsAgentidReassignJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidReassignResponse, error) { + rsp, err := c.PostFleetAgentsAgentidReassign(ctx, agentId, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePostFleetAgentsAgentidReassignResponse(rsp) } -// Setter for additional properties for PackageInfo_Discovery -func (a *PackageInfo_Discovery) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// PostFleetAgentsAgentidRequestDiagnosticsWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsAgentidRequestDiagnosticsResponse +func (c *ClientWithResponses) PostFleetAgentsAgentidRequestDiagnosticsWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidRequestDiagnosticsResponse, error) { + rsp, err := c.PostFleetAgentsAgentidRequestDiagnosticsWithBody(ctx, agentId, contentType, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostFleetAgentsAgentidRequestDiagnosticsResponse(rsp) } -// Override default JSON handling for PackageInfo_Discovery to handle AdditionalProperties -func (a *PackageInfo_Discovery) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +func (c *ClientWithResponses) PostFleetAgentsAgentidRequestDiagnosticsWithResponse(ctx context.Context, agentId string, body PostFleetAgentsAgentidRequestDiagnosticsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidRequestDiagnosticsResponse, error) { + rsp, err := c.PostFleetAgentsAgentidRequestDiagnostics(ctx, agentId, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePostFleetAgentsAgentidRequestDiagnosticsResponse(rsp) +} - if raw, found := object["fields"]; found { - err = json.Unmarshal(raw, &a.Fields) - if err != nil { - return fmt.Errorf("error reading 'fields': %w", err) - } - delete(object, "fields") +// PostFleetAgentsAgentidUnenrollWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsAgentidUnenrollResponse +func (c *ClientWithResponses) PostFleetAgentsAgentidUnenrollWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidUnenrollResponse, error) { + rsp, err := c.PostFleetAgentsAgentidUnenrollWithBody(ctx, agentId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostFleetAgentsAgentidUnenrollResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PostFleetAgentsAgentidUnenrollWithResponse(ctx context.Context, agentId string, body PostFleetAgentsAgentidUnenrollJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidUnenrollResponse, error) { + rsp, err := c.PostFleetAgentsAgentidUnenroll(ctx, agentId, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePostFleetAgentsAgentidUnenrollResponse(rsp) } -// Override default JSON handling for PackageInfo_Discovery to handle AdditionalProperties -func (a PackageInfo_Discovery) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// PostFleetAgentsAgentidUpgradeWithBodyWithResponse request with arbitrary body returning *PostFleetAgentsAgentidUpgradeResponse +func (c *ClientWithResponses) PostFleetAgentsAgentidUpgradeWithBodyWithResponse(ctx context.Context, agentId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidUpgradeResponse, error) { + rsp, err := c.PostFleetAgentsAgentidUpgradeWithBody(ctx, agentId, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostFleetAgentsAgentidUpgradeResponse(rsp) +} - if a.Fields != nil { - object["fields"], err = json.Marshal(a.Fields) - if err != nil { - return nil, fmt.Errorf("error marshaling 'fields': %w", err) - } +func (c *ClientWithResponses) PostFleetAgentsAgentidUpgradeWithResponse(ctx context.Context, agentId string, body PostFleetAgentsAgentidUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentsAgentidUpgradeResponse, error) { + rsp, err := c.PostFleetAgentsAgentidUpgrade(ctx, agentId, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetAgentsAgentidUpgradeResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// GetFleetAgentsAgentidUploadsWithResponse request returning *GetFleetAgentsAgentidUploadsResponse +func (c *ClientWithResponses) GetFleetAgentsAgentidUploadsWithResponse(ctx context.Context, agentId string, reqEditors ...RequestEditorFn) (*GetFleetAgentsAgentidUploadsResponse, error) { + rsp, err := c.GetFleetAgentsAgentidUploads(ctx, agentId, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseGetFleetAgentsAgentidUploadsResponse(rsp) } -// Getter for additional properties for PackageInfo_Icons_Item. 
Returns the specified -// element and whether it was found -func (a PackageInfo_Icons_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// GetFleetCheckPermissionsWithResponse request returning *GetFleetCheckPermissionsResponse +func (c *ClientWithResponses) GetFleetCheckPermissionsWithResponse(ctx context.Context, params *GetFleetCheckPermissionsParams, reqEditors ...RequestEditorFn) (*GetFleetCheckPermissionsResponse, error) { + rsp, err := c.GetFleetCheckPermissions(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return + return ParseGetFleetCheckPermissionsResponse(rsp) } -// Setter for additional properties for PackageInfo_Icons_Item -func (a *PackageInfo_Icons_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// GetFleetDataStreamsWithResponse request returning *GetFleetDataStreamsResponse +func (c *ClientWithResponses) GetFleetDataStreamsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetDataStreamsResponse, error) { + rsp, err := c.GetFleetDataStreams(ctx, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseGetFleetDataStreamsResponse(rsp) } -// Override default JSON handling for PackageInfo_Icons_Item to handle AdditionalProperties -func (a *PackageInfo_Icons_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// GetFleetEnrollmentApiKeysWithResponse request returning *GetFleetEnrollmentApiKeysResponse +func (c *ClientWithResponses) GetFleetEnrollmentApiKeysWithResponse(ctx context.Context, params *GetFleetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*GetFleetEnrollmentApiKeysResponse, error) { + rsp, err := c.GetFleetEnrollmentApiKeys(ctx, params, reqEditors...) if err != nil { - return err + return nil, err } + return ParseGetFleetEnrollmentApiKeysResponse(rsp) +} - if raw, found := object["dark_mode"]; found { - err = json.Unmarshal(raw, &a.DarkMode) - if err != nil { - return fmt.Errorf("error reading 'dark_mode': %w", err) - } - delete(object, "dark_mode") +// PostFleetEnrollmentApiKeysWithBodyWithResponse request with arbitrary body returning *PostFleetEnrollmentApiKeysResponse +func (c *ClientWithResponses) PostFleetEnrollmentApiKeysWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEnrollmentApiKeysResponse, error) { + rsp, err := c.PostFleetEnrollmentApiKeysWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetEnrollmentApiKeysResponse(rsp) +} - if raw, found := object["path"]; found { - err = json.Unmarshal(raw, &a.Path) - if err != nil { - return fmt.Errorf("error reading 'path': %w", err) - } - delete(object, "path") +func (c *ClientWithResponses) PostFleetEnrollmentApiKeysWithResponse(ctx context.Context, body PostFleetEnrollmentApiKeysJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEnrollmentApiKeysResponse, error) { + rsp, err := c.PostFleetEnrollmentApiKeys(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostFleetEnrollmentApiKeysResponse(rsp) +} - if raw, found := object["size"]; found { - err = json.Unmarshal(raw, &a.Size) - if err != nil { - return fmt.Errorf("error reading 'size': %w", err) - } - delete(object, "size") +// DeleteFleetEnrollmentApiKeysKeyidWithResponse request returning *DeleteFleetEnrollmentApiKeysKeyidResponse +func (c *ClientWithResponses) DeleteFleetEnrollmentApiKeysKeyidWithResponse(ctx context.Context, keyId string, reqEditors ...RequestEditorFn) (*DeleteFleetEnrollmentApiKeysKeyidResponse, error) { + rsp, err := c.DeleteFleetEnrollmentApiKeysKeyid(ctx, keyId, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteFleetEnrollmentApiKeysKeyidResponse(rsp) +} - if raw, found := object["src"]; found { - err = json.Unmarshal(raw, &a.Src) - if err != nil { - return fmt.Errorf("error reading 'src': %w", err) - } - delete(object, "src") +// GetFleetEnrollmentApiKeysKeyidWithResponse request returning *GetFleetEnrollmentApiKeysKeyidResponse +func (c *ClientWithResponses) GetFleetEnrollmentApiKeysKeyidWithResponse(ctx context.Context, keyId string, reqEditors ...RequestEditorFn) (*GetFleetEnrollmentApiKeysKeyidResponse, error) { + rsp, err := c.GetFleetEnrollmentApiKeysKeyid(ctx, keyId, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetEnrollmentApiKeysKeyidResponse(rsp) +} - if raw, found := object["title"]; found { - err = json.Unmarshal(raw, &a.Title) - if err != nil { - return fmt.Errorf("error reading 'title': %w", err) - } - delete(object, "title") +// PostFleetEpmBulkAssetsWithBodyWithResponse request with arbitrary body returning *PostFleetEpmBulkAssetsResponse +func (c *ClientWithResponses) PostFleetEpmBulkAssetsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmBulkAssetsResponse, error) { + rsp, err := c.PostFleetEpmBulkAssetsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetEpmBulkAssetsResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +func (c *ClientWithResponses) PostFleetEpmBulkAssetsWithResponse(ctx context.Context, body PostFleetEpmBulkAssetsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmBulkAssetsResponse, error) { + rsp, err := c.PostFleetEpmBulkAssets(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetEpmBulkAssetsResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// GetFleetEpmCategoriesWithResponse request returning *GetFleetEpmCategoriesResponse +func (c *ClientWithResponses) GetFleetEpmCategoriesWithResponse(ctx context.Context, params *GetFleetEpmCategoriesParams, reqEditors ...RequestEditorFn) (*GetFleetEpmCategoriesResponse, error) { + rsp, err := c.GetFleetEpmCategories(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err } - return nil + return ParseGetFleetEpmCategoriesResponse(rsp) } -// Override default JSON handling for PackageInfo_Icons_Item to handle AdditionalProperties -func (a PackageInfo_Icons_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// PostFleetEpmCustomIntegrationsWithBodyWithResponse request with arbitrary body returning *PostFleetEpmCustomIntegrationsResponse +func (c *ClientWithResponses) PostFleetEpmCustomIntegrationsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmCustomIntegrationsResponse, error) { + rsp, err := c.PostFleetEpmCustomIntegrationsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostFleetEpmCustomIntegrationsResponse(rsp) +} - if a.DarkMode != nil { - object["dark_mode"], err = json.Marshal(a.DarkMode) - if err != nil { - return nil, fmt.Errorf("error marshaling 'dark_mode': %w", err) - } +func (c *ClientWithResponses) PostFleetEpmCustomIntegrationsWithResponse(ctx context.Context, body PostFleetEpmCustomIntegrationsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmCustomIntegrationsResponse, error) { + rsp, err := c.PostFleetEpmCustomIntegrations(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetEpmCustomIntegrationsResponse(rsp) +} - if a.Path != nil { - object["path"], err = json.Marshal(a.Path) - if err != nil { - return nil, fmt.Errorf("error marshaling 'path': %w", err) - } +// PutFleetEpmCustomIntegrationsPkgnameWithBodyWithResponse request with arbitrary body returning *PutFleetEpmCustomIntegrationsPkgnameResponse +func (c *ClientWithResponses) PutFleetEpmCustomIntegrationsPkgnameWithBodyWithResponse(ctx context.Context, pkgName string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetEpmCustomIntegrationsPkgnameResponse, error) { + rsp, err := c.PutFleetEpmCustomIntegrationsPkgnameWithBody(ctx, pkgName, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutFleetEpmCustomIntegrationsPkgnameResponse(rsp) +} - if a.Size != nil { - object["size"], err = json.Marshal(a.Size) - if err != nil { - return nil, fmt.Errorf("error marshaling 'size': %w", err) - } +func (c *ClientWithResponses) PutFleetEpmCustomIntegrationsPkgnameWithResponse(ctx context.Context, pkgName string, body PutFleetEpmCustomIntegrationsPkgnameJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetEpmCustomIntegrationsPkgnameResponse, error) { + rsp, err := c.PutFleetEpmCustomIntegrationsPkgname(ctx, pkgName, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutFleetEpmCustomIntegrationsPkgnameResponse(rsp) +} - object["src"], err = json.Marshal(a.Src) +// GetFleetEpmDataStreamsWithResponse request returning *GetFleetEpmDataStreamsResponse +func (c *ClientWithResponses) GetFleetEpmDataStreamsWithResponse(ctx context.Context, params *GetFleetEpmDataStreamsParams, reqEditors ...RequestEditorFn) (*GetFleetEpmDataStreamsResponse, error) { + rsp, err := c.GetFleetEpmDataStreams(ctx, params, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'src': %w", err) + return nil, err } + return ParseGetFleetEpmDataStreamsResponse(rsp) +} - if a.Title != nil { - object["title"], err = json.Marshal(a.Title) - if err != nil { - return nil, fmt.Errorf("error marshaling 'title': %w", err) - } +// GetFleetEpmPackagesWithResponse request returning *GetFleetEpmPackagesResponse +func (c *ClientWithResponses) GetFleetEpmPackagesWithResponse(ctx context.Context, params *GetFleetEpmPackagesParams, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesResponse, error) { + rsp, err := c.GetFleetEpmPackages(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetEpmPackagesResponse(rsp) +} - if a.Type != nil { - object["type"], err = json.Marshal(a.Type) - if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) - } +// PostFleetEpmPackagesWithBodyWithResponse request with arbitrary body returning *PostFleetEpmPackagesResponse +func (c *ClientWithResponses) PostFleetEpmPackagesWithBodyWithResponse(ctx context.Context, params *PostFleetEpmPackagesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesResponse, error) { + rsp, err := c.PostFleetEpmPackagesWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetEpmPackagesResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// PostFleetEpmPackagesBulkWithBodyWithResponse request with arbitrary body returning *PostFleetEpmPackagesBulkResponse +func (c *ClientWithResponses) PostFleetEpmPackagesBulkWithBodyWithResponse(ctx context.Context, params *PostFleetEpmPackagesBulkParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkResponse, error) { + rsp, err := c.PostFleetEpmPackagesBulkWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostFleetEpmPackagesBulkResponse(rsp) } -// Getter for additional properties for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item. Returns the specified -// element and whether it was found -func (a PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) PostFleetEpmPackagesBulkWithResponse(ctx context.Context, params *PostFleetEpmPackagesBulkParams, body PostFleetEpmPackagesBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkResponse, error) { + rsp, err := c.PostFleetEpmPackagesBulk(ctx, params, body, reqEditors...) 
+ if err != nil { + return nil, err } - return + return ParsePostFleetEpmPackagesBulkResponse(rsp) } -// Setter for additional properties for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item -func (a *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// PostFleetEpmPackagesBulkUninstallWithBodyWithResponse request with arbitrary body returning *PostFleetEpmPackagesBulkUninstallResponse +func (c *ClientWithResponses) PostFleetEpmPackagesBulkUninstallWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkUninstallResponse, error) { + rsp, err := c.PostFleetEpmPackagesBulkUninstallWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostFleetEpmPackagesBulkUninstallResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties -func (a *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +func (c *ClientWithResponses) PostFleetEpmPackagesBulkUninstallWithResponse(ctx context.Context, body PostFleetEpmPackagesBulkUninstallJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkUninstallResponse, error) { + rsp, err := c.PostFleetEpmPackagesBulkUninstall(ctx, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePostFleetEpmPackagesBulkUninstallResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// GetFleetEpmPackagesBulkUninstallTaskidWithResponse request returning *GetFleetEpmPackagesBulkUninstallTaskidResponse +func (c *ClientWithResponses) GetFleetEpmPackagesBulkUninstallTaskidWithResponse(ctx context.Context, taskId string, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesBulkUninstallTaskidResponse, error) { + rsp, err := c.GetFleetEpmPackagesBulkUninstallTaskid(ctx, taskId, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetEpmPackagesBulkUninstallTaskidResponse(rsp) +} - if raw, found := object["originId"]; found { - err = json.Unmarshal(raw, &a.OriginId) - if err != nil { - return fmt.Errorf("error reading 'originId': %w", err) - } - delete(object, "originId") +// PostFleetEpmPackagesBulkUpgradeWithBodyWithResponse request with arbitrary body returning *PostFleetEpmPackagesBulkUpgradeResponse +func (c *ClientWithResponses) PostFleetEpmPackagesBulkUpgradeWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkUpgradeResponse, error) { + rsp, err := c.PostFleetEpmPackagesBulkUpgradeWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostFleetEpmPackagesBulkUpgradeResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +func (c *ClientWithResponses) PostFleetEpmPackagesBulkUpgradeWithResponse(ctx context.Context, body PostFleetEpmPackagesBulkUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesBulkUpgradeResponse, error) { + rsp, err := c.PostFleetEpmPackagesBulkUpgrade(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetEpmPackagesBulkUpgradeResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// GetFleetEpmPackagesBulkUpgradeTaskidWithResponse request returning *GetFleetEpmPackagesBulkUpgradeTaskidResponse +func (c *ClientWithResponses) GetFleetEpmPackagesBulkUpgradeTaskidWithResponse(ctx context.Context, taskId string, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesBulkUpgradeTaskidResponse, error) { + rsp, err := c.GetFleetEpmPackagesBulkUpgradeTaskid(ctx, taskId, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseGetFleetEpmPackagesBulkUpgradeTaskidResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties -func (a PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// GetFleetEpmPackagesInstalledWithResponse request returning *GetFleetEpmPackagesInstalledResponse +func (c *ClientWithResponses) GetFleetEpmPackagesInstalledWithResponse(ctx context.Context, params *GetFleetEpmPackagesInstalledParams, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesInstalledResponse, error) { + rsp, err := c.GetFleetEpmPackagesInstalled(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetFleetEpmPackagesInstalledResponse(rsp) +} - object["id"], err = json.Marshal(a.Id) +// GetFleetEpmPackagesLimitedWithResponse request returning *GetFleetEpmPackagesLimitedResponse +func (c *ClientWithResponses) GetFleetEpmPackagesLimitedWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesLimitedResponse, error) { + rsp, err := c.GetFleetEpmPackagesLimited(ctx, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) + return nil, err } + return ParseGetFleetEpmPackagesLimitedResponse(rsp) +} - if a.OriginId != nil { - object["originId"], err = json.Marshal(a.OriginId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'originId': %w", err) - } +// GetFleetEpmPackagesPkgnameStatsWithResponse request returning *GetFleetEpmPackagesPkgnameStatsResponse +func (c *ClientWithResponses) GetFleetEpmPackagesPkgnameStatsWithResponse(ctx context.Context, pkgName string, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesPkgnameStatsResponse, error) { + rsp, err := c.GetFleetEpmPackagesPkgnameStats(ctx, pkgName, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetFleetEpmPackagesPkgnameStatsResponse(rsp) +} - object["type"], err = json.Marshal(a.Type) +// DeleteFleetEpmPackagesPkgnamePkgversionWithResponse request returning *DeleteFleetEpmPackagesPkgnamePkgversionResponse +func (c *ClientWithResponses) DeleteFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*DeleteFleetEpmPackagesPkgnamePkgversionResponse, error) { + rsp, err := c.DeleteFleetEpmPackagesPkgnamePkgversion(ctx, pkgName, pkgVersion, params, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) + return nil, err } + return ParseDeleteFleetEpmPackagesPkgnamePkgversionResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// GetFleetEpmPackagesPkgnamePkgversionWithResponse request returning *GetFleetEpmPackagesPkgnamePkgversionResponse +func (c *ClientWithResponses) GetFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesPkgnamePkgversionResponse, error) { + rsp, err := c.GetFleetEpmPackagesPkgnamePkgversion(ctx, pkgName, pkgVersion, params, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseGetFleetEpmPackagesPkgnamePkgversionResponse(rsp) } -// Getter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features. Returns the specified -// element and whether it was found -func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// PostFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse request with arbitrary body returning *PostFleetEpmPackagesPkgnamePkgversionResponse +func (c *ClientWithResponses) PostFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionResponse, error) { + rsp, err := c.PostFleetEpmPackagesPkgnamePkgversionWithBody(ctx, pkgName, pkgVersion, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePostFleetEpmPackagesPkgnamePkgversionResponse(rsp) } -// Setter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features -func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) PostFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, body PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionResponse, error) { + rsp, err := c.PostFleetEpmPackagesPkgnamePkgversion(ctx, pkgName, pkgVersion, params, body, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostFleetEpmPackagesPkgnamePkgversionResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties -func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PutFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse request with arbitrary body returning *PutFleetEpmPackagesPkgnamePkgversionResponse +func (c *ClientWithResponses) PutFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetEpmPackagesPkgnamePkgversionResponse, error) { + rsp, err := c.PutFleetEpmPackagesPkgnamePkgversionWithBody(ctx, pkgName, pkgVersion, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePutFleetEpmPackagesPkgnamePkgversionResponse(rsp) +} - if raw, found := object["doc_value_only_numeric"]; found { - err = json.Unmarshal(raw, &a.DocValueOnlyNumeric) - if err != nil { - return fmt.Errorf("error reading 'doc_value_only_numeric': %w", err) - } - delete(object, "doc_value_only_numeric") +func (c *ClientWithResponses) PutFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, body PutFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetEpmPackagesPkgnamePkgversionResponse, error) { + rsp, err := c.PutFleetEpmPackagesPkgnamePkgversion(ctx, pkgName, pkgVersion, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutFleetEpmPackagesPkgnamePkgversionResponse(rsp) +} - if raw, found := object["doc_value_only_other"]; found { - err = json.Unmarshal(raw, &a.DocValueOnlyOther) - if err != nil { - return fmt.Errorf("error reading 'doc_value_only_other': %w", err) - } - delete(object, "doc_value_only_other") +// DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsWithResponse request returning *DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse +func (c *ClientWithResponses) DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsParams, reqEditors ...RequestEditorFn) (*DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse, error) { + rsp, err := c.DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssets(ctx, pkgName, pkgVersion, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseDeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse(rsp) +} - if raw, found := object["synthetic_source"]; found { - err = json.Unmarshal(raw, &a.SyntheticSource) - if err != nil { - return fmt.Errorf("error reading 'synthetic_source': %w", err) - } - delete(object, "synthetic_source") +// DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithResponse request returning *DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse +func (c *ClientWithResponses) DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithResponse(ctx context.Context, pkgName string, pkgVersion string, reqEditors ...RequestEditorFn) (*DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse, error) { + rsp, err := c.DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssets(ctx, pkgName, pkgVersion, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse(rsp) +} - if raw, found := object["tsdb"]; found { - err = json.Unmarshal(raw, &a.Tsdb) - if err != nil { - return fmt.Errorf("error reading 'tsdb': %w", err) - } - delete(object, "tsdb") +// PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithBodyWithResponse request with arbitrary body returning *PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse +func (c *ClientWithResponses) PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse, error) { + rsp, err := c.PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithBody(ctx, pkgName, pkgVersion, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithResponse(ctx context.Context, pkgName string, pkgVersion string, body PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse, error) { + rsp, err := c.PostFleetEpmPackagesPkgnamePkgversionKibanaAssets(ctx, pkgName, pkgVersion, body, reqEditors...) 
+ if err != nil { + return nil, err } - return nil + return ParsePostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties -func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Features) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithBodyWithResponse request with arbitrary body returning *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse +func (c *ClientWithResponses) PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse, error) { + rsp, err := c.PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithBody(ctx, pkgName, pkgVersion, params, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse(rsp) +} - if a.DocValueOnlyNumeric != nil { - object["doc_value_only_numeric"], err = json.Marshal(a.DocValueOnlyNumeric) - if err != nil { - return nil, fmt.Errorf("error marshaling 'doc_value_only_numeric': %w", err) - } +func (c *ClientWithResponses) PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeParams, body PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse, error) { + rsp, err := c.PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorize(ctx, pkgName, pkgVersion, params, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse(rsp) +} - if a.DocValueOnlyOther != nil { - object["doc_value_only_other"], err = json.Marshal(a.DocValueOnlyOther) - if err != nil { - return nil, fmt.Errorf("error marshaling 'doc_value_only_other': %w", err) - } +// GetFleetEpmPackagesPkgnamePkgversionFilepathWithResponse request returning *GetFleetEpmPackagesPkgnamePkgversionFilepathResponse +func (c *ClientWithResponses) GetFleetEpmPackagesPkgnamePkgversionFilepathWithResponse(ctx context.Context, pkgName string, pkgVersion string, filePath string, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesPkgnamePkgversionFilepathResponse, error) { + rsp, err := c.GetFleetEpmPackagesPkgnamePkgversionFilepath(ctx, pkgName, pkgVersion, filePath, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetFleetEpmPackagesPkgnamePkgversionFilepathResponse(rsp) +} - if a.SyntheticSource != nil { - object["synthetic_source"], err = json.Marshal(a.SyntheticSource) - if err != nil { - return nil, fmt.Errorf("error marshaling 'synthetic_source': %w", err) - } +// GetFleetEpmTemplatesPkgnamePkgversionInputsWithResponse request returning *GetFleetEpmTemplatesPkgnamePkgversionInputsResponse +func (c *ClientWithResponses) GetFleetEpmTemplatesPkgnamePkgversionInputsWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmTemplatesPkgnamePkgversionInputsParams, reqEditors ...RequestEditorFn) (*GetFleetEpmTemplatesPkgnamePkgversionInputsResponse, error) { + rsp, err := c.GetFleetEpmTemplatesPkgnamePkgversionInputs(ctx, pkgName, pkgVersion, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetEpmTemplatesPkgnamePkgversionInputsResponse(rsp) +} - if a.Tsdb != nil { - object["tsdb"], err = json.Marshal(a.Tsdb) - if err != nil { - return nil, fmt.Errorf("error marshaling 'tsdb': %w", err) - } +// GetFleetEpmVerificationKeyIdWithResponse request returning *GetFleetEpmVerificationKeyIdResponse +func (c *ClientWithResponses) GetFleetEpmVerificationKeyIdWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetEpmVerificationKeyIdResponse, error) { + rsp, err := c.GetFleetEpmVerificationKeyId(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetEpmVerificationKeyIdResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// GetFleetFleetServerHostsWithResponse request returning *GetFleetFleetServerHostsResponse +func (c *ClientWithResponses) GetFleetFleetServerHostsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetFleetServerHostsResponse, error) { + rsp, err := c.GetFleetFleetServerHosts(ctx, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseGetFleetFleetServerHostsResponse(rsp) } -// Getter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item. Returns the specified -// element and whether it was found -func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// PostFleetFleetServerHostsWithBodyWithResponse request with arbitrary body returning *PostFleetFleetServerHostsResponse +func (c *ClientWithResponses) PostFleetFleetServerHostsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetFleetServerHostsResponse, error) { + rsp, err := c.PostFleetFleetServerHostsWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - return + return ParsePostFleetFleetServerHostsResponse(rsp) } -// Setter for additional properties for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item -func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) PostFleetFleetServerHostsWithResponse(ctx context.Context, body PostFleetFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetFleetServerHostsResponse, error) { + rsp, err := c.PostFleetFleetServerHosts(ctx, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostFleetFleetServerHostsResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties -func (a *PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// DeleteFleetFleetServerHostsItemidWithResponse request returning *DeleteFleetFleetServerHostsItemidResponse +func (c *ClientWithResponses) DeleteFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*DeleteFleetFleetServerHostsItemidResponse, error) { + rsp, err := c.DeleteFleetFleetServerHostsItemid(ctx, itemId, reqEditors...) if err != nil { - return err + return nil, err } + return ParseDeleteFleetFleetServerHostsItemidResponse(rsp) +} - if raw, found := object["data_stream"]; found { - err = json.Unmarshal(raw, &a.DataStream) - if err != nil { - return fmt.Errorf("error reading 'data_stream': %w", err) - } - delete(object, "data_stream") +// GetFleetFleetServerHostsItemidWithResponse request returning *GetFleetFleetServerHostsItemidResponse +func (c *ClientWithResponses) GetFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*GetFleetFleetServerHostsItemidResponse, error) { + rsp, err := c.GetFleetFleetServerHostsItemid(ctx, itemId, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetFleetServerHostsItemidResponse(rsp) +} - if raw, found := object["features"]; found { - err = json.Unmarshal(raw, &a.Features) - if err != nil { - return fmt.Errorf("error reading 'features': %w", err) - } - delete(object, "features") +// PutFleetFleetServerHostsItemidWithBodyWithResponse request with arbitrary body returning *PutFleetFleetServerHostsItemidResponse +func (c *ClientWithResponses) PutFleetFleetServerHostsItemidWithBodyWithResponse(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetFleetServerHostsItemidResponse, error) { + rsp, err := c.PutFleetFleetServerHostsItemidWithBody(ctx, itemId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePutFleetFleetServerHostsItemidResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PutFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, body PutFleetFleetServerHostsItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetFleetServerHostsItemidResponse, error) { + rsp, err := c.PutFleetFleetServerHostsItemid(ctx, itemId, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePutFleetFleetServerHostsItemidResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties -func (a PackageInfo_InstallationInfo_ExperimentalDataStreamFeatures_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// PostFleetHealthCheckWithBodyWithResponse request with arbitrary body returning *PostFleetHealthCheckResponse +func (c *ClientWithResponses) PostFleetHealthCheckWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetHealthCheckResponse, error) { + rsp, err := c.PostFleetHealthCheckWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostFleetHealthCheckResponse(rsp) +} - object["data_stream"], err = json.Marshal(a.DataStream) +func (c *ClientWithResponses) PostFleetHealthCheckWithResponse(ctx context.Context, body PostFleetHealthCheckJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetHealthCheckResponse, error) { + rsp, err := c.PostFleetHealthCheck(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'data_stream': %w", err) + return nil, err } + return ParsePostFleetHealthCheckResponse(rsp) +} - object["features"], err = json.Marshal(a.Features) +// GetFleetKubernetesWithResponse request returning *GetFleetKubernetesResponse +func (c *ClientWithResponses) GetFleetKubernetesWithResponse(ctx context.Context, params *GetFleetKubernetesParams, reqEditors ...RequestEditorFn) (*GetFleetKubernetesResponse, error) { + rsp, err := c.GetFleetKubernetes(ctx, params, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'features': %w", err) + return nil, err } + return ParseGetFleetKubernetesResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// GetFleetKubernetesDownloadWithResponse request returning *GetFleetKubernetesDownloadResponse +func (c *ClientWithResponses) GetFleetKubernetesDownloadWithResponse(ctx context.Context, params *GetFleetKubernetesDownloadParams, reqEditors ...RequestEditorFn) (*GetFleetKubernetesDownloadResponse, error) { + rsp, err := c.GetFleetKubernetesDownload(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseGetFleetKubernetesDownloadResponse(rsp) } -// Getter for additional properties for PackageInfo_InstallationInfo_InstalledEs_Item. 
Returns the specified -// element and whether it was found -func (a PackageInfo_InstallationInfo_InstalledEs_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// PostFleetLogstashApiKeysWithResponse request returning *PostFleetLogstashApiKeysResponse +func (c *ClientWithResponses) PostFleetLogstashApiKeysWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*PostFleetLogstashApiKeysResponse, error) { + rsp, err := c.PostFleetLogstashApiKeys(ctx, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePostFleetLogstashApiKeysResponse(rsp) } -// Setter for additional properties for PackageInfo_InstallationInfo_InstalledEs_Item -func (a *PackageInfo_InstallationInfo_InstalledEs_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// PostFleetMessageSigningServiceRotateKeyPairWithResponse request returning *PostFleetMessageSigningServiceRotateKeyPairResponse +func (c *ClientWithResponses) PostFleetMessageSigningServiceRotateKeyPairWithResponse(ctx context.Context, params *PostFleetMessageSigningServiceRotateKeyPairParams, reqEditors ...RequestEditorFn) (*PostFleetMessageSigningServiceRotateKeyPairResponse, error) { + rsp, err := c.PostFleetMessageSigningServiceRotateKeyPair(ctx, params, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostFleetMessageSigningServiceRotateKeyPairResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_InstalledEs_Item to handle AdditionalProperties -func (a *PackageInfo_InstallationInfo_InstalledEs_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// GetFleetOutputsWithResponse request returning *GetFleetOutputsResponse +func (c *ClientWithResponses) GetFleetOutputsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetOutputsResponse, error) { + rsp, err := c.GetFleetOutputs(ctx, reqEditors...) if err != nil { - return err + return nil, err } + return ParseGetFleetOutputsResponse(rsp) +} - if raw, found := object["deferred"]; found { - err = json.Unmarshal(raw, &a.Deferred) - if err != nil { - return fmt.Errorf("error reading 'deferred': %w", err) - } - delete(object, "deferred") +// PostFleetOutputsWithBodyWithResponse request with arbitrary body returning *PostFleetOutputsResponse +func (c *ClientWithResponses) PostFleetOutputsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetOutputsResponse, error) { + rsp, err := c.PostFleetOutputsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetOutputsResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +func (c *ClientWithResponses) PostFleetOutputsWithResponse(ctx context.Context, body PostFleetOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetOutputsResponse, error) { + rsp, err := c.PostFleetOutputs(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostFleetOutputsResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +// DeleteFleetOutputsOutputidWithResponse request returning *DeleteFleetOutputsOutputidResponse +func (c *ClientWithResponses) DeleteFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*DeleteFleetOutputsOutputidResponse, error) { + rsp, err := c.DeleteFleetOutputsOutputid(ctx, outputId, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteFleetOutputsOutputidResponse(rsp) +} - if raw, found := object["version"]; found { - err = json.Unmarshal(raw, &a.Version) - if err != nil { - return fmt.Errorf("error reading 'version': %w", err) - } - delete(object, "version") +// GetFleetOutputsOutputidWithResponse request returning *GetFleetOutputsOutputidResponse +func (c *ClientWithResponses) GetFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*GetFleetOutputsOutputidResponse, error) { + rsp, err := c.GetFleetOutputsOutputid(ctx, outputId, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetOutputsOutputidResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PutFleetOutputsOutputidWithBodyWithResponse request with arbitrary body returning *PutFleetOutputsOutputidResponse +func (c *ClientWithResponses) PutFleetOutputsOutputidWithBodyWithResponse(ctx context.Context, outputId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetOutputsOutputidResponse, error) { + rsp, err := c.PutFleetOutputsOutputidWithBody(ctx, outputId, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePutFleetOutputsOutputidResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_InstalledEs_Item to handle AdditionalProperties -func (a PackageInfo_InstallationInfo_InstalledEs_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - if a.Deferred != nil { - object["deferred"], err = json.Marshal(a.Deferred) - if err != nil { - return nil, fmt.Errorf("error marshaling 'deferred': %w", err) - } +func (c *ClientWithResponses) PutFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, body PutFleetOutputsOutputidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetOutputsOutputidResponse, error) { + rsp, err := c.PutFleetOutputsOutputid(ctx, outputId, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutFleetOutputsOutputidResponse(rsp) +} - object["id"], err = json.Marshal(a.Id) +// GetFleetOutputsOutputidHealthWithResponse request returning *GetFleetOutputsOutputidHealthResponse +func (c *ClientWithResponses) GetFleetOutputsOutputidHealthWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*GetFleetOutputsOutputidHealthResponse, error) { + rsp, err := c.GetFleetOutputsOutputidHealth(ctx, outputId, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) + return nil, err } + return ParseGetFleetOutputsOutputidHealthResponse(rsp) +} - object["type"], err = json.Marshal(a.Type) +// GetFleetPackagePoliciesWithResponse request returning *GetFleetPackagePoliciesResponse +func (c *ClientWithResponses) GetFleetPackagePoliciesWithResponse(ctx context.Context, params *GetFleetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*GetFleetPackagePoliciesResponse, error) { + rsp, err := c.GetFleetPackagePolicies(ctx, params, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) + return nil, err } + return ParseGetFleetPackagePoliciesResponse(rsp) +} - if a.Version != nil { - object["version"], err = json.Marshal(a.Version) - if err != nil { - return nil, fmt.Errorf("error marshaling 'version': %w", err) - } +// PostFleetPackagePoliciesWithBodyWithResponse request with arbitrary body returning *PostFleetPackagePoliciesResponse +func (c *ClientWithResponses) PostFleetPackagePoliciesWithBodyWithResponse(ctx context.Context, params *PostFleetPackagePoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesResponse, error) { + rsp, err := c.PostFleetPackagePoliciesWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetPackagePoliciesResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) PostFleetPackagePoliciesWithResponse(ctx context.Context, params *PostFleetPackagePoliciesParams, body PostFleetPackagePoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesResponse, error) { + rsp, err := c.PostFleetPackagePolicies(ctx, params, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostFleetPackagePoliciesResponse(rsp) } -// Getter for additional properties for PackageInfo_InstallationInfo_InstalledKibana_Item. Returns the specified -// element and whether it was found -func (a PackageInfo_InstallationInfo_InstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// PostFleetPackagePoliciesBulkGetWithBodyWithResponse request with arbitrary body returning *PostFleetPackagePoliciesBulkGetResponse +func (c *ClientWithResponses) PostFleetPackagePoliciesBulkGetWithBodyWithResponse(ctx context.Context, params *PostFleetPackagePoliciesBulkGetParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesBulkGetResponse, error) { + rsp, err := c.PostFleetPackagePoliciesBulkGetWithBody(ctx, params, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - return + return ParsePostFleetPackagePoliciesBulkGetResponse(rsp) } -// Setter for additional properties for PackageInfo_InstallationInfo_InstalledKibana_Item -func (a *PackageInfo_InstallationInfo_InstalledKibana_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) PostFleetPackagePoliciesBulkGetWithResponse(ctx context.Context, params *PostFleetPackagePoliciesBulkGetParams, body PostFleetPackagePoliciesBulkGetJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesBulkGetResponse, error) { + rsp, err := c.PostFleetPackagePoliciesBulkGet(ctx, params, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostFleetPackagePoliciesBulkGetResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties -func (a *PackageInfo_InstallationInfo_InstalledKibana_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PostFleetPackagePoliciesDeleteWithBodyWithResponse request with arbitrary body returning *PostFleetPackagePoliciesDeleteResponse +func (c *ClientWithResponses) PostFleetPackagePoliciesDeleteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesDeleteResponse, error) { + rsp, err := c.PostFleetPackagePoliciesDeleteWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePostFleetPackagePoliciesDeleteResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +func (c *ClientWithResponses) PostFleetPackagePoliciesDeleteWithResponse(ctx context.Context, body PostFleetPackagePoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesDeleteResponse, error) { + rsp, err := c.PostFleetPackagePoliciesDelete(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetPackagePoliciesDeleteResponse(rsp) +} - if raw, found := object["originId"]; found { - err = json.Unmarshal(raw, &a.OriginId) - if err != nil { - return fmt.Errorf("error reading 'originId': %w", err) - } - delete(object, "originId") +// PostFleetPackagePoliciesUpgradeWithBodyWithResponse request with arbitrary body returning *PostFleetPackagePoliciesUpgradeResponse +func (c *ClientWithResponses) PostFleetPackagePoliciesUpgradeWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesUpgradeResponse, error) { + rsp, err := c.PostFleetPackagePoliciesUpgradeWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostFleetPackagePoliciesUpgradeResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +func (c *ClientWithResponses) PostFleetPackagePoliciesUpgradeWithResponse(ctx context.Context, body PostFleetPackagePoliciesUpgradeJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesUpgradeResponse, error) { + rsp, err := c.PostFleetPackagePoliciesUpgrade(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetPackagePoliciesUpgradeResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PostFleetPackagePoliciesUpgradeDryrunWithBodyWithResponse request with arbitrary body returning *PostFleetPackagePoliciesUpgradeDryrunResponse +func (c *ClientWithResponses) PostFleetPackagePoliciesUpgradeDryrunWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesUpgradeDryrunResponse, error) { + rsp, err := c.PostFleetPackagePoliciesUpgradeDryrunWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePostFleetPackagePoliciesUpgradeDryrunResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties -func (a PackageInfo_InstallationInfo_InstalledKibana_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - object["id"], err = json.Marshal(a.Id) +func (c *ClientWithResponses) PostFleetPackagePoliciesUpgradeDryrunWithResponse(ctx context.Context, body PostFleetPackagePoliciesUpgradeDryrunJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesUpgradeDryrunResponse, error) { + rsp, err := c.PostFleetPackagePoliciesUpgradeDryrun(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) + return nil, err } + return ParsePostFleetPackagePoliciesUpgradeDryrunResponse(rsp) +} - if a.OriginId != nil { - object["originId"], err = json.Marshal(a.OriginId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'originId': %w", err) - } +// DeleteFleetPackagePoliciesPackagepolicyidWithResponse request returning *DeleteFleetPackagePoliciesPackagepolicyidResponse +func (c *ClientWithResponses) DeleteFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *DeleteFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*DeleteFleetPackagePoliciesPackagepolicyidResponse, error) { + rsp, err := c.DeleteFleetPackagePoliciesPackagepolicyid(ctx, packagePolicyId, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseDeleteFleetPackagePoliciesPackagepolicyidResponse(rsp) +} - object["type"], err = json.Marshal(a.Type) +// GetFleetPackagePoliciesPackagepolicyidWithResponse request returning *GetFleetPackagePoliciesPackagepolicyidResponse +func (c *ClientWithResponses) GetFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *GetFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*GetFleetPackagePoliciesPackagepolicyidResponse, error) { + rsp, err := c.GetFleetPackagePoliciesPackagepolicyid(ctx, packagePolicyId, params, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) + return nil, err } + return ParseGetFleetPackagePoliciesPackagepolicyidResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// PutFleetPackagePoliciesPackagepolicyidWithBodyWithResponse request with arbitrary body returning *PutFleetPackagePoliciesPackagepolicyidResponse +func (c *ClientWithResponses) PutFleetPackagePoliciesPackagepolicyidWithBodyWithResponse(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetPackagePoliciesPackagepolicyidResponse, error) { + rsp, err := c.PutFleetPackagePoliciesPackagepolicyidWithBody(ctx, packagePolicyId, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePutFleetPackagePoliciesPackagepolicyidResponse(rsp) } -// Getter for additional properties for PackageInfo_InstallationInfo_LatestExecutedState. Returns the specified -// element and whether it was found -func (a PackageInfo_InstallationInfo_LatestExecutedState) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) PutFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, body PutFleetPackagePoliciesPackagepolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetPackagePoliciesPackagepolicyidResponse, error) { + rsp, err := c.PutFleetPackagePoliciesPackagepolicyid(ctx, packagePolicyId, params, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePutFleetPackagePoliciesPackagepolicyidResponse(rsp) } -// Setter for additional properties for PackageInfo_InstallationInfo_LatestExecutedState -func (a *PackageInfo_InstallationInfo_LatestExecutedState) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// GetFleetProxiesWithResponse request returning *GetFleetProxiesResponse +func (c *ClientWithResponses) GetFleetProxiesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetProxiesResponse, error) { + rsp, err := c.GetFleetProxies(ctx, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseGetFleetProxiesResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_LatestExecutedState to handle AdditionalProperties -func (a *PackageInfo_InstallationInfo_LatestExecutedState) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PostFleetProxiesWithBodyWithResponse request with arbitrary body returning *PostFleetProxiesResponse +func (c *ClientWithResponses) PostFleetProxiesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetProxiesResponse, error) { + rsp, err := c.PostFleetProxiesWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePostFleetProxiesResponse(rsp) +} - if raw, found := object["error"]; found { - err = json.Unmarshal(raw, &a.Error) - if err != nil { - return fmt.Errorf("error reading 'error': %w", err) - } - delete(object, "error") +func (c *ClientWithResponses) PostFleetProxiesWithResponse(ctx context.Context, body PostFleetProxiesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetProxiesResponse, error) { + rsp, err := c.PostFleetProxies(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostFleetProxiesResponse(rsp) +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// DeleteFleetProxiesItemidWithResponse request returning *DeleteFleetProxiesItemidResponse +func (c *ClientWithResponses) DeleteFleetProxiesItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*DeleteFleetProxiesItemidResponse, error) { + rsp, err := c.DeleteFleetProxiesItemid(ctx, itemId, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteFleetProxiesItemidResponse(rsp) +} - if raw, found := object["started_at"]; found { - err = json.Unmarshal(raw, &a.StartedAt) - if err != nil { - return fmt.Errorf("error reading 'started_at': %w", err) - } - delete(object, "started_at") +// GetFleetProxiesItemidWithResponse request returning *GetFleetProxiesItemidResponse +func (c *ClientWithResponses) GetFleetProxiesItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*GetFleetProxiesItemidResponse, error) { + rsp, err := c.GetFleetProxiesItemid(ctx, itemId, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetProxiesItemidResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PutFleetProxiesItemidWithBodyWithResponse request with arbitrary body returning *PutFleetProxiesItemidResponse +func (c *ClientWithResponses) PutFleetProxiesItemidWithBodyWithResponse(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetProxiesItemidResponse, error) { + rsp, err := c.PutFleetProxiesItemidWithBody(ctx, itemId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - return nil + return ParsePutFleetProxiesItemidResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_LatestExecutedState to handle AdditionalProperties -func (a PackageInfo_InstallationInfo_LatestExecutedState) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - if a.Error != nil { - object["error"], err = json.Marshal(a.Error) - if err != nil { - return nil, fmt.Errorf("error marshaling 'error': %w", err) - } +func (c *ClientWithResponses) PutFleetProxiesItemidWithResponse(ctx context.Context, itemId string, body PutFleetProxiesItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetProxiesItemidResponse, error) { + rsp, err := c.PutFleetProxiesItemid(ctx, itemId, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutFleetProxiesItemidResponse(rsp) +} - if a.Name != nil { - object["name"], err = json.Marshal(a.Name) - if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) - } +// GetFleetRemoteSyncedIntegrationsStatusWithResponse request returning *GetFleetRemoteSyncedIntegrationsStatusResponse +func (c *ClientWithResponses) GetFleetRemoteSyncedIntegrationsStatusWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetRemoteSyncedIntegrationsStatusResponse, error) { + rsp, err := c.GetFleetRemoteSyncedIntegrationsStatus(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetRemoteSyncedIntegrationsStatusResponse(rsp) +} - if a.StartedAt != nil { - object["started_at"], err = json.Marshal(a.StartedAt) - if err != nil { - return nil, fmt.Errorf("error marshaling 'started_at': %w", err) - } +// GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusWithResponse request returning *GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse +func (c *ClientWithResponses) GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse, error) { + rsp, err := c.GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus(ctx, outputId, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// PostFleetServiceTokensWithBodyWithResponse request with arbitrary body returning *PostFleetServiceTokensResponse +func (c *ClientWithResponses) PostFleetServiceTokensWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetServiceTokensResponse, error) { + rsp, err := c.PostFleetServiceTokensWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostFleetServiceTokensResponse(rsp) } -// Getter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error. 
Returns the specified -// element and whether it was found -func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) PostFleetServiceTokensWithResponse(ctx context.Context, body PostFleetServiceTokensJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetServiceTokensResponse, error) { + rsp, err := c.PostFleetServiceTokens(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePostFleetServiceTokensResponse(rsp) } -// Setter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error -func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// GetFleetSettingsWithResponse request returning *GetFleetSettingsResponse +func (c *ClientWithResponses) GetFleetSettingsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetSettingsResponse, error) { + rsp, err := c.GetFleetSettings(ctx, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseGetFleetSettingsResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties -func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PutFleetSettingsWithBodyWithResponse request with arbitrary body returning *PutFleetSettingsResponse +func (c *ClientWithResponses) PutFleetSettingsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetSettingsResponse, error) { + rsp, err := c.PutFleetSettingsWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePutFleetSettingsResponse(rsp) +} - if raw, found := object["message"]; found { - err = json.Unmarshal(raw, &a.Message) - if err != nil { - return fmt.Errorf("error reading 'message': %w", err) - } - delete(object, "message") +func (c *ClientWithResponses) PutFleetSettingsWithResponse(ctx context.Context, body PutFleetSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetSettingsResponse, error) { + rsp, err := c.PutFleetSettings(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutFleetSettingsResponse(rsp) +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// PostFleetSetupWithResponse request returning *PostFleetSetupResponse +func (c *ClientWithResponses) PostFleetSetupWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*PostFleetSetupResponse, error) { + rsp, err := c.PostFleetSetup(ctx, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostFleetSetupResponse(rsp) +} - if raw, found := object["stack"]; found { - err = json.Unmarshal(raw, &a.Stack) - if err != nil { - return fmt.Errorf("error reading 'stack': %w", err) - } - delete(object, "stack") +// GetFleetSpaceSettingsWithResponse request returning *GetFleetSpaceSettingsResponse +func (c *ClientWithResponses) GetFleetSpaceSettingsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetSpaceSettingsResponse, error) { + rsp, err := c.GetFleetSpaceSettings(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseGetFleetSpaceSettingsResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PutFleetSpaceSettingsWithBodyWithResponse request with arbitrary body returning *PutFleetSpaceSettingsResponse +func (c *ClientWithResponses) PutFleetSpaceSettingsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetSpaceSettingsResponse, error) { + rsp, err := c.PutFleetSpaceSettingsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePutFleetSpaceSettingsResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties -func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Error) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - object["message"], err = json.Marshal(a.Message) +func (c *ClientWithResponses) PutFleetSpaceSettingsWithResponse(ctx context.Context, body PutFleetSpaceSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetSpaceSettingsResponse, error) { + rsp, err := c.PutFleetSpaceSettings(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'message': %w", err) + return nil, err } + return ParsePutFleetSpaceSettingsResponse(rsp) +} - object["name"], err = json.Marshal(a.Name) +// GetFleetUninstallTokensWithResponse request returning *GetFleetUninstallTokensResponse +func (c *ClientWithResponses) GetFleetUninstallTokensWithResponse(ctx context.Context, params *GetFleetUninstallTokensParams, reqEditors ...RequestEditorFn) (*GetFleetUninstallTokensResponse, error) { + rsp, err := c.GetFleetUninstallTokens(ctx, params, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) + return nil, err } + return ParseGetFleetUninstallTokensResponse(rsp) +} - if a.Stack != nil { - object["stack"], err = json.Marshal(a.Stack) - if err != nil { - return nil, fmt.Errorf("error marshaling 'stack': %w", err) - } +// GetFleetUninstallTokensUninstalltokenidWithResponse request returning *GetFleetUninstallTokensUninstalltokenidResponse +func (c *ClientWithResponses) GetFleetUninstallTokensUninstalltokenidWithResponse(ctx context.Context, uninstallTokenId string, reqEditors ...RequestEditorFn) (*GetFleetUninstallTokensUninstalltokenidResponse, error) { + rsp, err := c.GetFleetUninstallTokensUninstalltokenid(ctx, uninstallTokenId, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetFleetUninstallTokensUninstalltokenidResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// DeleteListWithResponse request returning *DeleteListResponse +func (c *ClientWithResponses) DeleteListWithResponse(ctx context.Context, params *DeleteListParams, reqEditors ...RequestEditorFn) (*DeleteListResponse, error) { + rsp, err := c.DeleteList(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseDeleteListResponse(rsp) } -// Getter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item. Returns the specified -// element and whether it was found -func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// ReadListWithResponse request returning *ReadListResponse +func (c *ClientWithResponses) ReadListWithResponse(ctx context.Context, params *ReadListParams, reqEditors ...RequestEditorFn) (*ReadListResponse, error) { + rsp, err := c.ReadList(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return + return ParseReadListResponse(rsp) } -// Setter for additional properties for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item -func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// PatchListWithBodyWithResponse request with arbitrary body returning *PatchListResponse +func (c *ClientWithResponses) PatchListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchListResponse, error) { + rsp, err := c.PatchListWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePatchListResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties -func (a *PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +func (c *ClientWithResponses) PatchListWithResponse(ctx context.Context, body PatchListJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchListResponse, error) { + rsp, err := c.PatchList(ctx, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePatchListResponse(rsp) +} - if raw, found := object["created_at"]; found { - err = json.Unmarshal(raw, &a.CreatedAt) - if err != nil { - return fmt.Errorf("error reading 'created_at': %w", err) - } - delete(object, "created_at") +// CreateListWithBodyWithResponse request with arbitrary body returning *CreateListResponse +func (c *ClientWithResponses) CreateListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateListResponse, error) { + rsp, err := c.CreateListWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseCreateListResponse(rsp) +} - if raw, found := object["error"]; found { - err = json.Unmarshal(raw, &a.Error) - if err != nil { - return fmt.Errorf("error reading 'error': %w", err) - } - delete(object, "error") +func (c *ClientWithResponses) CreateListWithResponse(ctx context.Context, body CreateListJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateListResponse, error) { + rsp, err := c.CreateList(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateListResponse(rsp) +} - if raw, found := object["target_version"]; found { - err = json.Unmarshal(raw, &a.TargetVersion) - if err != nil { - return fmt.Errorf("error reading 'target_version': %w", err) - } - delete(object, "target_version") +// UpdateListWithBodyWithResponse request with arbitrary body returning *UpdateListResponse +func (c *ClientWithResponses) UpdateListWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateListResponse, error) { + rsp, err := c.UpdateListWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateListResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) UpdateListWithResponse(ctx context.Context, body UpdateListJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateListResponse, error) { + rsp, err := c.UpdateList(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseUpdateListResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties -func (a PackageInfo_InstallationInfo_LatestInstallFailedAttempts_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - object["created_at"], err = json.Marshal(a.CreatedAt) +// FindListsWithResponse request returning *FindListsResponse +func (c *ClientWithResponses) FindListsWithResponse(ctx context.Context, params *FindListsParams, reqEditors ...RequestEditorFn) (*FindListsResponse, error) { + rsp, err := c.FindLists(ctx, params, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'created_at': %w", err) + return nil, err } + return ParseFindListsResponse(rsp) +} - object["error"], err = json.Marshal(a.Error) +// DeleteListIndexWithResponse request returning *DeleteListIndexResponse +func (c *ClientWithResponses) DeleteListIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*DeleteListIndexResponse, error) { + rsp, err := c.DeleteListIndex(ctx, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'error': %w", err) + return nil, err } + return ParseDeleteListIndexResponse(rsp) +} - object["target_version"], err = json.Marshal(a.TargetVersion) +// ReadListIndexWithResponse request returning *ReadListIndexResponse +func (c *ClientWithResponses) ReadListIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadListIndexResponse, error) { + rsp, err := c.ReadListIndex(ctx, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'target_version': %w", err) + return nil, err } + return ParseReadListIndexResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// CreateListIndexWithResponse request returning *CreateListIndexResponse +func (c *ClientWithResponses) CreateListIndexWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*CreateListIndexResponse, error) { + rsp, err := c.CreateListIndex(ctx, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseCreateListIndexResponse(rsp) } -// Getter for additional properties for PackageInfo_InstallationInfo. Returns the specified -// element and whether it was found -func (a PackageInfo_InstallationInfo) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// DeleteListItemWithResponse request returning *DeleteListItemResponse +func (c *ClientWithResponses) DeleteListItemWithResponse(ctx context.Context, params *DeleteListItemParams, reqEditors ...RequestEditorFn) (*DeleteListItemResponse, error) { + rsp, err := c.DeleteListItem(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return + return ParseDeleteListItemResponse(rsp) } -// Setter for additional properties for PackageInfo_InstallationInfo -func (a *PackageInfo_InstallationInfo) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// ReadListItemWithResponse request returning *ReadListItemResponse +func (c *ClientWithResponses) ReadListItemWithResponse(ctx context.Context, params *ReadListItemParams, reqEditors ...RequestEditorFn) (*ReadListItemResponse, error) { + rsp, err := c.ReadListItem(ctx, params, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseReadListItemResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo to handle AdditionalProperties -func (a *PackageInfo_InstallationInfo) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PatchListItemWithBodyWithResponse request with arbitrary body returning *PatchListItemResponse +func (c *ClientWithResponses) PatchListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchListItemResponse, error) { + rsp, err := c.PatchListItemWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePatchListItemResponse(rsp) +} - if raw, found := object["additional_spaces_installed_kibana"]; found { - err = json.Unmarshal(raw, &a.AdditionalSpacesInstalledKibana) - if err != nil { - return fmt.Errorf("error reading 'additional_spaces_installed_kibana': %w", err) - } - delete(object, "additional_spaces_installed_kibana") +func (c *ClientWithResponses) PatchListItemWithResponse(ctx context.Context, body PatchListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchListItemResponse, error) { + rsp, err := c.PatchListItem(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePatchListItemResponse(rsp) +} - if raw, found := object["created_at"]; found { - err = json.Unmarshal(raw, &a.CreatedAt) - if err != nil { - return fmt.Errorf("error reading 'created_at': %w", err) - } - delete(object, "created_at") +// CreateListItemWithBodyWithResponse request with arbitrary body returning *CreateListItemResponse +func (c *ClientWithResponses) CreateListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateListItemResponse, error) { + rsp, err := c.CreateListItemWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateListItemResponse(rsp) +} - if raw, found := object["experimental_data_stream_features"]; found { - err = json.Unmarshal(raw, &a.ExperimentalDataStreamFeatures) - if err != nil { - return fmt.Errorf("error reading 'experimental_data_stream_features': %w", err) - } - delete(object, "experimental_data_stream_features") +func (c *ClientWithResponses) CreateListItemWithResponse(ctx context.Context, body CreateListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateListItemResponse, error) { + rsp, err := c.CreateListItem(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateListItemResponse(rsp) +} - if raw, found := object["install_format_schema_version"]; found { - err = json.Unmarshal(raw, &a.InstallFormatSchemaVersion) - if err != nil { - return fmt.Errorf("error reading 'install_format_schema_version': %w", err) - } - delete(object, "install_format_schema_version") +// UpdateListItemWithBodyWithResponse request with arbitrary body returning *UpdateListItemResponse +func (c *ClientWithResponses) UpdateListItemWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateListItemResponse, error) { + rsp, err := c.UpdateListItemWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateListItemResponse(rsp) +} - if raw, found := object["install_source"]; found { - err = json.Unmarshal(raw, &a.InstallSource) - if err != nil { - return fmt.Errorf("error reading 'install_source': %w", err) - } - delete(object, "install_source") +func (c *ClientWithResponses) UpdateListItemWithResponse(ctx context.Context, body UpdateListItemJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateListItemResponse, error) { + rsp, err := c.UpdateListItem(ctx, body, reqEditors...) + if err != nil { + return nil, err } - - if raw, found := object["install_status"]; found { - err = json.Unmarshal(raw, &a.InstallStatus) - if err != nil { - return fmt.Errorf("error reading 'install_status': %w", err) - } - delete(object, "install_status") + return ParseUpdateListItemResponse(rsp) +} + +// ExportListItemsWithResponse request returning *ExportListItemsResponse +func (c *ClientWithResponses) ExportListItemsWithResponse(ctx context.Context, params *ExportListItemsParams, reqEditors ...RequestEditorFn) (*ExportListItemsResponse, error) { + rsp, err := c.ExportListItems(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseExportListItemsResponse(rsp) +} - if raw, found := object["installed_es"]; found { - err = json.Unmarshal(raw, &a.InstalledEs) - if err != nil { - return fmt.Errorf("error reading 'installed_es': %w", err) - } - delete(object, "installed_es") +// FindListItemsWithResponse request returning *FindListItemsResponse +func (c *ClientWithResponses) FindListItemsWithResponse(ctx context.Context, params *FindListItemsParams, reqEditors ...RequestEditorFn) (*FindListItemsResponse, error) { + rsp, err := c.FindListItems(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseFindListItemsResponse(rsp) +} - if raw, found := object["installed_kibana"]; found { - err = json.Unmarshal(raw, &a.InstalledKibana) - if err != nil { - return fmt.Errorf("error reading 'installed_kibana': %w", err) - } - delete(object, "installed_kibana") +// ImportListItemsWithBodyWithResponse request with arbitrary body returning *ImportListItemsResponse +func (c *ClientWithResponses) ImportListItemsWithBodyWithResponse(ctx context.Context, params *ImportListItemsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ImportListItemsResponse, error) { + rsp, err := c.ImportListItemsWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseImportListItemsResponse(rsp) +} - if raw, found := object["installed_kibana_space_id"]; found { - err = json.Unmarshal(raw, &a.InstalledKibanaSpaceId) - if err != nil { - return fmt.Errorf("error reading 'installed_kibana_space_id': %w", err) - } - delete(object, "installed_kibana_space_id") +// ReadListPrivilegesWithResponse request returning *ReadListPrivilegesResponse +func (c *ClientWithResponses) ReadListPrivilegesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*ReadListPrivilegesResponse, error) { + rsp, err := c.ReadListPrivileges(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseReadListPrivilegesResponse(rsp) +} - if raw, found := object["latest_executed_state"]; found { - err = json.Unmarshal(raw, &a.LatestExecutedState) - if err != nil { - return fmt.Errorf("error reading 'latest_executed_state': %w", err) - } - delete(object, "latest_executed_state") +// DeleteLogstashPipelineWithResponse request returning *DeleteLogstashPipelineResponse +func (c *ClientWithResponses) DeleteLogstashPipelineWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteLogstashPipelineResponse, error) { + rsp, err := c.DeleteLogstashPipeline(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteLogstashPipelineResponse(rsp) +} - if raw, found := object["latest_install_failed_attempts"]; found { - err = json.Unmarshal(raw, &a.LatestInstallFailedAttempts) - if err != nil { - return fmt.Errorf("error reading 'latest_install_failed_attempts': %w", err) - } - delete(object, "latest_install_failed_attempts") +// GetLogstashPipelineWithResponse request returning *GetLogstashPipelineResponse +func (c *ClientWithResponses) GetLogstashPipelineWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetLogstashPipelineResponse, error) { + rsp, err := c.GetLogstashPipeline(ctx, id, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetLogstashPipelineResponse(rsp) +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// PutLogstashPipelineWithBodyWithResponse request with arbitrary body returning *PutLogstashPipelineResponse +func (c *ClientWithResponses) PutLogstashPipelineWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutLogstashPipelineResponse, error) { + rsp, err := c.PutLogstashPipelineWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutLogstashPipelineResponse(rsp) +} - if raw, found := object["namespaces"]; found { - err = json.Unmarshal(raw, &a.Namespaces) - if err != nil { - return fmt.Errorf("error reading 'namespaces': %w", err) - } - delete(object, "namespaces") +func (c *ClientWithResponses) PutLogstashPipelineWithResponse(ctx context.Context, id string, body PutLogstashPipelineJSONRequestBody, reqEditors ...RequestEditorFn) (*PutLogstashPipelineResponse, error) { + rsp, err := c.PutLogstashPipeline(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutLogstashPipelineResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +// GetLogstashPipelinesWithResponse request returning *GetLogstashPipelinesResponse +func (c *ClientWithResponses) GetLogstashPipelinesWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetLogstashPipelinesResponse, error) { + rsp, err := c.GetLogstashPipelines(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseGetLogstashPipelinesResponse(rsp) +} - if raw, found := object["updated_at"]; found { - err = json.Unmarshal(raw, &a.UpdatedAt) - if err != nil { - return fmt.Errorf("error reading 'updated_at': %w", err) - } - delete(object, "updated_at") +// PostMaintenanceWindowIdArchiveWithResponse request returning *PostMaintenanceWindowIdArchiveResponse +func (c *ClientWithResponses) PostMaintenanceWindowIdArchiveWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowIdArchiveResponse, error) { + rsp, err := c.PostMaintenanceWindowIdArchive(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParsePostMaintenanceWindowIdArchiveResponse(rsp) +} - if raw, found := object["verification_key_id"]; found { - err = json.Unmarshal(raw, &a.VerificationKeyId) - if err != nil { - return fmt.Errorf("error reading 'verification_key_id': %w", err) - } - delete(object, "verification_key_id") +// PostMaintenanceWindowIdUnarchiveWithResponse request returning *PostMaintenanceWindowIdUnarchiveResponse +func (c *ClientWithResponses) PostMaintenanceWindowIdUnarchiveWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowIdUnarchiveResponse, error) { + rsp, err := c.PostMaintenanceWindowIdUnarchive(ctx, id, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostMaintenanceWindowIdUnarchiveResponse(rsp) +} - if raw, found := object["verification_status"]; found { - err = json.Unmarshal(raw, &a.VerificationStatus) - if err != nil { - return fmt.Errorf("error reading 'verification_status': %w", err) - } - delete(object, "verification_status") +// MlSyncWithResponse request returning *MlSyncResponse +func (c *ClientWithResponses) MlSyncWithResponse(ctx context.Context, params *MlSyncParams, reqEditors ...RequestEditorFn) (*MlSyncResponse, error) { + rsp, err := c.MlSync(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseMlSyncResponse(rsp) +} - if raw, found := object["version"]; found { - err = json.Unmarshal(raw, &a.Version) - if err != nil { - return fmt.Errorf("error reading 'version': %w", err) - } - delete(object, "version") +// DeleteNoteWithBodyWithResponse request with arbitrary body returning *DeleteNoteResponse +func (c *ClientWithResponses) DeleteNoteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteNoteResponse, error) { + rsp, err := c.DeleteNoteWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteNoteResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) DeleteNoteWithResponse(ctx context.Context, body DeleteNoteJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteNoteResponse, error) { + rsp, err := c.DeleteNote(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseDeleteNoteResponse(rsp) } -// Override default JSON handling for PackageInfo_InstallationInfo to handle AdditionalProperties -func (a PackageInfo_InstallationInfo) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// GetNotesWithResponse request returning *GetNotesResponse +func (c *ClientWithResponses) GetNotesWithResponse(ctx context.Context, params *GetNotesParams, reqEditors ...RequestEditorFn) (*GetNotesResponse, error) { + rsp, err := c.GetNotes(ctx, params, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetNotesResponse(rsp) +} - if a.AdditionalSpacesInstalledKibana != nil { - object["additional_spaces_installed_kibana"], err = json.Marshal(a.AdditionalSpacesInstalledKibana) - if err != nil { - return nil, fmt.Errorf("error marshaling 'additional_spaces_installed_kibana': %w", err) - } +// PersistNoteRouteWithBodyWithResponse request with arbitrary body returning *PersistNoteRouteResponse +func (c *ClientWithResponses) PersistNoteRouteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PersistNoteRouteResponse, error) { + rsp, err := c.PersistNoteRouteWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePersistNoteRouteResponse(rsp) +} - if a.CreatedAt != nil { - object["created_at"], err = json.Marshal(a.CreatedAt) - if err != nil { - return nil, fmt.Errorf("error marshaling 'created_at': %w", err) - } +func (c *ClientWithResponses) PersistNoteRouteWithResponse(ctx context.Context, body PersistNoteRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*PersistNoteRouteResponse, error) { + rsp, err := c.PersistNoteRoute(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePersistNoteRouteResponse(rsp) +} - if a.ExperimentalDataStreamFeatures != nil { - object["experimental_data_stream_features"], err = json.Marshal(a.ExperimentalDataStreamFeatures) - if err != nil { - return nil, fmt.Errorf("error marshaling 'experimental_data_stream_features': %w", err) - } +// ObservabilityAiAssistantChatCompleteWithBodyWithResponse request with arbitrary body returning *ObservabilityAiAssistantChatCompleteResponse +func (c *ClientWithResponses) ObservabilityAiAssistantChatCompleteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ObservabilityAiAssistantChatCompleteResponse, error) { + rsp, err := c.ObservabilityAiAssistantChatCompleteWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseObservabilityAiAssistantChatCompleteResponse(rsp) +} - if a.InstallFormatSchemaVersion != nil { - object["install_format_schema_version"], err = json.Marshal(a.InstallFormatSchemaVersion) - if err != nil { - return nil, fmt.Errorf("error marshaling 'install_format_schema_version': %w", err) - } +func (c *ClientWithResponses) ObservabilityAiAssistantChatCompleteWithResponse(ctx context.Context, body ObservabilityAiAssistantChatCompleteJSONRequestBody, reqEditors ...RequestEditorFn) (*ObservabilityAiAssistantChatCompleteResponse, error) { + rsp, err := c.ObservabilityAiAssistantChatComplete(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseObservabilityAiAssistantChatCompleteResponse(rsp) +} - object["install_source"], err = json.Marshal(a.InstallSource) +// OsqueryFindLiveQueriesWithResponse request returning *OsqueryFindLiveQueriesResponse +func (c *ClientWithResponses) OsqueryFindLiveQueriesWithResponse(ctx context.Context, params *OsqueryFindLiveQueriesParams, reqEditors ...RequestEditorFn) (*OsqueryFindLiveQueriesResponse, error) { + rsp, err := c.OsqueryFindLiveQueries(ctx, params, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'install_source': %w", err) + return nil, err } + return ParseOsqueryFindLiveQueriesResponse(rsp) +} - object["install_status"], err = json.Marshal(a.InstallStatus) +// OsqueryCreateLiveQueryWithBodyWithResponse request with arbitrary body returning *OsqueryCreateLiveQueryResponse +func (c *ClientWithResponses) OsqueryCreateLiveQueryWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*OsqueryCreateLiveQueryResponse, error) { + rsp, err := c.OsqueryCreateLiveQueryWithBody(ctx, contentType, body, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'install_status': %w", err) + return nil, err } + return ParseOsqueryCreateLiveQueryResponse(rsp) +} - object["installed_es"], err = json.Marshal(a.InstalledEs) +func (c *ClientWithResponses) OsqueryCreateLiveQueryWithResponse(ctx context.Context, body OsqueryCreateLiveQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*OsqueryCreateLiveQueryResponse, error) { + rsp, err := c.OsqueryCreateLiveQuery(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'installed_es': %w", err) + return nil, err } + return ParseOsqueryCreateLiveQueryResponse(rsp) +} - object["installed_kibana"], err = json.Marshal(a.InstalledKibana) +// OsqueryGetLiveQueryDetailsWithResponse request returning *OsqueryGetLiveQueryDetailsResponse +func (c *ClientWithResponses) OsqueryGetLiveQueryDetailsWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*OsqueryGetLiveQueryDetailsResponse, error) { + rsp, err := c.OsqueryGetLiveQueryDetails(ctx, id, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'installed_kibana': %w", err) + return nil, err } + return ParseOsqueryGetLiveQueryDetailsResponse(rsp) +} - if a.InstalledKibanaSpaceId != nil { - object["installed_kibana_space_id"], err = json.Marshal(a.InstalledKibanaSpaceId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'installed_kibana_space_id': %w", err) - } +// OsqueryGetLiveQueryResultsWithResponse request returning *OsqueryGetLiveQueryResultsResponse +func (c *ClientWithResponses) OsqueryGetLiveQueryResultsWithResponse(ctx context.Context, id string, actionId string, params *OsqueryGetLiveQueryResultsParams, reqEditors ...RequestEditorFn) (*OsqueryGetLiveQueryResultsResponse, error) { + rsp, err := c.OsqueryGetLiveQueryResults(ctx, id, actionId, params, reqEditors...) + if err != nil { + return nil, err } + return ParseOsqueryGetLiveQueryResultsResponse(rsp) +} - if a.LatestExecutedState != nil { - object["latest_executed_state"], err = json.Marshal(a.LatestExecutedState) - if err != nil { - return nil, fmt.Errorf("error marshaling 'latest_executed_state': %w", err) - } +// OsqueryFindPacksWithResponse request returning *OsqueryFindPacksResponse +func (c *ClientWithResponses) OsqueryFindPacksWithResponse(ctx context.Context, params *OsqueryFindPacksParams, reqEditors ...RequestEditorFn) (*OsqueryFindPacksResponse, error) { + rsp, err := c.OsqueryFindPacks(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseOsqueryFindPacksResponse(rsp) +} - if a.LatestInstallFailedAttempts != nil { - object["latest_install_failed_attempts"], err = json.Marshal(a.LatestInstallFailedAttempts) - if err != nil { - return nil, fmt.Errorf("error marshaling 'latest_install_failed_attempts': %w", err) - } +// OsqueryCreatePacksWithBodyWithResponse request with arbitrary body returning *OsqueryCreatePacksResponse +func (c *ClientWithResponses) OsqueryCreatePacksWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*OsqueryCreatePacksResponse, error) { + rsp, err := c.OsqueryCreatePacksWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseOsqueryCreatePacksResponse(rsp) +} - object["name"], err = json.Marshal(a.Name) +func (c *ClientWithResponses) OsqueryCreatePacksWithResponse(ctx context.Context, body OsqueryCreatePacksJSONRequestBody, reqEditors ...RequestEditorFn) (*OsqueryCreatePacksResponse, error) { + rsp, err := c.OsqueryCreatePacks(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) + return nil, err } + return ParseOsqueryCreatePacksResponse(rsp) +} - if a.Namespaces != nil { - object["namespaces"], err = json.Marshal(a.Namespaces) - if err != nil { - return nil, fmt.Errorf("error marshaling 'namespaces': %w", err) - } +// OsqueryDeletePacksWithResponse request returning *OsqueryDeletePacksResponse +func (c *ClientWithResponses) OsqueryDeletePacksWithResponse(ctx context.Context, id SecurityOsqueryAPIPackId, reqEditors ...RequestEditorFn) (*OsqueryDeletePacksResponse, error) { + rsp, err := c.OsqueryDeletePacks(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseOsqueryDeletePacksResponse(rsp) +} - object["type"], err = json.Marshal(a.Type) +// OsqueryGetPacksDetailsWithResponse request returning *OsqueryGetPacksDetailsResponse +func (c *ClientWithResponses) OsqueryGetPacksDetailsWithResponse(ctx context.Context, id SecurityOsqueryAPIPackId, reqEditors ...RequestEditorFn) (*OsqueryGetPacksDetailsResponse, error) { + rsp, err := c.OsqueryGetPacksDetails(ctx, id, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) + return nil, err } + return ParseOsqueryGetPacksDetailsResponse(rsp) +} - if a.UpdatedAt != nil { - object["updated_at"], err = json.Marshal(a.UpdatedAt) - if err != nil { - return nil, fmt.Errorf("error marshaling 'updated_at': %w", err) - } +// OsqueryUpdatePacksWithBodyWithResponse request with arbitrary body returning *OsqueryUpdatePacksResponse +func (c *ClientWithResponses) OsqueryUpdatePacksWithBodyWithResponse(ctx context.Context, id SecurityOsqueryAPIPackId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*OsqueryUpdatePacksResponse, error) { + rsp, err := c.OsqueryUpdatePacksWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseOsqueryUpdatePacksResponse(rsp) +} - if a.VerificationKeyId != nil { - object["verification_key_id"], err = json.Marshal(a.VerificationKeyId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'verification_key_id': %w", err) - } +func (c *ClientWithResponses) OsqueryUpdatePacksWithResponse(ctx context.Context, id SecurityOsqueryAPIPackId, body OsqueryUpdatePacksJSONRequestBody, reqEditors ...RequestEditorFn) (*OsqueryUpdatePacksResponse, error) { + rsp, err := c.OsqueryUpdatePacks(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } + return ParseOsqueryUpdatePacksResponse(rsp) +} - object["verification_status"], err = json.Marshal(a.VerificationStatus) +// OsqueryFindSavedQueriesWithResponse request returning *OsqueryFindSavedQueriesResponse +func (c *ClientWithResponses) OsqueryFindSavedQueriesWithResponse(ctx context.Context, params *OsqueryFindSavedQueriesParams, reqEditors ...RequestEditorFn) (*OsqueryFindSavedQueriesResponse, error) { + rsp, err := c.OsqueryFindSavedQueries(ctx, params, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'verification_status': %w", err) + return nil, err } + return ParseOsqueryFindSavedQueriesResponse(rsp) +} - object["version"], err = json.Marshal(a.Version) +// OsqueryCreateSavedQueryWithBodyWithResponse request with arbitrary body returning *OsqueryCreateSavedQueryResponse +func (c *ClientWithResponses) OsqueryCreateSavedQueryWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*OsqueryCreateSavedQueryResponse, error) { + rsp, err := c.OsqueryCreateSavedQueryWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'version': %w", err) + return nil, err } + return ParseOsqueryCreateSavedQueryResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) OsqueryCreateSavedQueryWithResponse(ctx context.Context, body OsqueryCreateSavedQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*OsqueryCreateSavedQueryResponse, error) { + rsp, err := c.OsqueryCreateSavedQuery(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseOsqueryCreateSavedQueryResponse(rsp) } -// Getter for additional properties for PackageInfo_Owner. Returns the specified -// element and whether it was found -func (a PackageInfo_Owner) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// OsqueryDeleteSavedQueryWithResponse request returning *OsqueryDeleteSavedQueryResponse +func (c *ClientWithResponses) OsqueryDeleteSavedQueryWithResponse(ctx context.Context, id SecurityOsqueryAPISavedQueryId, reqEditors ...RequestEditorFn) (*OsqueryDeleteSavedQueryResponse, error) { + rsp, err := c.OsqueryDeleteSavedQuery(ctx, id, reqEditors...) + if err != nil { + return nil, err } - return + return ParseOsqueryDeleteSavedQueryResponse(rsp) } -// Setter for additional properties for PackageInfo_Owner -func (a *PackageInfo_Owner) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// OsqueryGetSavedQueryDetailsWithResponse request returning *OsqueryGetSavedQueryDetailsResponse +func (c *ClientWithResponses) OsqueryGetSavedQueryDetailsWithResponse(ctx context.Context, id SecurityOsqueryAPISavedQueryId, reqEditors ...RequestEditorFn) (*OsqueryGetSavedQueryDetailsResponse, error) { + rsp, err := c.OsqueryGetSavedQueryDetails(ctx, id, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseOsqueryGetSavedQueryDetailsResponse(rsp) } -// Override default JSON handling for PackageInfo_Owner to handle AdditionalProperties -func (a *PackageInfo_Owner) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// OsqueryUpdateSavedQueryWithBodyWithResponse request with arbitrary body returning *OsqueryUpdateSavedQueryResponse +func (c *ClientWithResponses) OsqueryUpdateSavedQueryWithBodyWithResponse(ctx context.Context, id SecurityOsqueryAPISavedQueryId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*OsqueryUpdateSavedQueryResponse, error) { + rsp, err := c.OsqueryUpdateSavedQueryWithBody(ctx, id, contentType, body, reqEditors...) 
if err != nil { - return err + return nil, err } + return ParseOsqueryUpdateSavedQueryResponse(rsp) +} - if raw, found := object["github"]; found { - err = json.Unmarshal(raw, &a.Github) - if err != nil { - return fmt.Errorf("error reading 'github': %w", err) - } - delete(object, "github") +func (c *ClientWithResponses) OsqueryUpdateSavedQueryWithResponse(ctx context.Context, id SecurityOsqueryAPISavedQueryId, body OsqueryUpdateSavedQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*OsqueryUpdateSavedQueryResponse, error) { + rsp, err := c.OsqueryUpdateSavedQuery(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } + return ParseOsqueryUpdateSavedQueryResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +// PersistPinnedEventRouteWithBodyWithResponse request with arbitrary body returning *PersistPinnedEventRouteResponse +func (c *ClientWithResponses) PersistPinnedEventRouteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PersistPinnedEventRouteResponse, error) { + rsp, err := c.PersistPinnedEventRouteWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePersistPinnedEventRouteResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PersistPinnedEventRouteWithResponse(ctx context.Context, body PersistPinnedEventRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*PersistPinnedEventRouteResponse, error) { + rsp, err := c.PersistPinnedEventRoute(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePersistPinnedEventRouteResponse(rsp) } -// Override default JSON handling for PackageInfo_Owner to handle AdditionalProperties -func (a PackageInfo_Owner) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// CleanUpRiskEngineWithResponse request returning *CleanUpRiskEngineResponse +func (c *ClientWithResponses) CleanUpRiskEngineWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*CleanUpRiskEngineResponse, error) { + rsp, err := c.CleanUpRiskEngine(ctx, reqEditors...) + if err != nil { + return nil, err + } + return ParseCleanUpRiskEngineResponse(rsp) +} - if a.Github != nil { - object["github"], err = json.Marshal(a.Github) - if err != nil { - return nil, fmt.Errorf("error marshaling 'github': %w", err) - } +// ConfigureRiskEngineSavedObjectWithBodyWithResponse request with arbitrary body returning *ConfigureRiskEngineSavedObjectResponse +func (c *ClientWithResponses) ConfigureRiskEngineSavedObjectWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ConfigureRiskEngineSavedObjectResponse, error) { + rsp, err := c.ConfigureRiskEngineSavedObjectWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseConfigureRiskEngineSavedObjectResponse(rsp) +} - if a.Type != nil { - object["type"], err = json.Marshal(a.Type) - if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) - } +func (c *ClientWithResponses) ConfigureRiskEngineSavedObjectWithResponse(ctx context.Context, body ConfigureRiskEngineSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*ConfigureRiskEngineSavedObjectResponse, error) { + rsp, err := c.ConfigureRiskEngineSavedObject(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseConfigureRiskEngineSavedObjectResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// ScheduleRiskEngineNowWithBodyWithResponse request with arbitrary body returning *ScheduleRiskEngineNowResponse +func (c *ClientWithResponses) ScheduleRiskEngineNowWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ScheduleRiskEngineNowResponse, error) { + rsp, err := c.ScheduleRiskEngineNowWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseScheduleRiskEngineNowResponse(rsp) } -// Getter for additional properties for PackageInfo_Source. Returns the specified -// element and whether it was found -func (a PackageInfo_Source) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) ScheduleRiskEngineNowWithResponse(ctx context.Context, body ScheduleRiskEngineNowJSONRequestBody, reqEditors ...RequestEditorFn) (*ScheduleRiskEngineNowResponse, error) { + rsp, err := c.ScheduleRiskEngineNow(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParseScheduleRiskEngineNowResponse(rsp) } -// Setter for additional properties for PackageInfo_Source -func (a *PackageInfo_Source) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// BulkCreateSavedObjectsWithBodyWithResponse request with arbitrary body returning *BulkCreateSavedObjectsResponse +func (c *ClientWithResponses) BulkCreateSavedObjectsWithBodyWithResponse(ctx context.Context, params *BulkCreateSavedObjectsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkCreateSavedObjectsResponse, error) { + rsp, err := c.BulkCreateSavedObjectsWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseBulkCreateSavedObjectsResponse(rsp) } -// Override default JSON handling for PackageInfo_Source to handle AdditionalProperties -func (a *PackageInfo_Source) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +func (c *ClientWithResponses) BulkCreateSavedObjectsWithResponse(ctx context.Context, params *BulkCreateSavedObjectsParams, body BulkCreateSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkCreateSavedObjectsResponse, error) { + rsp, err := c.BulkCreateSavedObjects(ctx, params, body, reqEditors...) 
if err != nil { - return err + return nil, err } + return ParseBulkCreateSavedObjectsResponse(rsp) +} - if raw, found := object["license"]; found { - err = json.Unmarshal(raw, &a.License) - if err != nil { - return fmt.Errorf("error reading 'license': %w", err) - } - delete(object, "license") +// BulkDeleteSavedObjectsWithBodyWithResponse request with arbitrary body returning *BulkDeleteSavedObjectsResponse +func (c *ClientWithResponses) BulkDeleteSavedObjectsWithBodyWithResponse(ctx context.Context, params *BulkDeleteSavedObjectsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkDeleteSavedObjectsResponse, error) { + rsp, err := c.BulkDeleteSavedObjectsWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseBulkDeleteSavedObjectsResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) BulkDeleteSavedObjectsWithResponse(ctx context.Context, params *BulkDeleteSavedObjectsParams, body BulkDeleteSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkDeleteSavedObjectsResponse, error) { + rsp, err := c.BulkDeleteSavedObjects(ctx, params, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseBulkDeleteSavedObjectsResponse(rsp) } -// Override default JSON handling for PackageInfo_Source to handle AdditionalProperties -func (a PackageInfo_Source) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// BulkGetSavedObjectsWithBodyWithResponse request with arbitrary body returning *BulkGetSavedObjectsResponse +func (c *ClientWithResponses) BulkGetSavedObjectsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkGetSavedObjectsResponse, error) { + rsp, err := c.BulkGetSavedObjectsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseBulkGetSavedObjectsResponse(rsp) +} - object["license"], err = json.Marshal(a.License) +func (c *ClientWithResponses) BulkGetSavedObjectsWithResponse(ctx context.Context, body BulkGetSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkGetSavedObjectsResponse, error) { + rsp, err := c.BulkGetSavedObjects(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'license': %w", err) + return nil, err } + return ParseBulkGetSavedObjectsResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// BulkResolveSavedObjectsWithBodyWithResponse request with arbitrary body returning *BulkResolveSavedObjectsResponse +func (c *ClientWithResponses) BulkResolveSavedObjectsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkResolveSavedObjectsResponse, error) { + rsp, err := c.BulkResolveSavedObjectsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseBulkResolveSavedObjectsResponse(rsp) } -// Getter for additional properties for PackageListItem. 
Returns the specified -// element and whether it was found -func (a PackageListItem) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) BulkResolveSavedObjectsWithResponse(ctx context.Context, body BulkResolveSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkResolveSavedObjectsResponse, error) { + rsp, err := c.BulkResolveSavedObjects(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParseBulkResolveSavedObjectsResponse(rsp) } -// Setter for additional properties for PackageListItem -func (a *PackageListItem) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// BulkUpdateSavedObjectsWithBodyWithResponse request with arbitrary body returning *BulkUpdateSavedObjectsResponse +func (c *ClientWithResponses) BulkUpdateSavedObjectsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkUpdateSavedObjectsResponse, error) { + rsp, err := c.BulkUpdateSavedObjectsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseBulkUpdateSavedObjectsResponse(rsp) } -// Override default JSON handling for PackageListItem to handle AdditionalProperties -func (a *PackageListItem) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +func (c *ClientWithResponses) BulkUpdateSavedObjectsWithResponse(ctx context.Context, body BulkUpdateSavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkUpdateSavedObjectsResponse, error) { + rsp, err := c.BulkUpdateSavedObjects(ctx, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseBulkUpdateSavedObjectsResponse(rsp) +} - if raw, found := object["categories"]; found { - err = json.Unmarshal(raw, &a.Categories) - if err != nil { - return fmt.Errorf("error reading 'categories': %w", err) - } - delete(object, "categories") +// PostSavedObjectsExportWithBodyWithResponse request with arbitrary body returning *PostSavedObjectsExportResponse +func (c *ClientWithResponses) PostSavedObjectsExportWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSavedObjectsExportResponse, error) { + rsp, err := c.PostSavedObjectsExportWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostSavedObjectsExportResponse(rsp) +} - if raw, found := object["conditions"]; found { - err = json.Unmarshal(raw, &a.Conditions) - if err != nil { - return fmt.Errorf("error reading 'conditions': %w", err) - } - delete(object, "conditions") +func (c *ClientWithResponses) PostSavedObjectsExportWithResponse(ctx context.Context, body PostSavedObjectsExportJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSavedObjectsExportResponse, error) { + rsp, err := c.PostSavedObjectsExport(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostSavedObjectsExportResponse(rsp) +} - if raw, found := object["data_streams"]; found { - err = json.Unmarshal(raw, &a.DataStreams) - if err != nil { - return fmt.Errorf("error reading 'data_streams': %w", err) - } - delete(object, "data_streams") +// FindSavedObjectsWithResponse request returning *FindSavedObjectsResponse +func (c *ClientWithResponses) FindSavedObjectsWithResponse(ctx context.Context, params *FindSavedObjectsParams, reqEditors ...RequestEditorFn) (*FindSavedObjectsResponse, error) { + rsp, err := c.FindSavedObjects(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseFindSavedObjectsResponse(rsp) +} - if raw, found := object["description"]; found { - err = json.Unmarshal(raw, &a.Description) - if err != nil { - return fmt.Errorf("error reading 'description': %w", err) - } - delete(object, "description") +// PostSavedObjectsImportWithBodyWithResponse request with arbitrary body returning *PostSavedObjectsImportResponse +func (c *ClientWithResponses) PostSavedObjectsImportWithBodyWithResponse(ctx context.Context, params *PostSavedObjectsImportParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSavedObjectsImportResponse, error) { + rsp, err := c.PostSavedObjectsImportWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostSavedObjectsImportResponse(rsp) +} - if raw, found := object["discovery"]; found { - err = json.Unmarshal(raw, &a.Discovery) - if err != nil { - return fmt.Errorf("error reading 'discovery': %w", err) - } - delete(object, "discovery") +// ResolveImportErrorsWithBodyWithResponse request with arbitrary body returning *ResolveImportErrorsResponse +func (c *ClientWithResponses) ResolveImportErrorsWithBodyWithResponse(ctx context.Context, params *ResolveImportErrorsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ResolveImportErrorsResponse, error) { + rsp, err := c.ResolveImportErrorsWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseResolveImportErrorsResponse(rsp) +} - if raw, found := object["download"]; found { - err = json.Unmarshal(raw, &a.Download) - if err != nil { - return fmt.Errorf("error reading 'download': %w", err) - } - delete(object, "download") +// ResolveSavedObjectWithResponse request returning *ResolveSavedObjectResponse +func (c *ClientWithResponses) ResolveSavedObjectWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, reqEditors ...RequestEditorFn) (*ResolveSavedObjectResponse, error) { + rsp, err := c.ResolveSavedObject(ctx, pType, id, reqEditors...) + if err != nil { + return nil, err } + return ParseResolveSavedObjectResponse(rsp) +} - if raw, found := object["format_version"]; found { - err = json.Unmarshal(raw, &a.FormatVersion) - if err != nil { - return fmt.Errorf("error reading 'format_version': %w", err) - } - delete(object, "format_version") +// CreateSavedObjectWithBodyWithResponse request with arbitrary body returning *CreateSavedObjectResponse +func (c *ClientWithResponses) CreateSavedObjectWithBodyWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, params *CreateSavedObjectParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateSavedObjectResponse, error) { + rsp, err := c.CreateSavedObjectWithBody(ctx, pType, params, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseCreateSavedObjectResponse(rsp) +} - if raw, found := object["icons"]; found { - err = json.Unmarshal(raw, &a.Icons) - if err != nil { - return fmt.Errorf("error reading 'icons': %w", err) - } - delete(object, "icons") +func (c *ClientWithResponses) CreateSavedObjectWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, params *CreateSavedObjectParams, body CreateSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateSavedObjectResponse, error) { + rsp, err := c.CreateSavedObject(ctx, pType, params, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateSavedObjectResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// GetSavedObjectWithResponse request returning *GetSavedObjectResponse +func (c *ClientWithResponses) GetSavedObjectWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, reqEditors ...RequestEditorFn) (*GetSavedObjectResponse, error) { + rsp, err := c.GetSavedObject(ctx, pType, id, reqEditors...) + if err != nil { + return nil, err } + return ParseGetSavedObjectResponse(rsp) +} - if raw, found := object["installationInfo"]; found { - err = json.Unmarshal(raw, &a.InstallationInfo) - if err != nil { - return fmt.Errorf("error reading 'installationInfo': %w", err) - } - delete(object, "installationInfo") +// CreateSavedObjectIdWithBodyWithResponse request with arbitrary body returning *CreateSavedObjectIdResponse +func (c *ClientWithResponses) CreateSavedObjectIdWithBodyWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, params *CreateSavedObjectIdParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateSavedObjectIdResponse, error) { + rsp, err := c.CreateSavedObjectIdWithBody(ctx, pType, id, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateSavedObjectIdResponse(rsp) +} - if raw, found := object["integration"]; found { - err = json.Unmarshal(raw, &a.Integration) - if err != nil { - return fmt.Errorf("error reading 'integration': %w", err) - } - delete(object, "integration") +func (c *ClientWithResponses) CreateSavedObjectIdWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, params *CreateSavedObjectIdParams, body CreateSavedObjectIdJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateSavedObjectIdResponse, error) { + rsp, err := c.CreateSavedObjectId(ctx, pType, id, params, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateSavedObjectIdResponse(rsp) +} - if raw, found := object["internal"]; found { - err = json.Unmarshal(raw, &a.Internal) - if err != nil { - return fmt.Errorf("error reading 'internal': %w", err) - } - delete(object, "internal") +// UpdateSavedObjectWithBodyWithResponse request with arbitrary body returning *UpdateSavedObjectResponse +func (c *ClientWithResponses) UpdateSavedObjectWithBodyWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateSavedObjectResponse, error) { + rsp, err := c.UpdateSavedObjectWithBody(ctx, pType, id, contentType, body, reqEditors...) 
+	if err != nil {
+		return nil, err
 	}
+	return ParseUpdateSavedObjectResponse(rsp)
+}
 
-	if raw, found := object["latestVersion"]; found {
-		err = json.Unmarshal(raw, &a.LatestVersion)
-		if err != nil {
-			return fmt.Errorf("error reading 'latestVersion': %w", err)
-		}
-		delete(object, "latestVersion")
+func (c *ClientWithResponses) UpdateSavedObjectWithResponse(ctx context.Context, pType SavedObjectsSavedObjectType, id SavedObjectsSavedObjectId, body UpdateSavedObjectJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateSavedObjectResponse, error) {
+	rsp, err := c.UpdateSavedObject(ctx, pType, id, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParseUpdateSavedObjectResponse(rsp)
+}
 
-	if raw, found := object["name"]; found {
-		err = json.Unmarshal(raw, &a.Name)
-		if err != nil {
-			return fmt.Errorf("error reading 'name': %w", err)
-		}
-		delete(object, "name")
+// GetSecurityRoleWithResponse request returning *GetSecurityRoleResponse
+func (c *ClientWithResponses) GetSecurityRoleWithResponse(ctx context.Context, params *GetSecurityRoleParams, reqEditors ...RequestEditorFn) (*GetSecurityRoleResponse, error) {
+	rsp, err := c.GetSecurityRole(ctx, params, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParseGetSecurityRoleResponse(rsp)
+}
 
-	if raw, found := object["owner"]; found {
-		err = json.Unmarshal(raw, &a.Owner)
-		if err != nil {
-			return fmt.Errorf("error reading 'owner': %w", err)
-		}
-		delete(object, "owner")
+// PostSecurityRoleQueryWithBodyWithResponse request with arbitrary body returning *PostSecurityRoleQueryResponse
+func (c *ClientWithResponses) PostSecurityRoleQueryWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSecurityRoleQueryResponse, error) {
+	rsp, err := c.PostSecurityRoleQueryWithBody(ctx, contentType, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParsePostSecurityRoleQueryResponse(rsp)
+}
 
-	if raw, found := object["path"]; found {
-		err = json.Unmarshal(raw, &a.Path)
-		if err != nil {
-			return fmt.Errorf("error reading 'path': %w", err)
-		}
-		delete(object, "path")
+func (c *ClientWithResponses) PostSecurityRoleQueryWithResponse(ctx context.Context, body PostSecurityRoleQueryJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSecurityRoleQueryResponse, error) {
+	rsp, err := c.PostSecurityRoleQuery(ctx, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParsePostSecurityRoleQueryResponse(rsp)
+}
 
-	if raw, found := object["policy_templates"]; found {
-		err = json.Unmarshal(raw, &a.PolicyTemplates)
-		if err != nil {
-			return fmt.Errorf("error reading 'policy_templates': %w", err)
-		}
-		delete(object, "policy_templates")
+// DeleteSecurityRoleNameWithResponse request returning *DeleteSecurityRoleNameResponse
+func (c *ClientWithResponses) DeleteSecurityRoleNameWithResponse(ctx context.Context, name string, reqEditors ...RequestEditorFn) (*DeleteSecurityRoleNameResponse, error) {
+	rsp, err := c.DeleteSecurityRoleName(ctx, name, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParseDeleteSecurityRoleNameResponse(rsp)
+}
 
-	if raw, found := object["readme"]; found {
-		err = json.Unmarshal(raw, &a.Readme)
-		if err != nil {
-			return fmt.Errorf("error reading 'readme': %w", err)
-		}
-		delete(object, "readme")
+// GetSecurityRoleNameWithResponse request returning *GetSecurityRoleNameResponse
+func (c *ClientWithResponses) GetSecurityRoleNameWithResponse(ctx context.Context, name string, params *GetSecurityRoleNameParams, reqEditors ...RequestEditorFn) (*GetSecurityRoleNameResponse, error) {
+	rsp, err := c.GetSecurityRoleName(ctx, name, params, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParseGetSecurityRoleNameResponse(rsp)
+}
 
-	if raw, found := object["release"]; found {
-		err = json.Unmarshal(raw, &a.Release)
-		if err != nil {
-			return fmt.Errorf("error reading 'release': %w", err)
-		}
-		delete(object, "release")
+// PutSecurityRoleNameWithBodyWithResponse request with arbitrary body returning *PutSecurityRoleNameResponse
+func (c *ClientWithResponses) PutSecurityRoleNameWithBodyWithResponse(ctx context.Context, name string, params *PutSecurityRoleNameParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutSecurityRoleNameResponse, error) {
+	rsp, err := c.PutSecurityRoleNameWithBody(ctx, name, params, contentType, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParsePutSecurityRoleNameResponse(rsp)
+}
 
-	if raw, found := object["signature_path"]; found {
-		err = json.Unmarshal(raw, &a.SignaturePath)
-		if err != nil {
-			return fmt.Errorf("error reading 'signature_path': %w", err)
-		}
-		delete(object, "signature_path")
+func (c *ClientWithResponses) PutSecurityRoleNameWithResponse(ctx context.Context, name string, params *PutSecurityRoleNameParams, body PutSecurityRoleNameJSONRequestBody, reqEditors ...RequestEditorFn) (*PutSecurityRoleNameResponse, error) {
+	rsp, err := c.PutSecurityRoleName(ctx, name, params, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParsePutSecurityRoleNameResponse(rsp)
+}
 
-	if raw, found := object["source"]; found {
-		err = json.Unmarshal(raw, &a.Source)
-		if err != nil {
-			return fmt.Errorf("error reading 'source': %w", err)
-		}
-		delete(object, "source")
+// PostSecurityRolesWithBodyWithResponse request with arbitrary body returning *PostSecurityRolesResponse
+func (c *ClientWithResponses) PostSecurityRolesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSecurityRolesResponse, error) {
+	rsp, err := c.PostSecurityRolesWithBody(ctx, contentType, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParsePostSecurityRolesResponse(rsp)
+}
 
-	if raw, found := object["status"]; found {
-		err = json.Unmarshal(raw, &a.Status)
-		if err != nil {
-			return fmt.Errorf("error reading 'status': %w", err)
-		}
-		delete(object, "status")
+func (c *ClientWithResponses) PostSecurityRolesWithResponse(ctx context.Context, body PostSecurityRolesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSecurityRolesResponse, error) {
+	rsp, err := c.PostSecurityRoles(ctx, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParsePostSecurityRolesResponse(rsp)
+}
 
-	if raw, found := object["title"]; found {
-		err = json.Unmarshal(raw, &a.Title)
-		if err != nil {
-			return fmt.Errorf("error reading 'title': %w", err)
-		}
-		delete(object, "title")
+// PostSecuritySessionInvalidateWithBodyWithResponse request with arbitrary body returning *PostSecuritySessionInvalidateResponse
+func (c *ClientWithResponses) PostSecuritySessionInvalidateWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSecuritySessionInvalidateResponse, error) {
+	rsp, err := c.PostSecuritySessionInvalidateWithBody(ctx, contentType, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParsePostSecuritySessionInvalidateResponse(rsp)
+}
 
-	if raw, found := object["type"]; found {
-		err = json.Unmarshal(raw, &a.Type)
-		if err != nil {
-			return fmt.Errorf("error reading 'type': %w", err)
-		}
-		delete(object, "type")
+func (c *ClientWithResponses) PostSecuritySessionInvalidateWithResponse(ctx context.Context, body PostSecuritySessionInvalidateJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSecuritySessionInvalidateResponse, error) {
+	rsp, err := c.PostSecuritySessionInvalidate(ctx, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParsePostSecuritySessionInvalidateResponse(rsp)
+}
 
-	if raw, found := object["vars"]; found {
-		err = json.Unmarshal(raw, &a.Vars)
-		if err != nil {
-			return fmt.Errorf("error reading 'vars': %w", err)
-		}
-		delete(object, "vars")
+// PerformAnonymizationFieldsBulkActionWithBodyWithResponse request with arbitrary body returning *PerformAnonymizationFieldsBulkActionResponse
+func (c *ClientWithResponses) PerformAnonymizationFieldsBulkActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PerformAnonymizationFieldsBulkActionResponse, error) {
+	rsp, err := c.PerformAnonymizationFieldsBulkActionWithBody(ctx, contentType, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParsePerformAnonymizationFieldsBulkActionResponse(rsp)
+}
 
-	if raw, found := object["version"]; found {
-		err = json.Unmarshal(raw, &a.Version)
-		if err != nil {
-			return fmt.Errorf("error reading 'version': %w", err)
-		}
-		delete(object, "version")
+func (c *ClientWithResponses) PerformAnonymizationFieldsBulkActionWithResponse(ctx context.Context, body PerformAnonymizationFieldsBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*PerformAnonymizationFieldsBulkActionResponse, error) {
+	rsp, err := c.PerformAnonymizationFieldsBulkAction(ctx, body, reqEditors...)
+	if err != nil {
+		return nil, err
 	}
+	return ParsePerformAnonymizationFieldsBulkActionResponse(rsp)
+}
 
-	if len(object) != 0 {
-		a.AdditionalProperties = make(map[string]interface{})
-		for fieldName, fieldBuf := range object {
-			var fieldVal interface{}
-			err := json.Unmarshal(fieldBuf, &fieldVal)
-			if err != nil {
-				return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err)
-			}
-			a.AdditionalProperties[fieldName] = fieldVal
-		}
+// FindAnonymizationFieldsWithResponse request returning *FindAnonymizationFieldsResponse
+func (c *ClientWithResponses) FindAnonymizationFieldsWithResponse(ctx context.Context, params *FindAnonymizationFieldsParams, reqEditors ...RequestEditorFn) (*FindAnonymizationFieldsResponse, error) {
+	rsp, err := c.FindAnonymizationFields(ctx, params, reqEditors...)
+ if err != nil { + return nil, err } - return nil + return ParseFindAnonymizationFieldsResponse(rsp) } -// Override default JSON handling for PackageListItem to handle AdditionalProperties -func (a PackageListItem) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// ChatCompleteWithBodyWithResponse request with arbitrary body returning *ChatCompleteResponse +func (c *ClientWithResponses) ChatCompleteWithBodyWithResponse(ctx context.Context, params *ChatCompleteParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ChatCompleteResponse, error) { + rsp, err := c.ChatCompleteWithBody(ctx, params, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseChatCompleteResponse(rsp) +} - if a.Categories != nil { - object["categories"], err = json.Marshal(a.Categories) - if err != nil { - return nil, fmt.Errorf("error marshaling 'categories': %w", err) - } +func (c *ClientWithResponses) ChatCompleteWithResponse(ctx context.Context, params *ChatCompleteParams, body ChatCompleteJSONRequestBody, reqEditors ...RequestEditorFn) (*ChatCompleteResponse, error) { + rsp, err := c.ChatComplete(ctx, params, body, reqEditors...) + if err != nil { + return nil, err } + return ParseChatCompleteResponse(rsp) +} - if a.Conditions != nil { - object["conditions"], err = json.Marshal(a.Conditions) - if err != nil { - return nil, fmt.Errorf("error marshaling 'conditions': %w", err) - } +// DeleteAllConversationsWithBodyWithResponse request with arbitrary body returning *DeleteAllConversationsResponse +func (c *ClientWithResponses) DeleteAllConversationsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteAllConversationsResponse, error) { + rsp, err := c.DeleteAllConversationsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteAllConversationsResponse(rsp) +} - if a.DataStreams != nil { - object["data_streams"], err = json.Marshal(a.DataStreams) - if err != nil { - return nil, fmt.Errorf("error marshaling 'data_streams': %w", err) - } +func (c *ClientWithResponses) DeleteAllConversationsWithResponse(ctx context.Context, body DeleteAllConversationsJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteAllConversationsResponse, error) { + rsp, err := c.DeleteAllConversations(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteAllConversationsResponse(rsp) +} - if a.Description != nil { - object["description"], err = json.Marshal(a.Description) - if err != nil { - return nil, fmt.Errorf("error marshaling 'description': %w", err) - } +// CreateConversationWithBodyWithResponse request with arbitrary body returning *CreateConversationResponse +func (c *ClientWithResponses) CreateConversationWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateConversationResponse, error) { + rsp, err := c.CreateConversationWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseCreateConversationResponse(rsp) +} - if a.Discovery != nil { - object["discovery"], err = json.Marshal(a.Discovery) - if err != nil { - return nil, fmt.Errorf("error marshaling 'discovery': %w", err) - } +func (c *ClientWithResponses) CreateConversationWithResponse(ctx context.Context, body CreateConversationJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateConversationResponse, error) { + rsp, err := c.CreateConversation(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateConversationResponse(rsp) +} - if a.Download != nil { - object["download"], err = json.Marshal(a.Download) - if err != nil { - return nil, fmt.Errorf("error marshaling 'download': %w", err) - } +// FindConversationsWithResponse request returning *FindConversationsResponse +func (c *ClientWithResponses) FindConversationsWithResponse(ctx context.Context, params *FindConversationsParams, reqEditors ...RequestEditorFn) (*FindConversationsResponse, error) { + rsp, err := c.FindConversations(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseFindConversationsResponse(rsp) +} - if a.FormatVersion != nil { - object["format_version"], err = json.Marshal(a.FormatVersion) - if err != nil { - return nil, fmt.Errorf("error marshaling 'format_version': %w", err) - } +// DeleteConversationWithResponse request returning *DeleteConversationResponse +func (c *ClientWithResponses) DeleteConversationWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*DeleteConversationResponse, error) { + rsp, err := c.DeleteConversation(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteConversationResponse(rsp) +} - if a.Icons != nil { - object["icons"], err = json.Marshal(a.Icons) - if err != nil { - return nil, fmt.Errorf("error marshaling 'icons': %w", err) - } +// ReadConversationWithResponse request returning *ReadConversationResponse +func (c *ClientWithResponses) ReadConversationWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*ReadConversationResponse, error) { + rsp, err := c.ReadConversation(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseReadConversationResponse(rsp) +} - object["id"], err = json.Marshal(a.Id) +// UpdateConversationWithBodyWithResponse request with arbitrary body returning *UpdateConversationResponse +func (c *ClientWithResponses) UpdateConversationWithBodyWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateConversationResponse, error) { + rsp, err := c.UpdateConversationWithBody(ctx, id, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) + return nil, err } + return ParseUpdateConversationResponse(rsp) +} - if a.InstallationInfo != nil { - object["installationInfo"], err = json.Marshal(a.InstallationInfo) - if err != nil { - return nil, fmt.Errorf("error marshaling 'installationInfo': %w", err) - } +func (c *ClientWithResponses) UpdateConversationWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, body UpdateConversationJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateConversationResponse, error) { + rsp, err := c.UpdateConversation(ctx, id, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseUpdateConversationResponse(rsp) +} - if a.Integration != nil { - object["integration"], err = json.Marshal(a.Integration) - if err != nil { - return nil, fmt.Errorf("error marshaling 'integration': %w", err) - } +// CreateKnowledgeBaseEntryWithBodyWithResponse request with arbitrary body returning *CreateKnowledgeBaseEntryResponse +func (c *ClientWithResponses) CreateKnowledgeBaseEntryWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateKnowledgeBaseEntryResponse, error) { + rsp, err := c.CreateKnowledgeBaseEntryWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateKnowledgeBaseEntryResponse(rsp) +} - if a.Internal != nil { - object["internal"], err = json.Marshal(a.Internal) - if err != nil { - return nil, fmt.Errorf("error marshaling 'internal': %w", err) - } +func (c *ClientWithResponses) CreateKnowledgeBaseEntryWithResponse(ctx context.Context, body CreateKnowledgeBaseEntryJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateKnowledgeBaseEntryResponse, error) { + rsp, err := c.CreateKnowledgeBaseEntry(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateKnowledgeBaseEntryResponse(rsp) +} - if a.LatestVersion != nil { - object["latestVersion"], err = json.Marshal(a.LatestVersion) - if err != nil { - return nil, fmt.Errorf("error marshaling 'latestVersion': %w", err) - } +// PerformKnowledgeBaseEntryBulkActionWithBodyWithResponse request with arbitrary body returning *PerformKnowledgeBaseEntryBulkActionResponse +func (c *ClientWithResponses) PerformKnowledgeBaseEntryBulkActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PerformKnowledgeBaseEntryBulkActionResponse, error) { + rsp, err := c.PerformKnowledgeBaseEntryBulkActionWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePerformKnowledgeBaseEntryBulkActionResponse(rsp) +} - object["name"], err = json.Marshal(a.Name) +func (c *ClientWithResponses) PerformKnowledgeBaseEntryBulkActionWithResponse(ctx context.Context, body PerformKnowledgeBaseEntryBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*PerformKnowledgeBaseEntryBulkActionResponse, error) { + rsp, err := c.PerformKnowledgeBaseEntryBulkAction(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) + return nil, err } + return ParsePerformKnowledgeBaseEntryBulkActionResponse(rsp) +} - if a.Owner != nil { - object["owner"], err = json.Marshal(a.Owner) - if err != nil { - return nil, fmt.Errorf("error marshaling 'owner': %w", err) - } +// FindKnowledgeBaseEntriesWithResponse request returning *FindKnowledgeBaseEntriesResponse +func (c *ClientWithResponses) FindKnowledgeBaseEntriesWithResponse(ctx context.Context, params *FindKnowledgeBaseEntriesParams, reqEditors ...RequestEditorFn) (*FindKnowledgeBaseEntriesResponse, error) { + rsp, err := c.FindKnowledgeBaseEntries(ctx, params, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseFindKnowledgeBaseEntriesResponse(rsp) +} - if a.Path != nil { - object["path"], err = json.Marshal(a.Path) - if err != nil { - return nil, fmt.Errorf("error marshaling 'path': %w", err) - } +// DeleteKnowledgeBaseEntryWithResponse request returning *DeleteKnowledgeBaseEntryResponse +func (c *ClientWithResponses) DeleteKnowledgeBaseEntryWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*DeleteKnowledgeBaseEntryResponse, error) { + rsp, err := c.DeleteKnowledgeBaseEntry(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteKnowledgeBaseEntryResponse(rsp) +} - if a.PolicyTemplates != nil { - object["policy_templates"], err = json.Marshal(a.PolicyTemplates) - if err != nil { - return nil, fmt.Errorf("error marshaling 'policy_templates': %w", err) - } +// ReadKnowledgeBaseEntryWithResponse request returning *ReadKnowledgeBaseEntryResponse +func (c *ClientWithResponses) ReadKnowledgeBaseEntryWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, reqEditors ...RequestEditorFn) (*ReadKnowledgeBaseEntryResponse, error) { + rsp, err := c.ReadKnowledgeBaseEntry(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseReadKnowledgeBaseEntryResponse(rsp) +} - if a.Readme != nil { - object["readme"], err = json.Marshal(a.Readme) - if err != nil { - return nil, fmt.Errorf("error marshaling 'readme': %w", err) - } +// UpdateKnowledgeBaseEntryWithBodyWithResponse request with arbitrary body returning *UpdateKnowledgeBaseEntryResponse +func (c *ClientWithResponses) UpdateKnowledgeBaseEntryWithBodyWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateKnowledgeBaseEntryResponse, error) { + rsp, err := c.UpdateKnowledgeBaseEntryWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateKnowledgeBaseEntryResponse(rsp) +} - if a.Release != nil { - object["release"], err = json.Marshal(a.Release) - if err != nil { - return nil, fmt.Errorf("error marshaling 'release': %w", err) - } +func (c *ClientWithResponses) UpdateKnowledgeBaseEntryWithResponse(ctx context.Context, id SecurityAIAssistantAPINonEmptyString, body UpdateKnowledgeBaseEntryJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateKnowledgeBaseEntryResponse, error) { + rsp, err := c.UpdateKnowledgeBaseEntry(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateKnowledgeBaseEntryResponse(rsp) +} - if a.SignaturePath != nil { - object["signature_path"], err = json.Marshal(a.SignaturePath) - if err != nil { - return nil, fmt.Errorf("error marshaling 'signature_path': %w", err) - } +// ReadKnowledgeBaseWithResponse request returning *ReadKnowledgeBaseResponse +func (c *ClientWithResponses) ReadKnowledgeBaseWithResponse(ctx context.Context, resource string, reqEditors ...RequestEditorFn) (*ReadKnowledgeBaseResponse, error) { + rsp, err := c.ReadKnowledgeBase(ctx, resource, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseReadKnowledgeBaseResponse(rsp) +} - if a.Source != nil { - object["source"], err = json.Marshal(a.Source) - if err != nil { - return nil, fmt.Errorf("error marshaling 'source': %w", err) - } +// CreateKnowledgeBaseWithResponse request returning *CreateKnowledgeBaseResponse +func (c *ClientWithResponses) CreateKnowledgeBaseWithResponse(ctx context.Context, resource string, params *CreateKnowledgeBaseParams, reqEditors ...RequestEditorFn) (*CreateKnowledgeBaseResponse, error) { + rsp, err := c.CreateKnowledgeBase(ctx, resource, params, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateKnowledgeBaseResponse(rsp) +} - if a.Status != nil { - object["status"], err = json.Marshal(a.Status) - if err != nil { - return nil, fmt.Errorf("error marshaling 'status': %w", err) - } +// PerformPromptsBulkActionWithBodyWithResponse request with arbitrary body returning *PerformPromptsBulkActionResponse +func (c *ClientWithResponses) PerformPromptsBulkActionWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PerformPromptsBulkActionResponse, error) { + rsp, err := c.PerformPromptsBulkActionWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePerformPromptsBulkActionResponse(rsp) +} - object["title"], err = json.Marshal(a.Title) +func (c *ClientWithResponses) PerformPromptsBulkActionWithResponse(ctx context.Context, body PerformPromptsBulkActionJSONRequestBody, reqEditors ...RequestEditorFn) (*PerformPromptsBulkActionResponse, error) { + rsp, err := c.PerformPromptsBulkAction(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'title': %w", err) + return nil, err } + return ParsePerformPromptsBulkActionResponse(rsp) +} - if a.Type != nil { - object["type"], err = json.Marshal(a.Type) - if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) - } +// FindPromptsWithResponse request returning *FindPromptsResponse +func (c *ClientWithResponses) FindPromptsWithResponse(ctx context.Context, params *FindPromptsParams, reqEditors ...RequestEditorFn) (*FindPromptsResponse, error) { + rsp, err := c.FindPrompts(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseFindPromptsResponse(rsp) +} - if a.Vars != nil { - object["vars"], err = json.Marshal(a.Vars) - if err != nil { - return nil, fmt.Errorf("error marshaling 'vars': %w", err) - } +// PostUrlWithBodyWithResponse request with arbitrary body returning *PostUrlResponse +func (c *ClientWithResponses) PostUrlWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostUrlResponse, error) { + rsp, err := c.PostUrlWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostUrlResponse(rsp) +} - object["version"], err = json.Marshal(a.Version) +func (c *ClientWithResponses) PostUrlWithResponse(ctx context.Context, body PostUrlJSONRequestBody, reqEditors ...RequestEditorFn) (*PostUrlResponse, error) { + rsp, err := c.PostUrl(ctx, body, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'version': %w", err) + return nil, err } + return ParsePostUrlResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// ResolveUrlWithResponse request returning *ResolveUrlResponse +func (c *ClientWithResponses) ResolveUrlWithResponse(ctx context.Context, slug string, reqEditors ...RequestEditorFn) (*ResolveUrlResponse, error) { + rsp, err := c.ResolveUrl(ctx, slug, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseResolveUrlResponse(rsp) } -// Getter for additional properties for PackageListItem_Conditions_Elastic. Returns the specified -// element and whether it was found -func (a PackageListItem_Conditions_Elastic) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// DeleteUrlWithResponse request returning *DeleteUrlResponse +func (c *ClientWithResponses) DeleteUrlWithResponse(ctx context.Context, id ShortURLAPIsIdParam, reqEditors ...RequestEditorFn) (*DeleteUrlResponse, error) { + rsp, err := c.DeleteUrl(ctx, id, reqEditors...) + if err != nil { + return nil, err } - return + return ParseDeleteUrlResponse(rsp) } -// Setter for additional properties for PackageListItem_Conditions_Elastic -func (a *PackageListItem_Conditions_Elastic) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// GetUrlWithResponse request returning *GetUrlResponse +func (c *ClientWithResponses) GetUrlWithResponse(ctx context.Context, id ShortURLAPIsIdParam, reqEditors ...RequestEditorFn) (*GetUrlResponse, error) { + rsp, err := c.GetUrl(ctx, id, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseGetUrlResponse(rsp) } -// Override default JSON handling for PackageListItem_Conditions_Elastic to handle AdditionalProperties -func (a *PackageListItem_Conditions_Elastic) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PostSpacesCopySavedObjectsWithBodyWithResponse request with arbitrary body returning *PostSpacesCopySavedObjectsResponse +func (c *ClientWithResponses) PostSpacesCopySavedObjectsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesCopySavedObjectsResponse, error) { + rsp, err := c.PostSpacesCopySavedObjectsWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePostSpacesCopySavedObjectsResponse(rsp) +} - if raw, found := object["capabilities"]; found { - err = json.Unmarshal(raw, &a.Capabilities) - if err != nil { - return fmt.Errorf("error reading 'capabilities': %w", err) - } - delete(object, "capabilities") +func (c *ClientWithResponses) PostSpacesCopySavedObjectsWithResponse(ctx context.Context, body PostSpacesCopySavedObjectsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesCopySavedObjectsResponse, error) { + rsp, err := c.PostSpacesCopySavedObjects(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostSpacesCopySavedObjectsResponse(rsp) +} - if raw, found := object["subscription"]; found { - err = json.Unmarshal(raw, &a.Subscription) - if err != nil { - return fmt.Errorf("error reading 'subscription': %w", err) - } - delete(object, "subscription") +// PostSpacesDisableLegacyUrlAliasesWithBodyWithResponse request with arbitrary body returning *PostSpacesDisableLegacyUrlAliasesResponse +func (c *ClientWithResponses) PostSpacesDisableLegacyUrlAliasesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesDisableLegacyUrlAliasesResponse, error) { + rsp, err := c.PostSpacesDisableLegacyUrlAliasesWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostSpacesDisableLegacyUrlAliasesResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PostSpacesDisableLegacyUrlAliasesWithResponse(ctx context.Context, body PostSpacesDisableLegacyUrlAliasesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesDisableLegacyUrlAliasesResponse, error) { + rsp, err := c.PostSpacesDisableLegacyUrlAliases(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePostSpacesDisableLegacyUrlAliasesResponse(rsp) } -// Override default JSON handling for PackageListItem_Conditions_Elastic to handle AdditionalProperties -func (a PackageListItem_Conditions_Elastic) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// PostSpacesGetShareableReferencesWithBodyWithResponse request with arbitrary body returning *PostSpacesGetShareableReferencesResponse +func (c *ClientWithResponses) PostSpacesGetShareableReferencesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesGetShareableReferencesResponse, error) { + rsp, err := c.PostSpacesGetShareableReferencesWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostSpacesGetShareableReferencesResponse(rsp) +} - if a.Capabilities != nil { - object["capabilities"], err = json.Marshal(a.Capabilities) - if err != nil { - return nil, fmt.Errorf("error marshaling 'capabilities': %w", err) - } +func (c *ClientWithResponses) PostSpacesGetShareableReferencesWithResponse(ctx context.Context, body PostSpacesGetShareableReferencesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesGetShareableReferencesResponse, error) { + rsp, err := c.PostSpacesGetShareableReferences(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostSpacesGetShareableReferencesResponse(rsp) +} - if a.Subscription != nil { - object["subscription"], err = json.Marshal(a.Subscription) - if err != nil { - return nil, fmt.Errorf("error marshaling 'subscription': %w", err) - } +// PostSpacesResolveCopySavedObjectsErrorsWithBodyWithResponse request with arbitrary body returning *PostSpacesResolveCopySavedObjectsErrorsResponse +func (c *ClientWithResponses) PostSpacesResolveCopySavedObjectsErrorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesResolveCopySavedObjectsErrorsResponse, error) { + rsp, err := c.PostSpacesResolveCopySavedObjectsErrorsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostSpacesResolveCopySavedObjectsErrorsResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) PostSpacesResolveCopySavedObjectsErrorsWithResponse(ctx context.Context, body PostSpacesResolveCopySavedObjectsErrorsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesResolveCopySavedObjectsErrorsResponse, error) { + rsp, err := c.PostSpacesResolveCopySavedObjectsErrors(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostSpacesResolveCopySavedObjectsErrorsResponse(rsp) } -// Getter for additional properties for PackageListItem_Conditions_Kibana. Returns the specified -// element and whether it was found -func (a PackageListItem_Conditions_Kibana) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// PostSpacesUpdateObjectsSpacesWithBodyWithResponse request with arbitrary body returning *PostSpacesUpdateObjectsSpacesResponse +func (c *ClientWithResponses) PostSpacesUpdateObjectsSpacesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesUpdateObjectsSpacesResponse, error) { + rsp, err := c.PostSpacesUpdateObjectsSpacesWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePostSpacesUpdateObjectsSpacesResponse(rsp) } -// Setter for additional properties for PackageListItem_Conditions_Kibana -func (a *PackageListItem_Conditions_Kibana) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) PostSpacesUpdateObjectsSpacesWithResponse(ctx context.Context, body PostSpacesUpdateObjectsSpacesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesUpdateObjectsSpacesResponse, error) { + rsp, err := c.PostSpacesUpdateObjectsSpaces(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostSpacesUpdateObjectsSpacesResponse(rsp) } -// Override default JSON handling for PackageListItem_Conditions_Kibana to handle AdditionalProperties -func (a *PackageListItem_Conditions_Kibana) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// GetSpacesSpaceWithResponse request returning *GetSpacesSpaceResponse +func (c *ClientWithResponses) GetSpacesSpaceWithResponse(ctx context.Context, params *GetSpacesSpaceParams, reqEditors ...RequestEditorFn) (*GetSpacesSpaceResponse, error) { + rsp, err := c.GetSpacesSpace(ctx, params, reqEditors...) if err != nil { - return err + return nil, err } + return ParseGetSpacesSpaceResponse(rsp) +} - if raw, found := object["version"]; found { - err = json.Unmarshal(raw, &a.Version) - if err != nil { - return fmt.Errorf("error reading 'version': %w", err) - } - delete(object, "version") +// PostSpacesSpaceWithBodyWithResponse request with arbitrary body returning *PostSpacesSpaceResponse +func (c *ClientWithResponses) PostSpacesSpaceWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSpacesSpaceResponse, error) { + rsp, err := c.PostSpacesSpaceWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostSpacesSpaceResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PostSpacesSpaceWithResponse(ctx context.Context, body PostSpacesSpaceJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSpacesSpaceResponse, error) { + rsp, err := c.PostSpacesSpace(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePostSpacesSpaceResponse(rsp) } -// Override default JSON handling for PackageListItem_Conditions_Kibana to handle AdditionalProperties -func (a PackageListItem_Conditions_Kibana) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// DeleteSpacesSpaceIdWithResponse request returning *DeleteSpacesSpaceIdResponse +func (c *ClientWithResponses) DeleteSpacesSpaceIdWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteSpacesSpaceIdResponse, error) { + rsp, err := c.DeleteSpacesSpaceId(ctx, id, reqEditors...) + if err != nil { + return nil, err + } + return ParseDeleteSpacesSpaceIdResponse(rsp) +} - if a.Version != nil { - object["version"], err = json.Marshal(a.Version) - if err != nil { - return nil, fmt.Errorf("error marshaling 'version': %w", err) - } +// GetSpacesSpaceIdWithResponse request returning *GetSpacesSpaceIdResponse +func (c *ClientWithResponses) GetSpacesSpaceIdWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetSpacesSpaceIdResponse, error) { + rsp, err := c.GetSpacesSpaceId(ctx, id, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetSpacesSpaceIdResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// PutSpacesSpaceIdWithBodyWithResponse request with arbitrary body returning *PutSpacesSpaceIdResponse +func (c *ClientWithResponses) PutSpacesSpaceIdWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutSpacesSpaceIdResponse, error) { + rsp, err := c.PutSpacesSpaceIdWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePutSpacesSpaceIdResponse(rsp) } -// Getter for additional properties for PackageListItem_Conditions. Returns the specified -// element and whether it was found -func (a PackageListItem_Conditions) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) PutSpacesSpaceIdWithResponse(ctx context.Context, id string, body PutSpacesSpaceIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PutSpacesSpaceIdResponse, error) { + rsp, err := c.PutSpacesSpaceId(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePutSpacesSpaceIdResponse(rsp) } -// Setter for additional properties for PackageListItem_Conditions -func (a *PackageListItem_Conditions) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// GetStatusWithResponse request returning *GetStatusResponse +func (c *ClientWithResponses) GetStatusWithResponse(ctx context.Context, params *GetStatusParams, reqEditors ...RequestEditorFn) (*GetStatusResponse, error) { + rsp, err := c.GetStatus(ctx, params, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseGetStatusResponse(rsp) } -// Override default JSON handling for PackageListItem_Conditions to handle AdditionalProperties -func (a *PackageListItem_Conditions) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// GetStreamsWithBodyWithResponse request with arbitrary body returning *GetStreamsResponse +func (c *ClientWithResponses) GetStreamsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsResponse, error) { + rsp, err := c.GetStreamsWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseGetStreamsResponse(rsp) +} - if raw, found := object["elastic"]; found { - err = json.Unmarshal(raw, &a.Elastic) - if err != nil { - return fmt.Errorf("error reading 'elastic': %w", err) - } - delete(object, "elastic") +func (c *ClientWithResponses) GetStreamsWithResponse(ctx context.Context, body GetStreamsJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsResponse, error) { + rsp, err := c.GetStreams(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetStreamsResponse(rsp) +} - if raw, found := object["kibana"]; found { - err = json.Unmarshal(raw, &a.Kibana) - if err != nil { - return fmt.Errorf("error reading 'kibana': %w", err) - } - delete(object, "kibana") +// PostStreamsDisableWithBodyWithResponse request with arbitrary body returning *PostStreamsDisableResponse +func (c *ClientWithResponses) PostStreamsDisableWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsDisableResponse, error) { + rsp, err := c.PostStreamsDisableWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostStreamsDisableResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PostStreamsDisableWithResponse(ctx context.Context, body PostStreamsDisableJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsDisableResponse, error) { + rsp, err := c.PostStreamsDisable(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePostStreamsDisableResponse(rsp) } -// Override default JSON handling for PackageListItem_Conditions to handle AdditionalProperties -func (a PackageListItem_Conditions) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// PostStreamsEnableWithBodyWithResponse request with arbitrary body returning *PostStreamsEnableResponse +func (c *ClientWithResponses) PostStreamsEnableWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsEnableResponse, error) { + rsp, err := c.PostStreamsEnableWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostStreamsEnableResponse(rsp) +} - if a.Elastic != nil { - object["elastic"], err = json.Marshal(a.Elastic) - if err != nil { - return nil, fmt.Errorf("error marshaling 'elastic': %w", err) - } +func (c *ClientWithResponses) PostStreamsEnableWithResponse(ctx context.Context, body PostStreamsEnableJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsEnableResponse, error) { + rsp, err := c.PostStreamsEnable(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostStreamsEnableResponse(rsp) +} - if a.Kibana != nil { - object["kibana"], err = json.Marshal(a.Kibana) - if err != nil { - return nil, fmt.Errorf("error marshaling 'kibana': %w", err) - } +// PostStreamsResyncWithBodyWithResponse request with arbitrary body returning *PostStreamsResyncResponse +func (c *ClientWithResponses) PostStreamsResyncWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsResyncResponse, error) { + rsp, err := c.PostStreamsResyncWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostStreamsResyncResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) PostStreamsResyncWithResponse(ctx context.Context, body PostStreamsResyncJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsResyncResponse, error) { + rsp, err := c.PostStreamsResync(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostStreamsResyncResponse(rsp) } -// Getter for additional properties for PackageListItem_Discovery_Fields_Item. Returns the specified -// element and whether it was found -func (a PackageListItem_Discovery_Fields_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// DeleteStreamsNameWithBodyWithResponse request with arbitrary body returning *DeleteStreamsNameResponse +func (c *ClientWithResponses) DeleteStreamsNameWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteStreamsNameResponse, error) { + rsp, err := c.DeleteStreamsNameWithBody(ctx, name, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParseDeleteStreamsNameResponse(rsp) } -// Setter for additional properties for PackageListItem_Discovery_Fields_Item -func (a *PackageListItem_Discovery_Fields_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) DeleteStreamsNameWithResponse(ctx context.Context, name string, body DeleteStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteStreamsNameResponse, error) { + rsp, err := c.DeleteStreamsName(ctx, name, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseDeleteStreamsNameResponse(rsp) } -// Override default JSON handling for PackageListItem_Discovery_Fields_Item to handle AdditionalProperties -func (a *PackageListItem_Discovery_Fields_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// GetStreamsNameWithBodyWithResponse request with arbitrary body returning *GetStreamsNameResponse +func (c *ClientWithResponses) GetStreamsNameWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameResponse, error) { + rsp, err := c.GetStreamsNameWithBody(ctx, name, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseGetStreamsNameResponse(rsp) +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +func (c *ClientWithResponses) GetStreamsNameWithResponse(ctx context.Context, name string, body GetStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameResponse, error) { + rsp, err := c.GetStreamsName(ctx, name, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetStreamsNameResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PutStreamsNameWithBodyWithResponse request with arbitrary body returning *PutStreamsNameResponse +func (c *ClientWithResponses) PutStreamsNameWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameResponse, error) { + rsp, err := c.PutStreamsNameWithBody(ctx, name, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePutStreamsNameResponse(rsp) } -// Override default JSON handling for PackageListItem_Discovery_Fields_Item to handle AdditionalProperties -func (a PackageListItem_Discovery_Fields_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) PutStreamsNameWithResponse(ctx context.Context, name string, body PutStreamsNameJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameResponse, error) { + rsp, err := c.PutStreamsName(ctx, name, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePutStreamsNameResponse(rsp) +} - object["name"], err = json.Marshal(a.Name) +// PostStreamsNameForkWithBodyWithResponse request with arbitrary body returning *PostStreamsNameForkResponse +func (c *ClientWithResponses) PostStreamsNameForkWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameForkResponse, error) { + rsp, err := c.PostStreamsNameForkWithBody(ctx, name, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) + return nil, err } + return ParsePostStreamsNameForkResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) PostStreamsNameForkWithResponse(ctx context.Context, name string, body PostStreamsNameForkJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsNameForkResponse, error) { + rsp, err := c.PostStreamsNameFork(ctx, name, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostStreamsNameForkResponse(rsp) } -// Getter for additional properties for PackageListItem_Discovery. Returns the specified -// element and whether it was found -func (a PackageListItem_Discovery) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// GetStreamsNameGroupWithBodyWithResponse request with arbitrary body returning *GetStreamsNameGroupResponse +func (c *ClientWithResponses) GetStreamsNameGroupWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameGroupResponse, error) { + rsp, err := c.GetStreamsNameGroupWithBody(ctx, name, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - return + return ParseGetStreamsNameGroupResponse(rsp) } -// Setter for additional properties for PackageListItem_Discovery -func (a *PackageListItem_Discovery) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) GetStreamsNameGroupWithResponse(ctx context.Context, name string, body GetStreamsNameGroupJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameGroupResponse, error) { + rsp, err := c.GetStreamsNameGroup(ctx, name, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseGetStreamsNameGroupResponse(rsp) } -// Override default JSON handling for PackageListItem_Discovery to handle AdditionalProperties -func (a *PackageListItem_Discovery) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PutStreamsNameGroupWithBodyWithResponse request with arbitrary body returning *PutStreamsNameGroupResponse +func (c *ClientWithResponses) PutStreamsNameGroupWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameGroupResponse, error) { + rsp, err := c.PutStreamsNameGroupWithBody(ctx, name, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePutStreamsNameGroupResponse(rsp) +} - if raw, found := object["fields"]; found { - err = json.Unmarshal(raw, &a.Fields) - if err != nil { - return fmt.Errorf("error reading 'fields': %w", err) - } - delete(object, "fields") +func (c *ClientWithResponses) PutStreamsNameGroupWithResponse(ctx context.Context, name string, body PutStreamsNameGroupJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameGroupResponse, error) { + rsp, err := c.PutStreamsNameGroup(ctx, name, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutStreamsNameGroupResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// GetStreamsNameIngestWithBodyWithResponse request with arbitrary body returning *GetStreamsNameIngestResponse +func (c *ClientWithResponses) GetStreamsNameIngestWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameIngestResponse, error) { + rsp, err := c.GetStreamsNameIngestWithBody(ctx, name, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseGetStreamsNameIngestResponse(rsp) } -// Override default JSON handling for PackageListItem_Discovery to handle AdditionalProperties -func (a PackageListItem_Discovery) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) GetStreamsNameIngestWithResponse(ctx context.Context, name string, body GetStreamsNameIngestJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameIngestResponse, error) { + rsp, err := c.GetStreamsNameIngest(ctx, name, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParseGetStreamsNameIngestResponse(rsp) +} - if a.Fields != nil { - object["fields"], err = json.Marshal(a.Fields) - if err != nil { - return nil, fmt.Errorf("error marshaling 'fields': %w", err) - } +// PutStreamsNameIngestWithBodyWithResponse request with arbitrary body returning *PutStreamsNameIngestResponse +func (c *ClientWithResponses) PutStreamsNameIngestWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameIngestResponse, error) { + rsp, err := c.PutStreamsNameIngestWithBody(ctx, name, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutStreamsNameIngestResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) PutStreamsNameIngestWithResponse(ctx context.Context, name string, body PutStreamsNameIngestJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameIngestResponse, error) { + rsp, err := c.PutStreamsNameIngest(ctx, name, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePutStreamsNameIngestResponse(rsp) } -// Getter for additional properties for PackageListItem_Icons_Item. Returns the specified -// element and whether it was found -func (a PackageListItem_Icons_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// PostStreamsNameContentExportWithBodyWithResponse request with arbitrary body returning *PostStreamsNameContentExportResponse +func (c *ClientWithResponses) PostStreamsNameContentExportWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameContentExportResponse, error) { + rsp, err := c.PostStreamsNameContentExportWithBody(ctx, name, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePostStreamsNameContentExportResponse(rsp) } -// Setter for additional properties for PackageListItem_Icons_Item -func (a *PackageListItem_Icons_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) PostStreamsNameContentExportWithResponse(ctx context.Context, name string, body PostStreamsNameContentExportJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsNameContentExportResponse, error) { + rsp, err := c.PostStreamsNameContentExport(ctx, name, body, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostStreamsNameContentExportResponse(rsp) } -// Override default JSON handling for PackageListItem_Icons_Item to handle AdditionalProperties -func (a *PackageListItem_Icons_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PostStreamsNameContentImportWithBodyWithResponse request with arbitrary body returning *PostStreamsNameContentImportResponse +func (c *ClientWithResponses) PostStreamsNameContentImportWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameContentImportResponse, error) { + rsp, err := c.PostStreamsNameContentImportWithBody(ctx, name, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePostStreamsNameContentImportResponse(rsp) +} - if raw, found := object["dark_mode"]; found { - err = json.Unmarshal(raw, &a.DarkMode) - if err != nil { - return fmt.Errorf("error reading 'dark_mode': %w", err) - } - delete(object, "dark_mode") +// GetStreamsNameDashboardsWithBodyWithResponse request with arbitrary body returning *GetStreamsNameDashboardsResponse +func (c *ClientWithResponses) GetStreamsNameDashboardsWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameDashboardsResponse, error) { + rsp, err := c.GetStreamsNameDashboardsWithBody(ctx, name, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseGetStreamsNameDashboardsResponse(rsp) +} - if raw, found := object["path"]; found { - err = json.Unmarshal(raw, &a.Path) - if err != nil { - return fmt.Errorf("error reading 'path': %w", err) - } - delete(object, "path") +func (c *ClientWithResponses) GetStreamsNameDashboardsWithResponse(ctx context.Context, name string, body GetStreamsNameDashboardsJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameDashboardsResponse, error) { + rsp, err := c.GetStreamsNameDashboards(ctx, name, body, reqEditors...) + if err != nil { + return nil, err } + return ParseGetStreamsNameDashboardsResponse(rsp) +} - if raw, found := object["size"]; found { - err = json.Unmarshal(raw, &a.Size) - if err != nil { - return fmt.Errorf("error reading 'size': %w", err) - } - delete(object, "size") +// PostStreamsNameDashboardsBulkWithBodyWithResponse request with arbitrary body returning *PostStreamsNameDashboardsBulkResponse +func (c *ClientWithResponses) PostStreamsNameDashboardsBulkWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameDashboardsBulkResponse, error) { + rsp, err := c.PostStreamsNameDashboardsBulkWithBody(ctx, name, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostStreamsNameDashboardsBulkResponse(rsp) +} - if raw, found := object["src"]; found { - err = json.Unmarshal(raw, &a.Src) - if err != nil { - return fmt.Errorf("error reading 'src': %w", err) - } - delete(object, "src") +func (c *ClientWithResponses) PostStreamsNameDashboardsBulkWithResponse(ctx context.Context, name string, body PostStreamsNameDashboardsBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsNameDashboardsBulkResponse, error) { + rsp, err := c.PostStreamsNameDashboardsBulk(ctx, name, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePostStreamsNameDashboardsBulkResponse(rsp) +} - if raw, found := object["title"]; found { - err = json.Unmarshal(raw, &a.Title) - if err != nil { - return fmt.Errorf("error reading 'title': %w", err) - } - delete(object, "title") +// DeleteStreamsNameDashboardsDashboardidWithBodyWithResponse request with arbitrary body returning *DeleteStreamsNameDashboardsDashboardidResponse +func (c *ClientWithResponses) DeleteStreamsNameDashboardsDashboardidWithBodyWithResponse(ctx context.Context, name string, dashboardId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteStreamsNameDashboardsDashboardidResponse, error) { + rsp, err := c.DeleteStreamsNameDashboardsDashboardidWithBody(ctx, name, dashboardId, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteStreamsNameDashboardsDashboardidResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +func (c *ClientWithResponses) DeleteStreamsNameDashboardsDashboardidWithResponse(ctx context.Context, name string, dashboardId string, body DeleteStreamsNameDashboardsDashboardidJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteStreamsNameDashboardsDashboardidResponse, error) { + rsp, err := c.DeleteStreamsNameDashboardsDashboardid(ctx, name, dashboardId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteStreamsNameDashboardsDashboardidResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PutStreamsNameDashboardsDashboardidWithBodyWithResponse request with arbitrary body returning *PutStreamsNameDashboardsDashboardidResponse +func (c *ClientWithResponses) PutStreamsNameDashboardsDashboardidWithBodyWithResponse(ctx context.Context, name string, dashboardId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameDashboardsDashboardidResponse, error) { + rsp, err := c.PutStreamsNameDashboardsDashboardidWithBody(ctx, name, dashboardId, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePutStreamsNameDashboardsDashboardidResponse(rsp) } -// Override default JSON handling for PackageListItem_Icons_Item to handle AdditionalProperties -func (a PackageListItem_Icons_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) PutStreamsNameDashboardsDashboardidWithResponse(ctx context.Context, name string, dashboardId string, body PutStreamsNameDashboardsDashboardidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameDashboardsDashboardidResponse, error) { + rsp, err := c.PutStreamsNameDashboardsDashboardid(ctx, name, dashboardId, body, reqEditors...) 
+ if err != nil { + return nil, err + } + return ParsePutStreamsNameDashboardsDashboardidResponse(rsp) +} - if a.DarkMode != nil { - object["dark_mode"], err = json.Marshal(a.DarkMode) - if err != nil { - return nil, fmt.Errorf("error marshaling 'dark_mode': %w", err) - } +// GetStreamsNameQueriesWithBodyWithResponse request with arbitrary body returning *GetStreamsNameQueriesResponse +func (c *ClientWithResponses) GetStreamsNameQueriesWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameQueriesResponse, error) { + rsp, err := c.GetStreamsNameQueriesWithBody(ctx, name, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseGetStreamsNameQueriesResponse(rsp) +} - if a.Path != nil { - object["path"], err = json.Marshal(a.Path) - if err != nil { - return nil, fmt.Errorf("error marshaling 'path': %w", err) - } +func (c *ClientWithResponses) GetStreamsNameQueriesWithResponse(ctx context.Context, name string, body GetStreamsNameQueriesJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameQueriesResponse, error) { + rsp, err := c.GetStreamsNameQueries(ctx, name, body, reqEditors...) + if err != nil { + return nil, err } + return ParseGetStreamsNameQueriesResponse(rsp) +} - if a.Size != nil { - object["size"], err = json.Marshal(a.Size) - if err != nil { - return nil, fmt.Errorf("error marshaling 'size': %w", err) - } +// PostStreamsNameQueriesBulkWithBodyWithResponse request with arbitrary body returning *PostStreamsNameQueriesBulkResponse +func (c *ClientWithResponses) PostStreamsNameQueriesBulkWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameQueriesBulkResponse, error) { + rsp, err := c.PostStreamsNameQueriesBulkWithBody(ctx, name, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostStreamsNameQueriesBulkResponse(rsp) +} - object["src"], err = json.Marshal(a.Src) +func (c *ClientWithResponses) PostStreamsNameQueriesBulkWithResponse(ctx context.Context, name string, body PostStreamsNameQueriesBulkJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsNameQueriesBulkResponse, error) { + rsp, err := c.PostStreamsNameQueriesBulk(ctx, name, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'src': %w", err) + return nil, err } + return ParsePostStreamsNameQueriesBulkResponse(rsp) +} - if a.Title != nil { - object["title"], err = json.Marshal(a.Title) - if err != nil { - return nil, fmt.Errorf("error marshaling 'title': %w", err) - } +// DeleteStreamsNameQueriesQueryidWithBodyWithResponse request with arbitrary body returning *DeleteStreamsNameQueriesQueryidResponse +func (c *ClientWithResponses) DeleteStreamsNameQueriesQueryidWithBodyWithResponse(ctx context.Context, name string, queryId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteStreamsNameQueriesQueryidResponse, error) { + rsp, err := c.DeleteStreamsNameQueriesQueryidWithBody(ctx, name, queryId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseDeleteStreamsNameQueriesQueryidResponse(rsp) +} - if a.Type != nil { - object["type"], err = json.Marshal(a.Type) - if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) - } +func (c *ClientWithResponses) DeleteStreamsNameQueriesQueryidWithResponse(ctx context.Context, name string, queryId string, body DeleteStreamsNameQueriesQueryidJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteStreamsNameQueriesQueryidResponse, error) { + rsp, err := c.DeleteStreamsNameQueriesQueryid(ctx, name, queryId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteStreamsNameQueriesQueryidResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// PutStreamsNameQueriesQueryidWithBodyWithResponse request with arbitrary body returning *PutStreamsNameQueriesQueryidResponse +func (c *ClientWithResponses) PutStreamsNameQueriesQueryidWithBodyWithResponse(ctx context.Context, name string, queryId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameQueriesQueryidResponse, error) { + rsp, err := c.PutStreamsNameQueriesQueryidWithBody(ctx, name, queryId, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePutStreamsNameQueriesQueryidResponse(rsp) } -// Getter for additional properties for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item. Returns the specified -// element and whether it was found -func (a PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) PutStreamsNameQueriesQueryidWithResponse(ctx context.Context, name string, queryId string, body PutStreamsNameQueriesQueryidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameQueriesQueryidResponse, error) { + rsp, err := c.PutStreamsNameQueriesQueryid(ctx, name, queryId, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePutStreamsNameQueriesQueryidResponse(rsp) } -// Setter for additional properties for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item -func (a *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// GetStreamsNameRulesWithBodyWithResponse request with arbitrary body returning *GetStreamsNameRulesResponse +func (c *ClientWithResponses) GetStreamsNameRulesWithBodyWithResponse(ctx context.Context, name string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameRulesResponse, error) { + rsp, err := c.GetStreamsNameRulesWithBody(ctx, name, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseGetStreamsNameRulesResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties -func (a *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +func (c *ClientWithResponses) GetStreamsNameRulesWithResponse(ctx context.Context, name string, body GetStreamsNameRulesJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameRulesResponse, error) { + rsp, err := c.GetStreamsNameRules(ctx, name, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseGetStreamsNameRulesResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// DeleteStreamsNameRulesRuleidWithBodyWithResponse request with arbitrary body returning *DeleteStreamsNameRulesRuleidResponse +func (c *ClientWithResponses) DeleteStreamsNameRulesRuleidWithBodyWithResponse(ctx context.Context, name string, ruleId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteStreamsNameRulesRuleidResponse, error) { + rsp, err := c.DeleteStreamsNameRulesRuleidWithBody(ctx, name, ruleId, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteStreamsNameRulesRuleidResponse(rsp) +} - if raw, found := object["originId"]; found { - err = json.Unmarshal(raw, &a.OriginId) - if err != nil { - return fmt.Errorf("error reading 'originId': %w", err) - } - delete(object, "originId") +func (c *ClientWithResponses) DeleteStreamsNameRulesRuleidWithResponse(ctx context.Context, name string, ruleId string, body DeleteStreamsNameRulesRuleidJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteStreamsNameRulesRuleidResponse, error) { + rsp, err := c.DeleteStreamsNameRulesRuleid(ctx, name, ruleId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteStreamsNameRulesRuleidResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +// PutStreamsNameRulesRuleidWithBodyWithResponse request with arbitrary body returning *PutStreamsNameRulesRuleidResponse +func (c *ClientWithResponses) PutStreamsNameRulesRuleidWithBodyWithResponse(ctx context.Context, name string, ruleId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutStreamsNameRulesRuleidResponse, error) { + rsp, err := c.PutStreamsNameRulesRuleidWithBody(ctx, name, ruleId, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePutStreamsNameRulesRuleidResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PutStreamsNameRulesRuleidWithResponse(ctx context.Context, name string, ruleId string, body PutStreamsNameRulesRuleidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutStreamsNameRulesRuleidResponse, error) { + rsp, err := c.PutStreamsNameRulesRuleid(ctx, name, ruleId, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePutStreamsNameRulesRuleidResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item to handle AdditionalProperties -func (a PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// GetStreamsNameSignificantEventsWithBodyWithResponse request with arbitrary body returning *GetStreamsNameSignificantEventsResponse +func (c *ClientWithResponses) GetStreamsNameSignificantEventsWithBodyWithResponse(ctx context.Context, name string, params *GetStreamsNameSignificantEventsParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameSignificantEventsResponse, error) { + rsp, err := c.GetStreamsNameSignificantEventsWithBody(ctx, name, params, contentType, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetStreamsNameSignificantEventsResponse(rsp) +} - object["id"], err = json.Marshal(a.Id) +func (c *ClientWithResponses) GetStreamsNameSignificantEventsWithResponse(ctx context.Context, name string, params *GetStreamsNameSignificantEventsParams, body GetStreamsNameSignificantEventsJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameSignificantEventsResponse, error) { + rsp, err := c.GetStreamsNameSignificantEvents(ctx, name, params, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) + return nil, err } + return ParseGetStreamsNameSignificantEventsResponse(rsp) +} - if a.OriginId != nil { - object["originId"], err = json.Marshal(a.OriginId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'originId': %w", err) - } +// GetStreamsNameSignificantEventsGenerateWithBodyWithResponse request with arbitrary body returning *GetStreamsNameSignificantEventsGenerateResponse +func (c *ClientWithResponses) GetStreamsNameSignificantEventsGenerateWithBodyWithResponse(ctx context.Context, name string, params *GetStreamsNameSignificantEventsGenerateParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*GetStreamsNameSignificantEventsGenerateResponse, error) { + rsp, err := c.GetStreamsNameSignificantEventsGenerateWithBody(ctx, name, params, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetStreamsNameSignificantEventsGenerateResponse(rsp) +} - object["type"], err = json.Marshal(a.Type) +func (c *ClientWithResponses) GetStreamsNameSignificantEventsGenerateWithResponse(ctx context.Context, name string, params *GetStreamsNameSignificantEventsGenerateParams, body GetStreamsNameSignificantEventsGenerateJSONRequestBody, reqEditors ...RequestEditorFn) (*GetStreamsNameSignificantEventsGenerateResponse, error) { + rsp, err := c.GetStreamsNameSignificantEventsGenerate(ctx, name, params, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) + return nil, err } + return ParseGetStreamsNameSignificantEventsGenerateResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// PostStreamsNameSignificantEventsPreviewWithBodyWithResponse request with arbitrary body returning *PostStreamsNameSignificantEventsPreviewResponse +func (c *ClientWithResponses) PostStreamsNameSignificantEventsPreviewWithBodyWithResponse(ctx context.Context, name string, params *PostStreamsNameSignificantEventsPreviewParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostStreamsNameSignificantEventsPreviewResponse, error) { + rsp, err := c.PostStreamsNameSignificantEventsPreviewWithBody(ctx, name, params, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostStreamsNameSignificantEventsPreviewResponse(rsp) } -// Getter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features. Returns the specified -// element and whether it was found -func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) PostStreamsNameSignificantEventsPreviewWithResponse(ctx context.Context, name string, params *PostStreamsNameSignificantEventsPreviewParams, body PostStreamsNameSignificantEventsPreviewJSONRequestBody, reqEditors ...RequestEditorFn) (*PostStreamsNameSignificantEventsPreviewResponse, error) { + rsp, err := c.PostStreamsNameSignificantEventsPreview(ctx, name, params, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePostStreamsNameSignificantEventsPreviewResponse(rsp) } -// Setter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features -func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// PostSyntheticsMonitorTestWithResponse request returning *PostSyntheticsMonitorTestResponse +func (c *ClientWithResponses) PostSyntheticsMonitorTestWithResponse(ctx context.Context, monitorId string, reqEditors ...RequestEditorFn) (*PostSyntheticsMonitorTestResponse, error) { + rsp, err := c.PostSyntheticsMonitorTest(ctx, monitorId, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePostSyntheticsMonitorTestResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties -func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// GetSyntheticMonitorsWithResponse request returning *GetSyntheticMonitorsResponse +func (c *ClientWithResponses) GetSyntheticMonitorsWithResponse(ctx context.Context, params *GetSyntheticMonitorsParams, reqEditors ...RequestEditorFn) (*GetSyntheticMonitorsResponse, error) { + rsp, err := c.GetSyntheticMonitors(ctx, params, reqEditors...) if err != nil { - return err + return nil, err } + return ParseGetSyntheticMonitorsResponse(rsp) +} - if raw, found := object["doc_value_only_numeric"]; found { - err = json.Unmarshal(raw, &a.DocValueOnlyNumeric) - if err != nil { - return fmt.Errorf("error reading 'doc_value_only_numeric': %w", err) - } - delete(object, "doc_value_only_numeric") +// PostSyntheticMonitorsWithBodyWithResponse request with arbitrary body returning *PostSyntheticMonitorsResponse +func (c *ClientWithResponses) PostSyntheticMonitorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostSyntheticMonitorsResponse, error) { + rsp, err := c.PostSyntheticMonitorsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostSyntheticMonitorsResponse(rsp) +} - if raw, found := object["doc_value_only_other"]; found { - err = json.Unmarshal(raw, &a.DocValueOnlyOther) - if err != nil { - return fmt.Errorf("error reading 'doc_value_only_other': %w", err) - } - delete(object, "doc_value_only_other") +func (c *ClientWithResponses) PostSyntheticMonitorsWithResponse(ctx context.Context, body PostSyntheticMonitorsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostSyntheticMonitorsResponse, error) { + rsp, err := c.PostSyntheticMonitors(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostSyntheticMonitorsResponse(rsp) +} + +// DeleteSyntheticMonitorsWithBodyWithResponse request with arbitrary body returning *DeleteSyntheticMonitorsResponse +func (c *ClientWithResponses) DeleteSyntheticMonitorsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteSyntheticMonitorsResponse, error) { + rsp, err := c.DeleteSyntheticMonitorsWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteSyntheticMonitorsResponse(rsp) +} - if raw, found := object["synthetic_source"]; found { - err = json.Unmarshal(raw, &a.SyntheticSource) - if err != nil { - return fmt.Errorf("error reading 'synthetic_source': %w", err) - } - delete(object, "synthetic_source") +func (c *ClientWithResponses) DeleteSyntheticMonitorsWithResponse(ctx context.Context, body DeleteSyntheticMonitorsJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteSyntheticMonitorsResponse, error) { + rsp, err := c.DeleteSyntheticMonitors(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseDeleteSyntheticMonitorsResponse(rsp) +} - if raw, found := object["tsdb"]; found { - err = json.Unmarshal(raw, &a.Tsdb) - if err != nil { - return fmt.Errorf("error reading 'tsdb': %w", err) - } - delete(object, "tsdb") +// DeleteSyntheticMonitorWithResponse request returning *DeleteSyntheticMonitorResponse +func (c *ClientWithResponses) DeleteSyntheticMonitorWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteSyntheticMonitorResponse, error) { + rsp, err := c.DeleteSyntheticMonitor(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteSyntheticMonitorResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// GetSyntheticMonitorWithResponse request returning *GetSyntheticMonitorResponse +func (c *ClientWithResponses) GetSyntheticMonitorWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetSyntheticMonitorResponse, error) { + rsp, err := c.GetSyntheticMonitor(ctx, id, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseGetSyntheticMonitorResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features to handle AdditionalProperties -func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Features) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - if a.DocValueOnlyNumeric != nil { - object["doc_value_only_numeric"], err = json.Marshal(a.DocValueOnlyNumeric) - if err != nil { - return nil, fmt.Errorf("error marshaling 'doc_value_only_numeric': %w", err) - } +// PutSyntheticMonitorWithBodyWithResponse request with arbitrary body returning *PutSyntheticMonitorResponse +func (c *ClientWithResponses) PutSyntheticMonitorWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutSyntheticMonitorResponse, error) { + rsp, err := c.PutSyntheticMonitorWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutSyntheticMonitorResponse(rsp) +} - if a.DocValueOnlyOther != nil { - object["doc_value_only_other"], err = json.Marshal(a.DocValueOnlyOther) - if err != nil { - return nil, fmt.Errorf("error marshaling 'doc_value_only_other': %w", err) - } +func (c *ClientWithResponses) PutSyntheticMonitorWithResponse(ctx context.Context, id string, body PutSyntheticMonitorJSONRequestBody, reqEditors ...RequestEditorFn) (*PutSyntheticMonitorResponse, error) { + rsp, err := c.PutSyntheticMonitor(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutSyntheticMonitorResponse(rsp) +} - if a.SyntheticSource != nil { - object["synthetic_source"], err = json.Marshal(a.SyntheticSource) - if err != nil { - return nil, fmt.Errorf("error marshaling 'synthetic_source': %w", err) - } +// GetParametersWithResponse request returning *GetParametersResponse +func (c *ClientWithResponses) GetParametersWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetParametersResponse, error) { + rsp, err := c.GetParameters(ctx, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetParametersResponse(rsp) +} - if a.Tsdb != nil { - object["tsdb"], err = json.Marshal(a.Tsdb) - if err != nil { - return nil, fmt.Errorf("error marshaling 'tsdb': %w", err) - } +// PostParametersWithBodyWithResponse request with arbitrary body returning *PostParametersResponse +func (c *ClientWithResponses) PostParametersWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostParametersResponse, error) { + rsp, err := c.PostParametersWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePostParametersResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) PostParametersWithResponse(ctx context.Context, body PostParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*PostParametersResponse, error) { + rsp, err := c.PostParameters(ctx, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePostParametersResponse(rsp) } -// Getter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item. Returns the specified -// element and whether it was found -func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// DeleteParametersWithBodyWithResponse request with arbitrary body returning *DeleteParametersResponse +func (c *ClientWithResponses) DeleteParametersWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteParametersResponse, error) { + rsp, err := c.DeleteParametersWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParseDeleteParametersResponse(rsp) } -// Setter for additional properties for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item -func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) DeleteParametersWithResponse(ctx context.Context, body DeleteParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteParametersResponse, error) { + rsp, err := c.DeleteParameters(ctx, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseDeleteParametersResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties -func (a *PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// DeleteParameterWithResponse request returning *DeleteParameterResponse +func (c *ClientWithResponses) DeleteParameterWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteParameterResponse, error) { + rsp, err := c.DeleteParameter(ctx, id, reqEditors...) 
if err != nil { - return err + return nil, err } + return ParseDeleteParameterResponse(rsp) +} - if raw, found := object["data_stream"]; found { - err = json.Unmarshal(raw, &a.DataStream) - if err != nil { - return fmt.Errorf("error reading 'data_stream': %w", err) - } - delete(object, "data_stream") +// GetParameterWithResponse request returning *GetParameterResponse +func (c *ClientWithResponses) GetParameterWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetParameterResponse, error) { + rsp, err := c.GetParameter(ctx, id, reqEditors...) + if err != nil { + return nil, err } + return ParseGetParameterResponse(rsp) +} - if raw, found := object["features"]; found { - err = json.Unmarshal(raw, &a.Features) - if err != nil { - return fmt.Errorf("error reading 'features': %w", err) - } - delete(object, "features") +// PutParameterWithBodyWithResponse request with arbitrary body returning *PutParameterResponse +func (c *ClientWithResponses) PutParameterWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutParameterResponse, error) { + rsp, err := c.PutParameterWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutParameterResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +func (c *ClientWithResponses) PutParameterWithResponse(ctx context.Context, id string, body PutParameterJSONRequestBody, reqEditors ...RequestEditorFn) (*PutParameterResponse, error) { + rsp, err := c.PutParameter(ctx, id, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePutParameterResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item to handle AdditionalProperties -func (a PackageListItem_InstallationInfo_ExperimentalDataStreamFeatures_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// GetPrivateLocationsWithResponse request returning *GetPrivateLocationsResponse +func (c *ClientWithResponses) GetPrivateLocationsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetPrivateLocationsResponse, error) { + rsp, err := c.GetPrivateLocations(ctx, reqEditors...) + if err != nil { + return nil, err + } + return ParseGetPrivateLocationsResponse(rsp) +} - object["data_stream"], err = json.Marshal(a.DataStream) +// PostPrivateLocationWithBodyWithResponse request with arbitrary body returning *PostPrivateLocationResponse +func (c *ClientWithResponses) PostPrivateLocationWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostPrivateLocationResponse, error) { + rsp, err := c.PostPrivateLocationWithBody(ctx, contentType, body, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'data_stream': %w", err) + return nil, err } + return ParsePostPrivateLocationResponse(rsp) +} - object["features"], err = json.Marshal(a.Features) +func (c *ClientWithResponses) PostPrivateLocationWithResponse(ctx context.Context, body PostPrivateLocationJSONRequestBody, reqEditors ...RequestEditorFn) (*PostPrivateLocationResponse, error) { + rsp, err := c.PostPrivateLocation(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'features': %w", err) + return nil, err } + return ParsePostPrivateLocationResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// DeletePrivateLocationWithResponse request returning *DeletePrivateLocationResponse +func (c *ClientWithResponses) DeletePrivateLocationWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeletePrivateLocationResponse, error) { + rsp, err := c.DeletePrivateLocation(ctx, id, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseDeletePrivateLocationResponse(rsp) } -// Getter for additional properties for PackageListItem_InstallationInfo_InstalledEs_Item. Returns the specified -// element and whether it was found -func (a PackageListItem_InstallationInfo_InstalledEs_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// GetPrivateLocationWithResponse request returning *GetPrivateLocationResponse +func (c *ClientWithResponses) GetPrivateLocationWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetPrivateLocationResponse, error) { + rsp, err := c.GetPrivateLocation(ctx, id, reqEditors...) + if err != nil { + return nil, err } - return + return ParseGetPrivateLocationResponse(rsp) } -// Setter for additional properties for PackageListItem_InstallationInfo_InstalledEs_Item -func (a *PackageListItem_InstallationInfo_InstalledEs_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// PutPrivateLocationWithBodyWithResponse request with arbitrary body returning *PutPrivateLocationResponse +func (c *ClientWithResponses) PutPrivateLocationWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutPrivateLocationResponse, error) { + rsp, err := c.PutPrivateLocationWithBody(ctx, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParsePutPrivateLocationResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_InstalledEs_Item to handle AdditionalProperties -func (a *PackageListItem_InstallationInfo_InstalledEs_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +func (c *ClientWithResponses) PutPrivateLocationWithResponse(ctx context.Context, id string, body PutPrivateLocationJSONRequestBody, reqEditors ...RequestEditorFn) (*PutPrivateLocationResponse, error) { + rsp, err := c.PutPrivateLocation(ctx, id, body, reqEditors...) 
if err != nil { - return err + return nil, err } + return ParsePutPrivateLocationResponse(rsp) +} - if raw, found := object["deferred"]; found { - err = json.Unmarshal(raw, &a.Deferred) - if err != nil { - return fmt.Errorf("error reading 'deferred': %w", err) - } - delete(object, "deferred") +// TaskManagerHealthWithResponse request returning *TaskManagerHealthResponse +func (c *ClientWithResponses) TaskManagerHealthWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*TaskManagerHealthResponse, error) { + rsp, err := c.TaskManagerHealth(ctx, reqEditors...) + if err != nil { + return nil, err } + return ParseTaskManagerHealthResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +// DeleteTimelinesWithBodyWithResponse request with arbitrary body returning *DeleteTimelinesResponse +func (c *ClientWithResponses) DeleteTimelinesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteTimelinesResponse, error) { + rsp, err := c.DeleteTimelinesWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteTimelinesResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +func (c *ClientWithResponses) DeleteTimelinesWithResponse(ctx context.Context, body DeleteTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteTimelinesResponse, error) { + rsp, err := c.DeleteTimelines(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteTimelinesResponse(rsp) +} - if raw, found := object["version"]; found { - err = json.Unmarshal(raw, &a.Version) - if err != nil { - return fmt.Errorf("error reading 'version': %w", err) - } - delete(object, "version") +// GetTimelineWithResponse request returning *GetTimelineResponse +func (c *ClientWithResponses) GetTimelineWithResponse(ctx context.Context, params *GetTimelineParams, reqEditors ...RequestEditorFn) (*GetTimelineResponse, error) { + rsp, err := c.GetTimeline(ctx, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetTimelineResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PatchTimelineWithBodyWithResponse request with arbitrary body returning *PatchTimelineResponse +func (c *ClientWithResponses) PatchTimelineWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchTimelineResponse, error) { + rsp, err := c.PatchTimelineWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } - return nil + return ParsePatchTimelineResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_InstalledEs_Item to handle AdditionalProperties -func (a PackageListItem_InstallationInfo_InstalledEs_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) PatchTimelineWithResponse(ctx context.Context, body PatchTimelineJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchTimelineResponse, error) { + rsp, err := c.PatchTimeline(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePatchTimelineResponse(rsp) +} - if a.Deferred != nil { - object["deferred"], err = json.Marshal(a.Deferred) - if err != nil { - return nil, fmt.Errorf("error marshaling 'deferred': %w", err) - } +// CreateTimelinesWithBodyWithResponse request with arbitrary body returning *CreateTimelinesResponse +func (c *ClientWithResponses) CreateTimelinesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateTimelinesResponse, error) { + rsp, err := c.CreateTimelinesWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateTimelinesResponse(rsp) +} - object["id"], err = json.Marshal(a.Id) +func (c *ClientWithResponses) CreateTimelinesWithResponse(ctx context.Context, body CreateTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateTimelinesResponse, error) { + rsp, err := c.CreateTimelines(ctx, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) + return nil, err } + return ParseCreateTimelinesResponse(rsp) +} - object["type"], err = json.Marshal(a.Type) +// CopyTimelineWithBodyWithResponse request with arbitrary body returning *CopyTimelineResponse +func (c *ClientWithResponses) CopyTimelineWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CopyTimelineResponse, error) { + rsp, err := c.CopyTimelineWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) + return nil, err } + return ParseCopyTimelineResponse(rsp) +} - if a.Version != nil { - object["version"], err = json.Marshal(a.Version) - if err != nil { - return nil, fmt.Errorf("error marshaling 'version': %w", err) - } +func (c *ClientWithResponses) CopyTimelineWithResponse(ctx context.Context, body CopyTimelineJSONRequestBody, reqEditors ...RequestEditorFn) (*CopyTimelineResponse, error) { + rsp, err := c.CopyTimeline(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCopyTimelineResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// GetDraftTimelinesWithResponse request returning *GetDraftTimelinesResponse +func (c *ClientWithResponses) GetDraftTimelinesWithResponse(ctx context.Context, params *GetDraftTimelinesParams, reqEditors ...RequestEditorFn) (*GetDraftTimelinesResponse, error) { + rsp, err := c.GetDraftTimelines(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseGetDraftTimelinesResponse(rsp) } -// Getter for additional properties for PackageListItem_InstallationInfo_InstalledKibana_Item. 
Returns the specified -// element and whether it was found -func (a PackageListItem_InstallationInfo_InstalledKibana_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// CleanDraftTimelinesWithBodyWithResponse request with arbitrary body returning *CleanDraftTimelinesResponse +func (c *ClientWithResponses) CleanDraftTimelinesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CleanDraftTimelinesResponse, error) { + rsp, err := c.CleanDraftTimelinesWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParseCleanDraftTimelinesResponse(rsp) } -// Setter for additional properties for PackageListItem_InstallationInfo_InstalledKibana_Item -func (a *PackageListItem_InstallationInfo_InstalledKibana_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) CleanDraftTimelinesWithResponse(ctx context.Context, body CleanDraftTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*CleanDraftTimelinesResponse, error) { + rsp, err := c.CleanDraftTimelines(ctx, body, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseCleanDraftTimelinesResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties -func (a *PackageListItem_InstallationInfo_InstalledKibana_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// ExportTimelinesWithBodyWithResponse request with arbitrary body returning *ExportTimelinesResponse +func (c *ClientWithResponses) ExportTimelinesWithBodyWithResponse(ctx context.Context, params *ExportTimelinesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ExportTimelinesResponse, error) { + rsp, err := c.ExportTimelinesWithBody(ctx, params, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseExportTimelinesResponse(rsp) +} - if raw, found := object["id"]; found { - err = json.Unmarshal(raw, &a.Id) - if err != nil { - return fmt.Errorf("error reading 'id': %w", err) - } - delete(object, "id") +func (c *ClientWithResponses) ExportTimelinesWithResponse(ctx context.Context, params *ExportTimelinesParams, body ExportTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*ExportTimelinesResponse, error) { + rsp, err := c.ExportTimelines(ctx, params, body, reqEditors...) + if err != nil { + return nil, err } + return ParseExportTimelinesResponse(rsp) +} - if raw, found := object["originId"]; found { - err = json.Unmarshal(raw, &a.OriginId) - if err != nil { - return fmt.Errorf("error reading 'originId': %w", err) - } - delete(object, "originId") +// PersistFavoriteRouteWithBodyWithResponse request with arbitrary body returning *PersistFavoriteRouteResponse +func (c *ClientWithResponses) PersistFavoriteRouteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PersistFavoriteRouteResponse, error) { + rsp, err := c.PersistFavoriteRouteWithBody(ctx, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePersistFavoriteRouteResponse(rsp) +} - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") +func (c *ClientWithResponses) PersistFavoriteRouteWithResponse(ctx context.Context, body PersistFavoriteRouteJSONRequestBody, reqEditors ...RequestEditorFn) (*PersistFavoriteRouteResponse, error) { + rsp, err := c.PersistFavoriteRoute(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePersistFavoriteRouteResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// ImportTimelinesWithBodyWithResponse request with arbitrary body returning *ImportTimelinesResponse +func (c *ClientWithResponses) ImportTimelinesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*ImportTimelinesResponse, error) { + rsp, err := c.ImportTimelinesWithBody(ctx, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseImportTimelinesResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_InstalledKibana_Item to handle AdditionalProperties -func (a PackageListItem_InstallationInfo_InstalledKibana_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) ImportTimelinesWithResponse(ctx context.Context, body ImportTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*ImportTimelinesResponse, error) { + rsp, err := c.ImportTimelines(ctx, body, reqEditors...) + if err != nil { + return nil, err + } + return ParseImportTimelinesResponse(rsp) +} - object["id"], err = json.Marshal(a.Id) +// InstallPrepackedTimelinesWithBodyWithResponse request with arbitrary body returning *InstallPrepackedTimelinesResponse +func (c *ClientWithResponses) InstallPrepackedTimelinesWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*InstallPrepackedTimelinesResponse, error) { + rsp, err := c.InstallPrepackedTimelinesWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'id': %w", err) + return nil, err } + return ParseInstallPrepackedTimelinesResponse(rsp) +} - if a.OriginId != nil { - object["originId"], err = json.Marshal(a.OriginId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'originId': %w", err) - } +func (c *ClientWithResponses) InstallPrepackedTimelinesWithResponse(ctx context.Context, body InstallPrepackedTimelinesJSONRequestBody, reqEditors ...RequestEditorFn) (*InstallPrepackedTimelinesResponse, error) { + rsp, err := c.InstallPrepackedTimelines(ctx, body, reqEditors...) + if err != nil { + return nil, err } + return ParseInstallPrepackedTimelinesResponse(rsp) +} - object["type"], err = json.Marshal(a.Type) +// ResolveTimelineWithResponse request returning *ResolveTimelineResponse +func (c *ClientWithResponses) ResolveTimelineWithResponse(ctx context.Context, params *ResolveTimelineParams, reqEditors ...RequestEditorFn) (*ResolveTimelineResponse, error) { + rsp, err := c.ResolveTimeline(ctx, params, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) + return nil, err } + return ParseResolveTimelineResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// GetTimelinesWithResponse request returning *GetTimelinesResponse +func (c *ClientWithResponses) GetTimelinesWithResponse(ctx context.Context, params *GetTimelinesParams, reqEditors ...RequestEditorFn) (*GetTimelinesResponse, error) { + rsp, err := c.GetTimelines(ctx, params, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseGetTimelinesResponse(rsp) } -// Getter for additional properties for PackageListItem_InstallationInfo_LatestExecutedState. Returns the specified -// element and whether it was found -func (a PackageListItem_InstallationInfo_LatestExecutedState) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// GetUpgradeStatusWithResponse request returning *GetUpgradeStatusResponse +func (c *ClientWithResponses) GetUpgradeStatusWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetUpgradeStatusResponse, error) { + rsp, err := c.GetUpgradeStatus(ctx, reqEditors...) + if err != nil { + return nil, err } - return + return ParseGetUpgradeStatusResponse(rsp) } -// Setter for additional properties for PackageListItem_InstallationInfo_LatestExecutedState -func (a *PackageListItem_InstallationInfo_LatestExecutedState) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// GetUptimeSettingsWithResponse request returning *GetUptimeSettingsResponse +func (c *ClientWithResponses) GetUptimeSettingsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetUptimeSettingsResponse, error) { + rsp, err := c.GetUptimeSettings(ctx, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseGetUptimeSettingsResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_LatestExecutedState to handle AdditionalProperties -func (a *PackageListItem_InstallationInfo_LatestExecutedState) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// PutUptimeSettingsWithBodyWithResponse request with arbitrary body returning *PutUptimeSettingsResponse +func (c *ClientWithResponses) PutUptimeSettingsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutUptimeSettingsResponse, error) { + rsp, err := c.PutUptimeSettingsWithBody(ctx, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParsePutUptimeSettingsResponse(rsp) +} - if raw, found := object["error"]; found { - err = json.Unmarshal(raw, &a.Error) - if err != nil { - return fmt.Errorf("error reading 'error': %w", err) - } - delete(object, "error") +func (c *ClientWithResponses) PutUptimeSettingsWithResponse(ctx context.Context, body PutUptimeSettingsJSONRequestBody, reqEditors ...RequestEditorFn) (*PutUptimeSettingsResponse, error) { + rsp, err := c.PutUptimeSettings(ctx, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePutUptimeSettingsResponse(rsp) +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// DeleteActionsConnectorIdWithResponse request returning *DeleteActionsConnectorIdResponse +func (c *ClientWithResponses) DeleteActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteActionsConnectorIdResponse, error) { + rsp, err := c.DeleteActionsConnectorId(ctx, spaceId, id, reqEditors...) + if err != nil { + return nil, err } + return ParseDeleteActionsConnectorIdResponse(rsp) +} - if raw, found := object["started_at"]; found { - err = json.Unmarshal(raw, &a.StartedAt) - if err != nil { - return fmt.Errorf("error reading 'started_at': %w", err) - } - delete(object, "started_at") +// GetActionsConnectorIdWithResponse request returning *GetActionsConnectorIdResponse +func (c *ClientWithResponses) GetActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetActionsConnectorIdResponse, error) { + rsp, err := c.GetActionsConnectorId(ctx, spaceId, id, reqEditors...) + if err != nil { + return nil, err } + return ParseGetActionsConnectorIdResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PostActionsConnectorIdWithBodyWithResponse request with arbitrary body returning *PostActionsConnectorIdResponse +func (c *ClientWithResponses) PostActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) { + rsp, err := c.PostActionsConnectorIdWithBody(ctx, spaceId, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePostActionsConnectorIdResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_LatestExecutedState to handle AdditionalProperties -func (a PackageListItem_InstallationInfo_LatestExecutedState) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +func (c *ClientWithResponses) PostActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) { + rsp, err := c.PostActionsConnectorId(ctx, spaceId, id, body, reqEditors...) + if err != nil { + return nil, err + } + return ParsePostActionsConnectorIdResponse(rsp) +} - if a.Error != nil { - object["error"], err = json.Marshal(a.Error) - if err != nil { - return nil, fmt.Errorf("error marshaling 'error': %w", err) - } +// PutActionsConnectorIdWithBodyWithResponse request with arbitrary body returning *PutActionsConnectorIdResponse +func (c *ClientWithResponses) PutActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) { + rsp, err := c.PutActionsConnectorIdWithBody(ctx, spaceId, id, contentType, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParsePutActionsConnectorIdResponse(rsp) +} - if a.Name != nil { - object["name"], err = json.Marshal(a.Name) - if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) - } +func (c *ClientWithResponses) PutActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) { + rsp, err := c.PutActionsConnectorId(ctx, spaceId, id, body, reqEditors...) + if err != nil { + return nil, err } + return ParsePutActionsConnectorIdResponse(rsp) +} - if a.StartedAt != nil { - object["started_at"], err = json.Marshal(a.StartedAt) - if err != nil { - return nil, fmt.Errorf("error marshaling 'started_at': %w", err) - } +// GetActionsConnectorsWithResponse request returning *GetActionsConnectorsResponse +func (c *ClientWithResponses) GetActionsConnectorsWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetActionsConnectorsResponse, error) { + rsp, err := c.GetActionsConnectors(ctx, spaceId, reqEditors...) + if err != nil { + return nil, err } + return ParseGetActionsConnectorsResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// GetAllDataViewsDefaultWithResponse request returning *GetAllDataViewsDefaultResponse +func (c *ClientWithResponses) GetAllDataViewsDefaultWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetAllDataViewsDefaultResponse, error) { + rsp, err := c.GetAllDataViewsDefault(ctx, spaceId, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseGetAllDataViewsDefaultResponse(rsp) } -// Getter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error. Returns the specified -// element and whether it was found -func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// CreateDataViewDefaultwWithBodyWithResponse request with arbitrary body returning *CreateDataViewDefaultwResponse +func (c *ClientWithResponses) CreateDataViewDefaultwWithBodyWithResponse(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateDataViewDefaultwResponse, error) { + rsp, err := c.CreateDataViewDefaultwWithBody(ctx, spaceId, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParseCreateDataViewDefaultwResponse(rsp) } -// Setter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error -func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +func (c *ClientWithResponses) CreateDataViewDefaultwWithResponse(ctx context.Context, spaceId SpaceId, body CreateDataViewDefaultwJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateDataViewDefaultwResponse, error) { + rsp, err := c.CreateDataViewDefaultw(ctx, spaceId, body, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseCreateDataViewDefaultwResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties -func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// DeleteDataViewDefaultWithResponse request returning *DeleteDataViewDefaultResponse +func (c *ClientWithResponses) DeleteDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*DeleteDataViewDefaultResponse, error) { + rsp, err := c.DeleteDataViewDefault(ctx, spaceId, viewId, reqEditors...) if err != nil { - return err + return nil, err } + return ParseDeleteDataViewDefaultResponse(rsp) +} - if raw, found := object["message"]; found { - err = json.Unmarshal(raw, &a.Message) - if err != nil { - return fmt.Errorf("error reading 'message': %w", err) - } - delete(object, "message") +// GetDataViewDefaultWithResponse request returning *GetDataViewDefaultResponse +func (c *ClientWithResponses) GetDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*GetDataViewDefaultResponse, error) { + rsp, err := c.GetDataViewDefault(ctx, spaceId, viewId, reqEditors...) + if err != nil { + return nil, err } + return ParseGetDataViewDefaultResponse(rsp) +} - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") +// UpdateDataViewDefaultWithBodyWithResponse request with arbitrary body returning *UpdateDataViewDefaultResponse +func (c *ClientWithResponses) UpdateDataViewDefaultWithBodyWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateDataViewDefaultResponse, error) { + rsp, err := c.UpdateDataViewDefaultWithBody(ctx, spaceId, viewId, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateDataViewDefaultResponse(rsp) +} - if raw, found := object["stack"]; found { - err = json.Unmarshal(raw, &a.Stack) - if err != nil { - return fmt.Errorf("error reading 'stack': %w", err) - } - delete(object, "stack") +func (c *ClientWithResponses) UpdateDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateDataViewDefaultResponse, error) { + rsp, err := c.UpdateDataViewDefault(ctx, spaceId, viewId, body, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseUpdateDataViewDefaultResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// PostMaintenanceWindowWithBodyWithResponse request with arbitrary body returning *PostMaintenanceWindowResponse +func (c *ClientWithResponses) PostMaintenanceWindowWithBodyWithResponse(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) { + rsp, err := c.PostMaintenanceWindowWithBody(ctx, spaceId, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParsePostMaintenanceWindowResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error to handle AdditionalProperties -func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Error) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - object["message"], err = json.Marshal(a.Message) +func (c *ClientWithResponses) PostMaintenanceWindowWithResponse(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) { + rsp, err := c.PostMaintenanceWindow(ctx, spaceId, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'message': %w", err) + return nil, err } + return ParsePostMaintenanceWindowResponse(rsp) +} - object["name"], err = json.Marshal(a.Name) +// DeleteMaintenanceWindowIdWithResponse request returning *DeleteMaintenanceWindowIdResponse +func (c *ClientWithResponses) DeleteMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteMaintenanceWindowIdResponse, error) { + rsp, err := c.DeleteMaintenanceWindowId(ctx, spaceId, id, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) + return nil, err } + return ParseDeleteMaintenanceWindowIdResponse(rsp) +} - if a.Stack != nil { - object["stack"], err = json.Marshal(a.Stack) - if err != nil { - return nil, fmt.Errorf("error marshaling 'stack': %w", err) - } +// GetMaintenanceWindowIdWithResponse request returning *GetMaintenanceWindowIdResponse +func (c *ClientWithResponses) GetMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetMaintenanceWindowIdResponse, error) { + rsp, err := c.GetMaintenanceWindowId(ctx, spaceId, id, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseGetMaintenanceWindowIdResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +// PatchMaintenanceWindowIdWithBodyWithResponse request with arbitrary body returning *PatchMaintenanceWindowIdResponse +func (c *ClientWithResponses) PatchMaintenanceWindowIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) { + rsp, err := c.PatchMaintenanceWindowIdWithBody(ctx, spaceId, id, contentType, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParsePatchMaintenanceWindowIdResponse(rsp) } -// Getter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item. Returns the specified -// element and whether it was found -func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +func (c *ClientWithResponses) PatchMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) { + rsp, err := c.PatchMaintenanceWindowId(ctx, spaceId, id, body, reqEditors...) + if err != nil { + return nil, err } - return + return ParsePatchMaintenanceWindowIdResponse(rsp) } -// Setter for additional properties for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item -func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// FindSlosOpWithResponse request returning *FindSlosOpResponse +func (c *ClientWithResponses) FindSlosOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, params *FindSlosOpParams, reqEditors ...RequestEditorFn) (*FindSlosOpResponse, error) { + rsp, err := c.FindSlosOp(ctx, spaceId, params, reqEditors...) + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseFindSlosOpResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties -func (a *PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// CreateSloOpWithBodyWithResponse request with arbitrary body returning *CreateSloOpResponse +func (c *ClientWithResponses) CreateSloOpWithBodyWithResponse(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateSloOpResponse, error) { + rsp, err := c.CreateSloOpWithBody(ctx, spaceId, contentType, body, reqEditors...) 
if err != nil { - return err + return nil, err } + return ParseCreateSloOpResponse(rsp) +} - if raw, found := object["created_at"]; found { - err = json.Unmarshal(raw, &a.CreatedAt) - if err != nil { - return fmt.Errorf("error reading 'created_at': %w", err) - } - delete(object, "created_at") +func (c *ClientWithResponses) CreateSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, body CreateSloOpJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateSloOpResponse, error) { + rsp, err := c.CreateSloOp(ctx, spaceId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseCreateSloOpResponse(rsp) +} - if raw, found := object["error"]; found { - err = json.Unmarshal(raw, &a.Error) - if err != nil { - return fmt.Errorf("error reading 'error': %w", err) - } - delete(object, "error") +// BulkDeleteOpWithBodyWithResponse request with arbitrary body returning *BulkDeleteOpResponse +func (c *ClientWithResponses) BulkDeleteOpWithBodyWithResponse(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*BulkDeleteOpResponse, error) { + rsp, err := c.BulkDeleteOpWithBody(ctx, spaceId, contentType, body, reqEditors...) + if err != nil { + return nil, err } + return ParseBulkDeleteOpResponse(rsp) +} - if raw, found := object["target_version"]; found { - err = json.Unmarshal(raw, &a.TargetVersion) - if err != nil { - return fmt.Errorf("error reading 'target_version': %w", err) - } - delete(object, "target_version") +func (c *ClientWithResponses) BulkDeleteOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, body BulkDeleteOpJSONRequestBody, reqEditors ...RequestEditorFn) (*BulkDeleteOpResponse, error) { + rsp, err := c.BulkDeleteOp(ctx, spaceId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseBulkDeleteOpResponse(rsp) +} - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } +// BulkDeleteStatusOpWithResponse request returning *BulkDeleteStatusOpResponse +func (c *ClientWithResponses) BulkDeleteStatusOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, taskId string, reqEditors ...RequestEditorFn) (*BulkDeleteStatusOpResponse, error) { + rsp, err := c.BulkDeleteStatusOp(ctx, spaceId, taskId, reqEditors...) + if err != nil { + return nil, err } - return nil + return ParseBulkDeleteStatusOpResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item to handle AdditionalProperties -func (a PackageListItem_InstallationInfo_LatestInstallFailedAttempts_Item) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - object["created_at"], err = json.Marshal(a.CreatedAt) +// DeleteRollupDataOpWithBodyWithResponse request with arbitrary body returning *DeleteRollupDataOpResponse +func (c *ClientWithResponses) DeleteRollupDataOpWithBodyWithResponse(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteRollupDataOpResponse, error) { + rsp, err := c.DeleteRollupDataOpWithBody(ctx, spaceId, contentType, body, reqEditors...) 
if err != nil { - return nil, fmt.Errorf("error marshaling 'created_at': %w", err) + return nil, err } + return ParseDeleteRollupDataOpResponse(rsp) +} - object["error"], err = json.Marshal(a.Error) +func (c *ClientWithResponses) DeleteRollupDataOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, body DeleteRollupDataOpJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteRollupDataOpResponse, error) { + rsp, err := c.DeleteRollupDataOp(ctx, spaceId, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'error': %w", err) + return nil, err } + return ParseDeleteRollupDataOpResponse(rsp) +} - object["target_version"], err = json.Marshal(a.TargetVersion) +// DeleteSloInstancesOpWithBodyWithResponse request with arbitrary body returning *DeleteSloInstancesOpResponse +func (c *ClientWithResponses) DeleteSloInstancesOpWithBodyWithResponse(ctx context.Context, spaceId SLOsSpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteSloInstancesOpResponse, error) { + rsp, err := c.DeleteSloInstancesOpWithBody(ctx, spaceId, contentType, body, reqEditors...) if err != nil { - return nil, fmt.Errorf("error marshaling 'target_version': %w", err) + return nil, err } + return ParseDeleteSloInstancesOpResponse(rsp) +} - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } +func (c *ClientWithResponses) DeleteSloInstancesOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, body DeleteSloInstancesOpJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteSloInstancesOpResponse, error) { + rsp, err := c.DeleteSloInstancesOp(ctx, spaceId, body, reqEditors...) + if err != nil { + return nil, err } - return json.Marshal(object) + return ParseDeleteSloInstancesOpResponse(rsp) } -// Getter for additional properties for PackageListItem_InstallationInfo. Returns the specified -// element and whether it was found -func (a PackageListItem_InstallationInfo) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// DeleteSloOpWithResponse request returning *DeleteSloOpResponse +func (c *ClientWithResponses) DeleteSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*DeleteSloOpResponse, error) { + rsp, err := c.DeleteSloOp(ctx, spaceId, sloId, reqEditors...) + if err != nil { + return nil, err } - return + return ParseDeleteSloOpResponse(rsp) } -// Setter for additional properties for PackageListItem_InstallationInfo -func (a *PackageListItem_InstallationInfo) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// GetSloOpWithResponse request returning *GetSloOpResponse +func (c *ClientWithResponses) GetSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, params *GetSloOpParams, reqEditors ...RequestEditorFn) (*GetSloOpResponse, error) { + rsp, err := c.GetSloOp(ctx, spaceId, sloId, params, reqEditors...) 
+ if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value + return ParseGetSloOpResponse(rsp) } -// Override default JSON handling for PackageListItem_InstallationInfo to handle AdditionalProperties -func (a *PackageListItem_InstallationInfo) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// UpdateSloOpWithBodyWithResponse request with arbitrary body returning *UpdateSloOpResponse +func (c *ClientWithResponses) UpdateSloOpWithBodyWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateSloOpResponse, error) { + rsp, err := c.UpdateSloOpWithBody(ctx, spaceId, sloId, contentType, body, reqEditors...) if err != nil { - return err + return nil, err } + return ParseUpdateSloOpResponse(rsp) +} - if raw, found := object["additional_spaces_installed_kibana"]; found { - err = json.Unmarshal(raw, &a.AdditionalSpacesInstalledKibana) - if err != nil { - return fmt.Errorf("error reading 'additional_spaces_installed_kibana': %w", err) - } - delete(object, "additional_spaces_installed_kibana") +func (c *ClientWithResponses) UpdateSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, body UpdateSloOpJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateSloOpResponse, error) { + rsp, err := c.UpdateSloOp(ctx, spaceId, sloId, body, reqEditors...) + if err != nil { + return nil, err } + return ParseUpdateSloOpResponse(rsp) +} - if raw, found := object["created_at"]; found { - err = json.Unmarshal(raw, &a.CreatedAt) - if err != nil { - return fmt.Errorf("error reading 'created_at': %w", err) - } - delete(object, "created_at") +// ResetSloOpWithResponse request returning *ResetSloOpResponse +func (c *ClientWithResponses) ResetSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*ResetSloOpResponse, error) { + rsp, err := c.ResetSloOp(ctx, spaceId, sloId, reqEditors...) + if err != nil { + return nil, err } + return ParseResetSloOpResponse(rsp) +} - if raw, found := object["experimental_data_stream_features"]; found { - err = json.Unmarshal(raw, &a.ExperimentalDataStreamFeatures) - if err != nil { - return fmt.Errorf("error reading 'experimental_data_stream_features': %w", err) - } - delete(object, "experimental_data_stream_features") +// DisableSloOpWithResponse request returning *DisableSloOpResponse +func (c *ClientWithResponses) DisableSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*DisableSloOpResponse, error) { + rsp, err := c.DisableSloOp(ctx, spaceId, sloId, reqEditors...) + if err != nil { + return nil, err } + return ParseDisableSloOpResponse(rsp) +} - if raw, found := object["install_format_schema_version"]; found { - err = json.Unmarshal(raw, &a.InstallFormatSchemaVersion) - if err != nil { - return fmt.Errorf("error reading 'install_format_schema_version': %w", err) - } - delete(object, "install_format_schema_version") +// EnableSloOpWithResponse request returning *EnableSloOpResponse +func (c *ClientWithResponses) EnableSloOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, sloId SLOsSloId, reqEditors ...RequestEditorFn) (*EnableSloOpResponse, error) { + rsp, err := c.EnableSloOp(ctx, spaceId, sloId, reqEditors...) 
+ if err != nil { + return nil, err } + return ParseEnableSloOpResponse(rsp) +} - if raw, found := object["install_source"]; found { - err = json.Unmarshal(raw, &a.InstallSource) - if err != nil { - return fmt.Errorf("error reading 'install_source': %w", err) - } - delete(object, "install_source") +// GetDefinitionsOpWithResponse request returning *GetDefinitionsOpResponse +func (c *ClientWithResponses) GetDefinitionsOpWithResponse(ctx context.Context, spaceId SLOsSpaceId, params *GetDefinitionsOpParams, reqEditors ...RequestEditorFn) (*GetDefinitionsOpResponse, error) { + rsp, err := c.GetDefinitionsOp(ctx, spaceId, params, reqEditors...) + if err != nil { + return nil, err } + return ParseGetDefinitionsOpResponse(rsp) +} - if raw, found := object["install_status"]; found { - err = json.Unmarshal(raw, &a.InstallStatus) - if err != nil { - return fmt.Errorf("error reading 'install_status': %w", err) - } - delete(object, "install_status") +// ParsePostActionsConnectorIdExecuteResponse parses an HTTP response from a PostActionsConnectorIdExecuteWithResponse call +func ParsePostActionsConnectorIdExecuteResponse(rsp *http.Response) (*PostActionsConnectorIdExecuteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - if raw, found := object["installed_es"]; found { - err = json.Unmarshal(raw, &a.InstalledEs) - if err != nil { - return fmt.Errorf("error reading 'installed_es': %w", err) - } - delete(object, "installed_es") + response := &PostActionsConnectorIdExecuteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if raw, found := object["installed_kibana"]; found { - err = json.Unmarshal(raw, &a.InstalledKibana) - if err != nil { - return fmt.Errorf("error reading 'installed_kibana': %w", err) - } - delete(object, "installed_kibana") - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Config *map[string]interface{} `json:"config,omitempty"` - if raw, found := object["installed_kibana_space_id"]; found { - err = json.Unmarshal(raw, &a.InstalledKibanaSpaceId) - if err != nil { - return fmt.Errorf("error reading 'installed_kibana_space_id': %w", err) - } - delete(object, "installed_kibana_space_id") - } + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` - if raw, found := object["latest_executed_state"]; found { - err = json.Unmarshal(raw, &a.LatestExecutedState) - if err != nil { - return fmt.Errorf("error reading 'latest_executed_state': %w", err) - } - delete(object, "latest_executed_state") - } + // Id The identifier for the connector. + Id string `json:"id"` - if raw, found := object["latest_install_failed_attempts"]; found { - err = json.Unmarshal(raw, &a.LatestInstallFailedAttempts) - if err != nil { - return fmt.Errorf("error reading 'latest_install_failed_attempts': %w", err) + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` + + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. 
+ IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. + Name string `json:"name"` } - delete(object, "latest_install_failed_attempts") + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } - if raw, found := object["name"]; found { - err = json.Unmarshal(raw, &a.Name) - if err != nil { - return fmt.Errorf("error reading 'name': %w", err) - } - delete(object, "name") + return response, nil +} + +// ParseGetActionsConnectorTypesResponse parses an HTTP response from a GetActionsConnectorTypesWithResponse call +func ParseGetActionsConnectorTypesResponse(rsp *http.Response) (*GetActionsConnectorTypesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - if raw, found := object["namespaces"]; found { - err = json.Unmarshal(raw, &a.Namespaces) - if err != nil { - return fmt.Errorf("error reading 'namespaces': %w", err) - } - delete(object, "namespaces") + response := &GetActionsConnectorTypesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") + return response, nil +} + +// ParseGetAlertingHealthResponse parses an HTTP response from a GetAlertingHealthWithResponse call +func ParseGetAlertingHealthResponse(rsp *http.Response) (*GetAlertingHealthResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - if raw, found := object["updated_at"]; found { - err = json.Unmarshal(raw, &a.UpdatedAt) - if err != nil { - return fmt.Errorf("error reading 'updated_at': %w", err) - } - delete(object, "updated_at") + response := &GetAlertingHealthResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if raw, found := object["verification_key_id"]; found { - err = json.Unmarshal(raw, &a.VerificationKeyId) - if err != nil { - return fmt.Errorf("error reading 'verification_key_id': %w", err) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // AlertingFrameworkHealth Three substates identify the health of the alerting framework: `decryption_health`, `execution_health`, and `read_health`. + AlertingFrameworkHealth *struct { + // DecryptionHealth The timestamp and status of the rule decryption. + DecryptionHealth *struct { + Status *GetAlertingHealth200AlertingFrameworkHealthDecryptionHealthStatus `json:"status,omitempty"` + Timestamp *time.Time `json:"timestamp,omitempty"` + } `json:"decryption_health,omitempty"` + + // ExecutionHealth The timestamp and status of the rule run. + ExecutionHealth *struct { + Status *GetAlertingHealth200AlertingFrameworkHealthExecutionHealthStatus `json:"status,omitempty"` + Timestamp *time.Time `json:"timestamp,omitempty"` + } `json:"execution_health,omitempty"` + + // ReadHealth The timestamp and status of the rule reading events. + ReadHealth *struct { + Status *GetAlertingHealth200AlertingFrameworkHealthReadHealthStatus `json:"status,omitempty"` + Timestamp *time.Time `json:"timestamp,omitempty"` + } `json:"read_health,omitempty"` + } `json:"alerting_framework_health,omitempty"` + + // HasPermanentEncryptionKey If `false`, the encrypted saved object plugin does not have a permanent encryption key. 
+ HasPermanentEncryptionKey *bool `json:"has_permanent_encryption_key,omitempty"` + + // IsSufficientlySecure If `false`, security is enabled but TLS is not. + IsSufficientlySecure *bool `json:"is_sufficiently_secure,omitempty"` } - delete(object, "verification_key_id") - } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - if raw, found := object["verification_status"]; found { - err = json.Unmarshal(raw, &a.VerificationStatus) - if err != nil { - return fmt.Errorf("error reading 'verification_status': %w", err) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Alerting401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } - delete(object, "verification_status") + response.JSON401 = &dest + } - if raw, found := object["version"]; found { - err = json.Unmarshal(raw, &a.Version) - if err != nil { - return fmt.Errorf("error reading 'version': %w", err) - } - delete(object, "version") + return response, nil +} + +// ParseDeleteAlertingRuleIdResponse parses an HTTP response from a DeleteAlertingRuleIdWithResponse call +func ParseDeleteAlertingRuleIdResponse(rsp *http.Response) (*DeleteAlertingRuleIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } + response := &DeleteAlertingRuleIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return nil -} -// Override default JSON handling for PackageListItem_InstallationInfo to handle AdditionalProperties -func (a PackageListItem_InstallationInfo) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) + return response, nil +} - if a.AdditionalSpacesInstalledKibana != nil { - object["additional_spaces_installed_kibana"], err = json.Marshal(a.AdditionalSpacesInstalledKibana) - if err != nil { - return nil, fmt.Errorf("error marshaling 'additional_spaces_installed_kibana': %w", err) - } +// ParseGetAlertingRuleIdResponse parses an HTTP response from a GetAlertingRuleIdWithResponse call +func ParseGetAlertingRuleIdResponse(rsp *http.Response) (*GetAlertingRuleIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - if a.CreatedAt != nil { - object["created_at"], err = json.Marshal(a.CreatedAt) - if err != nil { - return nil, fmt.Errorf("error marshaling 'created_at': %w", err) - } + response := &GetAlertingRuleIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if a.ExperimentalDataStreamFeatures != nil { - object["experimental_data_stream_features"], err = json.Marshal(a.ExperimentalDataStreamFeatures) - if err != nil { - return nil, fmt.Errorf("error marshaling 'experimental_data_stream_features': %w", err) - } - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Actions []struct { + // AlertsFilter Defines a period that limits whether the action runs. + AlertsFilter *struct { + Query *struct { + // Dsl A filter written in Elasticsearch Query Domain Specific Language (DSL). 
+ Dsl *string `json:"dsl,omitempty"` + + // Filters A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package. + Filters []struct { + State *struct { + // Store A filter can be either specific to an application context or applied globally. + Store GetAlertingRuleId200ActionsAlertsFilterQueryFiltersStateStore `json:"store"` + } `json:"$state,omitempty"` + Meta map[string]interface{} `json:"meta"` + Query *map[string]interface{} `json:"query,omitempty"` + } `json:"filters"` + + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query,omitempty"` + Timeframe *struct { + // Days Defines the days of the week that the action can run, represented as an array of numbers. For example, `1` represents Monday. An empty array is equivalent to specifying all the days of the week. + Days []GetAlertingRuleId200ActionsAlertsFilterTimeframeDays `json:"days"` + Hours struct { + // End The end of the time frame in 24-hour notation (`hh:mm`). + End string `json:"end"` + + // Start The start of the time frame in 24-hour notation (`hh:mm`). + Start string `json:"start"` + } `json:"hours"` + + // Timezone The ISO time zone for the `hours` values. Values such as `UTC` and `UTC+1` also work but lack built-in daylight savings time support and are not recommended. + Timezone string `json:"timezone"` + } `json:"timeframe,omitempty"` + } `json:"alerts_filter,omitempty"` + + // ConnectorTypeId The type of connector. This property appears in responses but cannot be set in requests. + ConnectorTypeId string `json:"connector_type_id"` + Frequency *struct { + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen GetAlertingRuleId200ActionsFrequencyNotifyWhen `json:"notify_when"` + + // Summary Indicates whether the action is a summary. + Summary bool `json:"summary"` + + // Throttle The throttle interval, which defines how often an alert generates repeated actions. It is specified in seconds, minutes, hours, or days and is applicable only if 'notify_when' is set to 'onThrottleInterval'. NOTE: You cannot specify the throttle interval at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + Throttle *string `json:"throttle"` + } `json:"frequency,omitempty"` + + // Group The group name, which affects when the action runs (for example, when the threshold is met or when the alert is recovered). Each rule type has a list of valid action group names. If you don't need to group actions, set to `default`. + Group *string `json:"group,omitempty"` + + // Id The identifier for the connector saved object. + Id string `json:"id"` + + // Params The parameters for the action, which are sent to the connector. 
The `params` are handled as Mustache templates and passed a default set of context. + Params map[string]interface{} `json:"params"` + + // UseAlertDataForTemplate Indicates whether to use alert data as a template. + UseAlertDataForTemplate *bool `json:"use_alert_data_for_template,omitempty"` + + // Uuid A universally unique identifier (UUID) for the action. + Uuid *string `json:"uuid,omitempty"` + } `json:"actions"` + ActiveSnoozes *[]string `json:"active_snoozes,omitempty"` + + // AlertDelay Indicates that an alert occurs only when the specified number of consecutive runs met the rule conditions. + AlertDelay *struct { + // Active The number of consecutive runs that must meet the rule conditions. + Active float32 `json:"active"` + } `json:"alert_delay,omitempty"` + + // ApiKeyCreatedByUser Indicates whether the API key that is associated with the rule was created by the user. + ApiKeyCreatedByUser *bool `json:"api_key_created_by_user"` + + // ApiKeyOwner The owner of the API key that is associated with the rule and used to run background tasks. + ApiKeyOwner *string `json:"api_key_owner"` + Artifacts *struct { + Dashboards *[]struct { + Id string `json:"id"` + } `json:"dashboards,omitempty"` + InvestigationGuide *struct { + // Blob User-created content that describes alert causes and remdiation. + Blob string `json:"blob"` + } `json:"investigation_guide,omitempty"` + } `json:"artifacts,omitempty"` + + // Consumer The name of the application or feature that owns the rule. For example: `alerts`, `apm`, `discover`, `infrastructure`, `logs`, `metrics`, `ml`, `monitoring`, `securitySolution`, `siem`, `stackAlerts`, or `uptime`. + Consumer string `json:"consumer"` + + // CreatedAt The date and time that the rule was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the rule. + CreatedBy *string `json:"created_by"` + + // Enabled Indicates whether you want to run the rule on an interval basis after it is created. + Enabled bool `json:"enabled"` + ExecutionStatus struct { + Error *struct { + // Message Error message. + Message string `json:"message"` + + // Reason Reason for error. + Reason GetAlertingRuleId200ExecutionStatusErrorReason `json:"reason"` + } `json:"error,omitempty"` + + // LastDuration Duration of last execution of the rule. + LastDuration *float32 `json:"last_duration,omitempty"` + + // LastExecutionDate The date and time when rule was executed last. + LastExecutionDate string `json:"last_execution_date"` + + // Status Status of rule execution. + Status GetAlertingRuleId200ExecutionStatusStatus `json:"status"` + Warning *struct { + // Message Warning message. + Message string `json:"message"` + + // Reason Reason for warning. + Reason GetAlertingRuleId200ExecutionStatusWarningReason `json:"reason"` + } `json:"warning,omitempty"` + } `json:"execution_status"` + + // Flapping When flapping detection is turned on, alerts that switch quickly between active and recovered states are identified as “flapping” and notifications are reduced. + Flapping *struct { + // LookBackWindow The minimum number of runs in which the threshold must be met. + LookBackWindow float32 `json:"look_back_window"` + + // StatusChangeThreshold The minimum number of times an alert must switch states in the look back window. + StatusChangeThreshold float32 `json:"status_change_threshold"` + } `json:"flapping"` + + // Id The identifier for the rule. + Id string `json:"id"` + + // IsSnoozedUntil The date when the rule will no longer be snoozed. 
+ IsSnoozedUntil *string `json:"is_snoozed_until"` + LastRun *struct { + AlertsCount struct { + // Active Number of active alerts during last run. + Active *float32 `json:"active"` + + // Ignored Number of ignored alerts during last run. + Ignored *float32 `json:"ignored"` + + // New Number of new alerts during last run. + New *float32 `json:"new"` + + // Recovered Number of recovered alerts during last run. + Recovered *float32 `json:"recovered"` + } `json:"alerts_count"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome GetAlertingRuleId200LastRunOutcome `json:"outcome"` + OutcomeMsg *[]string `json:"outcome_msg"` + + // OutcomeOrder Order of the outcome. + OutcomeOrder *float32 `json:"outcome_order,omitempty"` + + // Warning Warning of last rule execution. + Warning *GetAlertingRuleId200LastRunWarning `json:"warning"` + } `json:"last_run"` + MappedParams *map[string]interface{} `json:"mapped_params,omitempty"` + + // Monitoring Monitoring details of the rule. + Monitoring *struct { + // Run Rule run details. + Run struct { + // CalculatedMetrics Calculation of different percentiles and success ratio. + CalculatedMetrics struct { + P50 *float32 `json:"p50,omitempty"` + P95 *float32 `json:"p95,omitempty"` + P99 *float32 `json:"p99,omitempty"` + SuccessRatio float32 `json:"success_ratio"` + } `json:"calculated_metrics"` + + // History History of the rule run. + History []struct { + // Duration Duration of the rule run. + Duration *float32 `json:"duration,omitempty"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome *GetAlertingRuleId200MonitoringRunHistoryOutcome `json:"outcome,omitempty"` + + // Success Indicates whether the rule run was successful. + Success bool `json:"success"` + + // Timestamp Time of rule run. + Timestamp float32 `json:"timestamp"` + } `json:"history"` + LastRun struct { + Metrics struct { + // Duration Duration of most recent rule run. + Duration *float32 `json:"duration,omitempty"` + + // GapDurationS Duration in seconds of rule run gap. + GapDurationS *float32 `json:"gap_duration_s"` + GapRange *struct { + // Gte End of the gap range. + Gte string `json:"gte"` + + // Lte Start of the gap range. + Lte string `json:"lte"` + } `json:"gap_range"` + + // TotalAlertsCreated Total number of alerts created during last rule run. + TotalAlertsCreated *float32 `json:"total_alerts_created"` + + // TotalAlertsDetected Total number of alerts detected during last rule run. + TotalAlertsDetected *float32 `json:"total_alerts_detected"` + + // TotalIndexingDurationMs Total time spent indexing documents during last rule run in milliseconds. + TotalIndexingDurationMs *float32 `json:"total_indexing_duration_ms"` + + // TotalSearchDurationMs Total time spent performing Elasticsearch searches as measured by Kibana; includes network latency and time spent serializing or deserializing the request and response. + TotalSearchDurationMs *float32 `json:"total_search_duration_ms"` + } `json:"metrics"` + + // Timestamp Time of the most recent rule run. + Timestamp string `json:"timestamp"` + } `json:"last_run"` + } `json:"run"` + } `json:"monitoring,omitempty"` + + // MuteAll Indicates whether all alerts are muted. + MuteAll bool `json:"mute_all"` + MutedAlertIds []string `json:"muted_alert_ids"` + + // Name The name of the rule. + Name string `json:"name"` + + // NextRun Date and time of the next run of the rule. 
+ NextRun *string `json:"next_run"` + + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen *GetAlertingRuleId200NotifyWhen `json:"notify_when"` + + // Params The parameters for the rule. + Params map[string]interface{} `json:"params"` + + // Revision The rule revision number. + Revision float32 `json:"revision"` + + // RuleTypeId The rule type identifier. + RuleTypeId string `json:"rule_type_id"` + + // Running Indicates whether the rule is running. + Running *bool `json:"running"` + Schedule struct { + // Interval The interval is specified in seconds, minutes, hours, or days. + Interval string `json:"interval"` + } `json:"schedule"` + + // ScheduledTaskId Identifier of the scheduled task. + ScheduledTaskId *string `json:"scheduled_task_id,omitempty"` + SnoozeSchedule *[]struct { + // Duration Duration of the rule snooze schedule. + Duration float32 `json:"duration"` + + // Id Identifier of the rule snooze schedule. + Id *string `json:"id,omitempty"` + RRule struct { + Byhour *[]float32 `json:"byhour"` + Byminute *[]float32 `json:"byminute"` + Bymonth *[]float32 `json:"bymonth"` + Bymonthday *[]float32 `json:"bymonthday"` + Bysecond *[]float32 `json:"bysecond"` + Bysetpos *[]float32 `json:"bysetpos"` + Byweekday *[]GetAlertingRuleId_200_SnoozeSchedule_RRule_Byweekday_Item `json:"byweekday"` + Byweekno *[]float32 `json:"byweekno"` + Byyearday *[]float32 `json:"byyearday"` + + // Count Number of times the rule should recur until it stops. + Count *float32 `json:"count,omitempty"` + + // Dtstart Rule start date in Coordinated Universal Time (UTC). + Dtstart string `json:"dtstart"` + + // Freq Indicates frequency of the rule. Options are YEARLY, MONTHLY, WEEKLY, DAILY. + Freq *GetAlertingRuleId200SnoozeScheduleRRuleFreq `json:"freq,omitempty"` + + // Interval Indicates the interval of frequency. For example, 1 and YEARLY is every 1 year, 2 and WEEKLY is every 2 weeks. + Interval *float32 `json:"interval,omitempty"` + + // Tzid Indicates timezone abbreviation. + Tzid string `json:"tzid"` + + // Until Recur the rule until this date. + Until *string `json:"until,omitempty"` + + // Wkst Indicates the start of week, defaults to Monday. + Wkst *GetAlertingRuleId200SnoozeScheduleRRuleWkst `json:"wkst,omitempty"` + } `json:"rRule"` + SkipRecurrences *[]string `json:"skipRecurrences,omitempty"` + } `json:"snooze_schedule,omitempty"` + Tags []string `json:"tags"` + + // Throttle Deprecated in 8.13.0. Use the `throttle` property in the action `frequency` object instead. The throttle interval, which defines how often an alert generates repeated actions. NOTE: You cannot specify the throttle interval at both the rule and action level. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. 
+ // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Throttle *string `json:"throttle"` + + // UpdatedAt The date and time that the rule was updated most recently. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that updated this rule most recently. + UpdatedBy *string `json:"updated_by"` - if a.InstallFormatSchemaVersion != nil { - object["install_format_schema_version"], err = json.Marshal(a.InstallFormatSchemaVersion) - if err != nil { - return nil, fmt.Errorf("error marshaling 'install_format_schema_version': %w", err) + // ViewInAppRelativeUrl Relative URL to view rule in the app. + ViewInAppRelativeUrl *string `json:"view_in_app_relative_url"` } - } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - object["install_source"], err = json.Marshal(a.InstallSource) - if err != nil { - return nil, fmt.Errorf("error marshaling 'install_source': %w", err) } - object["install_status"], err = json.Marshal(a.InstallStatus) - if err != nil { - return nil, fmt.Errorf("error marshaling 'install_status': %w", err) - } + return response, nil +} - object["installed_es"], err = json.Marshal(a.InstalledEs) +// ParsePostAlertingRuleIdResponse parses an HTTP response from a PostAlertingRuleIdWithResponse call +func ParsePostAlertingRuleIdResponse(rsp *http.Response) (*PostAlertingRuleIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return nil, fmt.Errorf("error marshaling 'installed_es': %w", err) + return nil, err } - object["installed_kibana"], err = json.Marshal(a.InstalledKibana) - if err != nil { - return nil, fmt.Errorf("error marshaling 'installed_kibana': %w", err) + response := &PostAlertingRuleIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if a.InstalledKibanaSpaceId != nil { - object["installed_kibana_space_id"], err = json.Marshal(a.InstalledKibanaSpaceId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'installed_kibana_space_id': %w", err) - } - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Actions []struct { + // AlertsFilter Defines a period that limits whether the action runs. + AlertsFilter *struct { + Query *struct { + // Dsl A filter written in Elasticsearch Query Domain Specific Language (DSL). + Dsl *string `json:"dsl,omitempty"` + + // Filters A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package. + Filters []struct { + State *struct { + // Store A filter can be either specific to an application context or applied globally. + Store PostAlertingRuleId200ActionsAlertsFilterQueryFiltersStateStore `json:"store"` + } `json:"$state,omitempty"` + Meta map[string]interface{} `json:"meta"` + Query *map[string]interface{} `json:"query,omitempty"` + } `json:"filters"` - if a.LatestExecutedState != nil { - object["latest_executed_state"], err = json.Marshal(a.LatestExecutedState) - if err != nil { - return nil, fmt.Errorf("error marshaling 'latest_executed_state': %w", err) - } - } + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query,omitempty"` + Timeframe *struct { + // Days Defines the days of the week that the action can run, represented as an array of numbers. For example, `1` represents Monday. An empty array is equivalent to specifying all the days of the week. 
+ Days []PostAlertingRuleId200ActionsAlertsFilterTimeframeDays `json:"days"` + Hours struct { + // End The end of the time frame in 24-hour notation (`hh:mm`). + End string `json:"end"` + + // Start The start of the time frame in 24-hour notation (`hh:mm`). + Start string `json:"start"` + } `json:"hours"` + + // Timezone The ISO time zone for the `hours` values. Values such as `UTC` and `UTC+1` also work but lack built-in daylight savings time support and are not recommended. + Timezone string `json:"timezone"` + } `json:"timeframe,omitempty"` + } `json:"alerts_filter,omitempty"` + + // ConnectorTypeId The type of connector. This property appears in responses but cannot be set in requests. + ConnectorTypeId string `json:"connector_type_id"` + Frequency *struct { + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen PostAlertingRuleId200ActionsFrequencyNotifyWhen `json:"notify_when"` + + // Summary Indicates whether the action is a summary. + Summary bool `json:"summary"` + + // Throttle The throttle interval, which defines how often an alert generates repeated actions. It is specified in seconds, minutes, hours, or days and is applicable only if 'notify_when' is set to 'onThrottleInterval'. NOTE: You cannot specify the throttle interval at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + Throttle *string `json:"throttle"` + } `json:"frequency,omitempty"` + + // Group The group name, which affects when the action runs (for example, when the threshold is met or when the alert is recovered). Each rule type has a list of valid action group names. If you don't need to group actions, set to `default`. + Group *string `json:"group,omitempty"` + + // Id The identifier for the connector saved object. + Id string `json:"id"` + + // Params The parameters for the action, which are sent to the connector. The `params` are handled as Mustache templates and passed a default set of context. + Params map[string]interface{} `json:"params"` + + // UseAlertDataForTemplate Indicates whether to use alert data as a template. + UseAlertDataForTemplate *bool `json:"use_alert_data_for_template,omitempty"` + + // Uuid A universally unique identifier (UUID) for the action. + Uuid *string `json:"uuid,omitempty"` + } `json:"actions"` + ActiveSnoozes *[]string `json:"active_snoozes,omitempty"` + + // AlertDelay Indicates that an alert occurs only when the specified number of consecutive runs met the rule conditions. + AlertDelay *struct { + // Active The number of consecutive runs that must meet the rule conditions. + Active float32 `json:"active"` + } `json:"alert_delay,omitempty"` + + // ApiKeyCreatedByUser Indicates whether the API key that is associated with the rule was created by the user. 
+ ApiKeyCreatedByUser *bool `json:"api_key_created_by_user"` + + // ApiKeyOwner The owner of the API key that is associated with the rule and used to run background tasks. + ApiKeyOwner *string `json:"api_key_owner"` + Artifacts *struct { + Dashboards *[]struct { + Id string `json:"id"` + } `json:"dashboards,omitempty"` + InvestigationGuide *struct { + // Blob User-created content that describes alert causes and remdiation. + Blob string `json:"blob"` + } `json:"investigation_guide,omitempty"` + } `json:"artifacts,omitempty"` + + // Consumer The name of the application or feature that owns the rule. For example: `alerts`, `apm`, `discover`, `infrastructure`, `logs`, `metrics`, `ml`, `monitoring`, `securitySolution`, `siem`, `stackAlerts`, or `uptime`. + Consumer string `json:"consumer"` + + // CreatedAt The date and time that the rule was created. + CreatedAt string `json:"created_at"` - if a.LatestInstallFailedAttempts != nil { - object["latest_install_failed_attempts"], err = json.Marshal(a.LatestInstallFailedAttempts) - if err != nil { - return nil, fmt.Errorf("error marshaling 'latest_install_failed_attempts': %w", err) - } - } + // CreatedBy The identifier for the user that created the rule. + CreatedBy *string `json:"created_by"` - object["name"], err = json.Marshal(a.Name) - if err != nil { - return nil, fmt.Errorf("error marshaling 'name': %w", err) - } + // Enabled Indicates whether you want to run the rule on an interval basis after it is created. + Enabled bool `json:"enabled"` + ExecutionStatus struct { + Error *struct { + // Message Error message. + Message string `json:"message"` - if a.Namespaces != nil { - object["namespaces"], err = json.Marshal(a.Namespaces) - if err != nil { - return nil, fmt.Errorf("error marshaling 'namespaces': %w", err) - } - } + // Reason Reason for error. + Reason PostAlertingRuleId200ExecutionStatusErrorReason `json:"reason"` + } `json:"error,omitempty"` - object["type"], err = json.Marshal(a.Type) - if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) - } + // LastDuration Duration of last execution of the rule. + LastDuration *float32 `json:"last_duration,omitempty"` - if a.UpdatedAt != nil { - object["updated_at"], err = json.Marshal(a.UpdatedAt) - if err != nil { - return nil, fmt.Errorf("error marshaling 'updated_at': %w", err) - } - } + // LastExecutionDate The date and time when rule was executed last. + LastExecutionDate string `json:"last_execution_date"` - if a.VerificationKeyId != nil { - object["verification_key_id"], err = json.Marshal(a.VerificationKeyId) - if err != nil { - return nil, fmt.Errorf("error marshaling 'verification_key_id': %w", err) - } - } + // Status Status of rule execution. + Status PostAlertingRuleId200ExecutionStatusStatus `json:"status"` + Warning *struct { + // Message Warning message. + Message string `json:"message"` - object["verification_status"], err = json.Marshal(a.VerificationStatus) - if err != nil { - return nil, fmt.Errorf("error marshaling 'verification_status': %w", err) - } + // Reason Reason for warning. + Reason PostAlertingRuleId200ExecutionStatusWarningReason `json:"reason"` + } `json:"warning,omitempty"` + } `json:"execution_status"` - object["version"], err = json.Marshal(a.Version) - if err != nil { - return nil, fmt.Errorf("error marshaling 'version': %w", err) - } + // Flapping When flapping detection is turned on, alerts that switch quickly between active and recovered states are identified as “flapping” and notifications are reduced. 
+ Flapping *struct { + // LookBackWindow The minimum number of runs in which the threshold must be met. + LookBackWindow float32 `json:"look_back_window"` - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + // StatusChangeThreshold The minimum number of times an alert must switch states in the look back window. + StatusChangeThreshold float32 `json:"status_change_threshold"` + } `json:"flapping"` + + // Id The identifier for the rule. + Id string `json:"id"` + + // IsSnoozedUntil The date when the rule will no longer be snoozed. + IsSnoozedUntil *string `json:"is_snoozed_until"` + LastRun *struct { + AlertsCount struct { + // Active Number of active alerts during last run. + Active *float32 `json:"active"` + + // Ignored Number of ignored alerts during last run. + Ignored *float32 `json:"ignored"` + + // New Number of new alerts during last run. + New *float32 `json:"new"` + + // Recovered Number of recovered alerts during last run. + Recovered *float32 `json:"recovered"` + } `json:"alerts_count"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome PostAlertingRuleId200LastRunOutcome `json:"outcome"` + OutcomeMsg *[]string `json:"outcome_msg"` + + // OutcomeOrder Order of the outcome. + OutcomeOrder *float32 `json:"outcome_order,omitempty"` + + // Warning Warning of last rule execution. + Warning *PostAlertingRuleId200LastRunWarning `json:"warning"` + } `json:"last_run"` + MappedParams *map[string]interface{} `json:"mapped_params,omitempty"` + + // Monitoring Monitoring details of the rule. + Monitoring *struct { + // Run Rule run details. + Run struct { + // CalculatedMetrics Calculation of different percentiles and success ratio. + CalculatedMetrics struct { + P50 *float32 `json:"p50,omitempty"` + P95 *float32 `json:"p95,omitempty"` + P99 *float32 `json:"p99,omitempty"` + SuccessRatio float32 `json:"success_ratio"` + } `json:"calculated_metrics"` + + // History History of the rule run. + History []struct { + // Duration Duration of the rule run. + Duration *float32 `json:"duration,omitempty"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome *PostAlertingRuleId200MonitoringRunHistoryOutcome `json:"outcome,omitempty"` + + // Success Indicates whether the rule run was successful. + Success bool `json:"success"` + + // Timestamp Time of rule run. + Timestamp float32 `json:"timestamp"` + } `json:"history"` + LastRun struct { + Metrics struct { + // Duration Duration of most recent rule run. + Duration *float32 `json:"duration,omitempty"` + + // GapDurationS Duration in seconds of rule run gap. + GapDurationS *float32 `json:"gap_duration_s"` + GapRange *struct { + // Gte End of the gap range. + Gte string `json:"gte"` + + // Lte Start of the gap range. + Lte string `json:"lte"` + } `json:"gap_range"` + + // TotalAlertsCreated Total number of alerts created during last rule run. + TotalAlertsCreated *float32 `json:"total_alerts_created"` + + // TotalAlertsDetected Total number of alerts detected during last rule run. + TotalAlertsDetected *float32 `json:"total_alerts_detected"` + + // TotalIndexingDurationMs Total time spent indexing documents during last rule run in milliseconds. 
+ TotalIndexingDurationMs *float32 `json:"total_indexing_duration_ms"` + + // TotalSearchDurationMs Total time spent performing Elasticsearch searches as measured by Kibana; includes network latency and time spent serializing or deserializing the request and response. + TotalSearchDurationMs *float32 `json:"total_search_duration_ms"` + } `json:"metrics"` + + // Timestamp Time of the most recent rule run. + Timestamp string `json:"timestamp"` + } `json:"last_run"` + } `json:"run"` + } `json:"monitoring,omitempty"` + + // MuteAll Indicates whether all alerts are muted. + MuteAll bool `json:"mute_all"` + MutedAlertIds []string `json:"muted_alert_ids"` + + // Name The name of the rule. + Name string `json:"name"` + + // NextRun Date and time of the next run of the rule. + NextRun *string `json:"next_run"` + + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen *PostAlertingRuleId200NotifyWhen `json:"notify_when"` + + // Params The parameters for the rule. + Params map[string]interface{} `json:"params"` + + // Revision The rule revision number. + Revision float32 `json:"revision"` + + // RuleTypeId The rule type identifier. + RuleTypeId string `json:"rule_type_id"` + + // Running Indicates whether the rule is running. + Running *bool `json:"running"` + Schedule struct { + // Interval The interval is specified in seconds, minutes, hours, or days. + Interval string `json:"interval"` + } `json:"schedule"` + + // ScheduledTaskId Identifier of the scheduled task. + ScheduledTaskId *string `json:"scheduled_task_id,omitempty"` + SnoozeSchedule *[]struct { + // Duration Duration of the rule snooze schedule. + Duration float32 `json:"duration"` + + // Id Identifier of the rule snooze schedule. + Id *string `json:"id,omitempty"` + RRule struct { + Byhour *[]float32 `json:"byhour"` + Byminute *[]float32 `json:"byminute"` + Bymonth *[]float32 `json:"bymonth"` + Bymonthday *[]float32 `json:"bymonthday"` + Bysecond *[]float32 `json:"bysecond"` + Bysetpos *[]float32 `json:"bysetpos"` + Byweekday *[]PostAlertingRuleId_200_SnoozeSchedule_RRule_Byweekday_Item `json:"byweekday"` + Byweekno *[]float32 `json:"byweekno"` + Byyearday *[]float32 `json:"byyearday"` + + // Count Number of times the rule should recur until it stops. + Count *float32 `json:"count,omitempty"` + + // Dtstart Rule start date in Coordinated Universal Time (UTC). + Dtstart string `json:"dtstart"` + + // Freq Indicates frequency of the rule. Options are YEARLY, MONTHLY, WEEKLY, DAILY. + Freq *PostAlertingRuleId200SnoozeScheduleRRuleFreq `json:"freq,omitempty"` + + // Interval Indicates the interval of frequency. For example, 1 and YEARLY is every 1 year, 2 and WEEKLY is every 2 weeks. + Interval *float32 `json:"interval,omitempty"` + + // Tzid Indicates timezone abbreviation. + Tzid string `json:"tzid"` + + // Until Recur the rule until this date. 
+ Until *string `json:"until,omitempty"` + + // Wkst Indicates the start of week, defaults to Monday. + Wkst *PostAlertingRuleId200SnoozeScheduleRRuleWkst `json:"wkst,omitempty"` + } `json:"rRule"` + SkipRecurrences *[]string `json:"skipRecurrences,omitempty"` + } `json:"snooze_schedule,omitempty"` + Tags []string `json:"tags"` + + // Throttle Deprecated in 8.13.0. Use the `throttle` property in the action `frequency` object instead. The throttle interval, which defines how often an alert generates repeated actions. NOTE: You cannot specify the throttle interval at both the rule and action level. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Throttle *string `json:"throttle"` + + // UpdatedAt The date and time that the rule was updated most recently. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that updated this rule most recently. + UpdatedBy *string `json:"updated_by"` + + // ViewInAppRelativeUrl Relative URL to view rule in the app. + ViewInAppRelativeUrl *string `json:"view_in_app_relative_url"` } - } - return json.Marshal(object) -} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// Getter for additional properties for PackageListItem_Owner. Returns the specified -// element and whether it was found -func (a PackageListItem_Owner) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] } - return -} -// Setter for additional properties for PackageListItem_Owner -func (a *PackageListItem_Owner) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) - } - a.AdditionalProperties[fieldName] = value + return response, nil } -// Override default JSON handling for PackageListItem_Owner to handle AdditionalProperties -func (a *PackageListItem_Owner) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// ParsePutAlertingRuleIdResponse parses an HTTP response from a PutAlertingRuleIdWithResponse call +func ParsePutAlertingRuleIdResponse(rsp *http.Response) (*PutAlertingRuleIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - if raw, found := object["github"]; found { - err = json.Unmarshal(raw, &a.Github) - if err != nil { - return fmt.Errorf("error reading 'github': %w", err) - } - delete(object, "github") + response := &PutAlertingRuleIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if raw, found := object["type"]; found { - err = json.Unmarshal(raw, &a.Type) - if err != nil { - return fmt.Errorf("error reading 'type': %w", err) - } - delete(object, "type") - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Actions []struct { + // AlertsFilter Defines a period that limits whether the action runs. + AlertsFilter *struct { + Query *struct { + // Dsl A filter written in Elasticsearch Query Domain Specific Language (DSL). + Dsl *string `json:"dsl,omitempty"` + + // Filters A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package. 
+ Filters []struct { + State *struct { + // Store A filter can be either specific to an application context or applied globally. + Store PutAlertingRuleId200ActionsAlertsFilterQueryFiltersStateStore `json:"store"` + } `json:"$state,omitempty"` + Meta map[string]interface{} `json:"meta"` + Query *map[string]interface{} `json:"query,omitempty"` + } `json:"filters"` - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } - } - return nil -} + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query,omitempty"` + Timeframe *struct { + // Days Defines the days of the week that the action can run, represented as an array of numbers. For example, `1` represents Monday. An empty array is equivalent to specifying all the days of the week. + Days []PutAlertingRuleId200ActionsAlertsFilterTimeframeDays `json:"days"` + Hours struct { + // End The end of the time frame in 24-hour notation (`hh:mm`). + End string `json:"end"` + + // Start The start of the time frame in 24-hour notation (`hh:mm`). + Start string `json:"start"` + } `json:"hours"` + + // Timezone The ISO time zone for the `hours` values. Values such as `UTC` and `UTC+1` also work but lack built-in daylight savings time support and are not recommended. + Timezone string `json:"timezone"` + } `json:"timeframe,omitempty"` + } `json:"alerts_filter,omitempty"` + + // ConnectorTypeId The type of connector. This property appears in responses but cannot be set in requests. + ConnectorTypeId string `json:"connector_type_id"` + Frequency *struct { + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen PutAlertingRuleId200ActionsFrequencyNotifyWhen `json:"notify_when"` + + // Summary Indicates whether the action is a summary. + Summary bool `json:"summary"` + + // Throttle The throttle interval, which defines how often an alert generates repeated actions. It is specified in seconds, minutes, hours, or days and is applicable only if 'notify_when' is set to 'onThrottleInterval'. NOTE: You cannot specify the throttle interval at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + Throttle *string `json:"throttle"` + } `json:"frequency,omitempty"` + + // Group The group name, which affects when the action runs (for example, when the threshold is met or when the alert is recovered). Each rule type has a list of valid action group names. If you don't need to group actions, set to `default`. 
+ Group *string `json:"group,omitempty"` + + // Id The identifier for the connector saved object. + Id string `json:"id"` + + // Params The parameters for the action, which are sent to the connector. The `params` are handled as Mustache templates and passed a default set of context. + Params map[string]interface{} `json:"params"` + + // UseAlertDataForTemplate Indicates whether to use alert data as a template. + UseAlertDataForTemplate *bool `json:"use_alert_data_for_template,omitempty"` + + // Uuid A universally unique identifier (UUID) for the action. + Uuid *string `json:"uuid,omitempty"` + } `json:"actions"` + ActiveSnoozes *[]string `json:"active_snoozes,omitempty"` + + // AlertDelay Indicates that an alert occurs only when the specified number of consecutive runs met the rule conditions. + AlertDelay *struct { + // Active The number of consecutive runs that must meet the rule conditions. + Active float32 `json:"active"` + } `json:"alert_delay,omitempty"` + + // ApiKeyCreatedByUser Indicates whether the API key that is associated with the rule was created by the user. + ApiKeyCreatedByUser *bool `json:"api_key_created_by_user"` + + // ApiKeyOwner The owner of the API key that is associated with the rule and used to run background tasks. + ApiKeyOwner *string `json:"api_key_owner"` + Artifacts *struct { + Dashboards *[]struct { + Id string `json:"id"` + } `json:"dashboards,omitempty"` + InvestigationGuide *struct { + // Blob User-created content that describes alert causes and remdiation. + Blob string `json:"blob"` + } `json:"investigation_guide,omitempty"` + } `json:"artifacts,omitempty"` + + // Consumer The name of the application or feature that owns the rule. For example: `alerts`, `apm`, `discover`, `infrastructure`, `logs`, `metrics`, `ml`, `monitoring`, `securitySolution`, `siem`, `stackAlerts`, or `uptime`. + Consumer string `json:"consumer"` + + // CreatedAt The date and time that the rule was created. + CreatedAt string `json:"created_at"` -// Override default JSON handling for PackageListItem_Owner to handle AdditionalProperties -func (a PackageListItem_Owner) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) + // CreatedBy The identifier for the user that created the rule. + CreatedBy *string `json:"created_by"` - if a.Github != nil { - object["github"], err = json.Marshal(a.Github) - if err != nil { - return nil, fmt.Errorf("error marshaling 'github': %w", err) - } - } + // Enabled Indicates whether you want to run the rule on an interval basis after it is created. + Enabled bool `json:"enabled"` + ExecutionStatus struct { + Error *struct { + // Message Error message. + Message string `json:"message"` - if a.Type != nil { - object["type"], err = json.Marshal(a.Type) - if err != nil { - return nil, fmt.Errorf("error marshaling 'type': %w", err) - } - } + // Reason Reason for error. + Reason PutAlertingRuleId200ExecutionStatusErrorReason `json:"reason"` + } `json:"error,omitempty"` - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + // LastDuration Duration of last execution of the rule. + LastDuration *float32 `json:"last_duration,omitempty"` + + // LastExecutionDate The date and time when rule was executed last. + LastExecutionDate string `json:"last_execution_date"` + + // Status Status of rule execution. 
+ Status PutAlertingRuleId200ExecutionStatusStatus `json:"status"` + Warning *struct { + // Message Warning message. + Message string `json:"message"` + + // Reason Reason for warning. + Reason PutAlertingRuleId200ExecutionStatusWarningReason `json:"reason"` + } `json:"warning,omitempty"` + } `json:"execution_status"` + + // Flapping When flapping detection is turned on, alerts that switch quickly between active and recovered states are identified as “flapping” and notifications are reduced. + Flapping *struct { + // LookBackWindow The minimum number of runs in which the threshold must be met. + LookBackWindow float32 `json:"look_back_window"` + + // StatusChangeThreshold The minimum number of times an alert must switch states in the look back window. + StatusChangeThreshold float32 `json:"status_change_threshold"` + } `json:"flapping"` + + // Id The identifier for the rule. + Id string `json:"id"` + + // IsSnoozedUntil The date when the rule will no longer be snoozed. + IsSnoozedUntil *string `json:"is_snoozed_until"` + LastRun *struct { + AlertsCount struct { + // Active Number of active alerts during last run. + Active *float32 `json:"active"` + + // Ignored Number of ignored alerts during last run. + Ignored *float32 `json:"ignored"` + + // New Number of new alerts during last run. + New *float32 `json:"new"` + + // Recovered Number of recovered alerts during last run. + Recovered *float32 `json:"recovered"` + } `json:"alerts_count"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome PutAlertingRuleId200LastRunOutcome `json:"outcome"` + OutcomeMsg *[]string `json:"outcome_msg"` + + // OutcomeOrder Order of the outcome. + OutcomeOrder *float32 `json:"outcome_order,omitempty"` + + // Warning Warning of last rule execution. + Warning *PutAlertingRuleId200LastRunWarning `json:"warning"` + } `json:"last_run"` + MappedParams *map[string]interface{} `json:"mapped_params,omitempty"` + + // Monitoring Monitoring details of the rule. + Monitoring *struct { + // Run Rule run details. + Run struct { + // CalculatedMetrics Calculation of different percentiles and success ratio. + CalculatedMetrics struct { + P50 *float32 `json:"p50,omitempty"` + P95 *float32 `json:"p95,omitempty"` + P99 *float32 `json:"p99,omitempty"` + SuccessRatio float32 `json:"success_ratio"` + } `json:"calculated_metrics"` + + // History History of the rule run. + History []struct { + // Duration Duration of the rule run. + Duration *float32 `json:"duration,omitempty"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome *PutAlertingRuleId200MonitoringRunHistoryOutcome `json:"outcome,omitempty"` + + // Success Indicates whether the rule run was successful. + Success bool `json:"success"` + + // Timestamp Time of rule run. + Timestamp float32 `json:"timestamp"` + } `json:"history"` + LastRun struct { + Metrics struct { + // Duration Duration of most recent rule run. + Duration *float32 `json:"duration,omitempty"` + + // GapDurationS Duration in seconds of rule run gap. + GapDurationS *float32 `json:"gap_duration_s"` + GapRange *struct { + // Gte End of the gap range. + Gte string `json:"gte"` + + // Lte Start of the gap range. + Lte string `json:"lte"` + } `json:"gap_range"` + + // TotalAlertsCreated Total number of alerts created during last rule run. + TotalAlertsCreated *float32 `json:"total_alerts_created"` + + // TotalAlertsDetected Total number of alerts detected during last rule run. 
+ TotalAlertsDetected *float32 `json:"total_alerts_detected"` + + // TotalIndexingDurationMs Total time spent indexing documents during last rule run in milliseconds. + TotalIndexingDurationMs *float32 `json:"total_indexing_duration_ms"` + + // TotalSearchDurationMs Total time spent performing Elasticsearch searches as measured by Kibana; includes network latency and time spent serializing or deserializing the request and response. + TotalSearchDurationMs *float32 `json:"total_search_duration_ms"` + } `json:"metrics"` + + // Timestamp Time of the most recent rule run. + Timestamp string `json:"timestamp"` + } `json:"last_run"` + } `json:"run"` + } `json:"monitoring,omitempty"` + + // MuteAll Indicates whether all alerts are muted. + MuteAll bool `json:"mute_all"` + MutedAlertIds []string `json:"muted_alert_ids"` + + // Name The name of the rule. + Name string `json:"name"` + + // NextRun Date and time of the next run of the rule. + NextRun *string `json:"next_run"` + + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen *PutAlertingRuleId200NotifyWhen `json:"notify_when"` + + // Params The parameters for the rule. + Params map[string]interface{} `json:"params"` + + // Revision The rule revision number. + Revision float32 `json:"revision"` + + // RuleTypeId The rule type identifier. + RuleTypeId string `json:"rule_type_id"` + + // Running Indicates whether the rule is running. + Running *bool `json:"running"` + Schedule struct { + // Interval The interval is specified in seconds, minutes, hours, or days. + Interval string `json:"interval"` + } `json:"schedule"` + + // ScheduledTaskId Identifier of the scheduled task. + ScheduledTaskId *string `json:"scheduled_task_id,omitempty"` + SnoozeSchedule *[]struct { + // Duration Duration of the rule snooze schedule. + Duration float32 `json:"duration"` + + // Id Identifier of the rule snooze schedule. + Id *string `json:"id,omitempty"` + RRule struct { + Byhour *[]float32 `json:"byhour"` + Byminute *[]float32 `json:"byminute"` + Bymonth *[]float32 `json:"bymonth"` + Bymonthday *[]float32 `json:"bymonthday"` + Bysecond *[]float32 `json:"bysecond"` + Bysetpos *[]float32 `json:"bysetpos"` + Byweekday *[]PutAlertingRuleId_200_SnoozeSchedule_RRule_Byweekday_Item `json:"byweekday"` + Byweekno *[]float32 `json:"byweekno"` + Byyearday *[]float32 `json:"byyearday"` + + // Count Number of times the rule should recur until it stops. + Count *float32 `json:"count,omitempty"` + + // Dtstart Rule start date in Coordinated Universal Time (UTC). + Dtstart string `json:"dtstart"` + + // Freq Indicates frequency of the rule. Options are YEARLY, MONTHLY, WEEKLY, DAILY. + Freq *PutAlertingRuleId200SnoozeScheduleRRuleFreq `json:"freq,omitempty"` + + // Interval Indicates the interval of frequency. For example, 1 and YEARLY is every 1 year, 2 and WEEKLY is every 2 weeks. 
+ Interval *float32 `json:"interval,omitempty"` + + // Tzid Indicates timezone abbreviation. + Tzid string `json:"tzid"` + + // Until Recur the rule until this date. + Until *string `json:"until,omitempty"` + + // Wkst Indicates the start of week, defaults to Monday. + Wkst *PutAlertingRuleId200SnoozeScheduleRRuleWkst `json:"wkst,omitempty"` + } `json:"rRule"` + SkipRecurrences *[]string `json:"skipRecurrences,omitempty"` + } `json:"snooze_schedule,omitempty"` + Tags []string `json:"tags"` + + // Throttle Deprecated in 8.13.0. Use the `throttle` property in the action `frequency` object instead. The throttle interval, which defines how often an alert generates repeated actions. NOTE: You cannot specify the throttle interval at both the rule and action level. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Throttle *string `json:"throttle"` + + // UpdatedAt The date and time that the rule was updated most recently. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that updated this rule most recently. + UpdatedBy *string `json:"updated_by"` + + // ViewInAppRelativeUrl Relative URL to view rule in the app. + ViewInAppRelativeUrl *string `json:"view_in_app_relative_url"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest + } - return json.Marshal(object) + + return response, nil } -// Getter for additional properties for PackageListItem_Source. Returns the specified -// element and whether it was found -func (a PackageListItem_Source) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// ParsePostAlertingRuleIdDisableResponse parses an HTTP response from a PostAlertingRuleIdDisableWithResponse call +func ParsePostAlertingRuleIdDisableResponse(rsp *http.Response) (*PostAlertingRuleIdDisableResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return -} -// Setter for additional properties for PackageListItem_Source -func (a *PackageListItem_Source) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) + response := &PostAlertingRuleIdDisableResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - a.AdditionalProperties[fieldName] = value + + return response, nil } -// Override default JSON handling for PackageListItem_Source to handle AdditionalProperties -func (a *PackageListItem_Source) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// ParsePostAlertingRuleIdEnableResponse parses an HTTP response from a PostAlertingRuleIdEnableWithResponse call +func ParsePostAlertingRuleIdEnableResponse(rsp *http.Response) (*PostAlertingRuleIdEnableResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - if raw, found := object["license"]; found { - err = json.Unmarshal(raw, &a.License) - if err != nil { - return fmt.Errorf("error reading 'license': %w", err) - } - delete(object, "license") + response := &PostAlertingRuleIdEnableResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if len(object) != 0 { - 
a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } - } - return nil + return response, nil } -// Override default JSON handling for PackageListItem_Source to handle AdditionalProperties -func (a PackageListItem_Source) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - object["license"], err = json.Marshal(a.License) +// ParsePostAlertingRuleIdMuteAllResponse parses an HTTP response from a PostAlertingRuleIdMuteAllWithResponse call +func ParsePostAlertingRuleIdMuteAllResponse(rsp *http.Response) (*PostAlertingRuleIdMuteAllResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return nil, fmt.Errorf("error marshaling 'license': %w", err) + return nil, err } - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } + response := &PostAlertingRuleIdMuteAllResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return json.Marshal(object) + + return response, nil } -// Getter for additional properties for PackagePolicy_Elasticsearch_Privileges. Returns the specified -// element and whether it was found -func (a PackagePolicy_Elasticsearch_Privileges) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// ParsePostAlertingRuleIdUnmuteAllResponse parses an HTTP response from a PostAlertingRuleIdUnmuteAllWithResponse call +func ParsePostAlertingRuleIdUnmuteAllResponse(rsp *http.Response) (*PostAlertingRuleIdUnmuteAllResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return -} -// Setter for additional properties for PackagePolicy_Elasticsearch_Privileges -func (a *PackagePolicy_Elasticsearch_Privileges) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) + response := &PostAlertingRuleIdUnmuteAllResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - a.AdditionalProperties[fieldName] = value + + return response, nil } -// Override default JSON handling for PackagePolicy_Elasticsearch_Privileges to handle AdditionalProperties -func (a *PackagePolicy_Elasticsearch_Privileges) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// ParsePostAlertingRuleIdUpdateApiKeyResponse parses an HTTP response from a PostAlertingRuleIdUpdateApiKeyWithResponse call +func ParsePostAlertingRuleIdUpdateApiKeyResponse(rsp *http.Response) (*PostAlertingRuleIdUpdateApiKeyResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - if raw, found := object["cluster"]; found { - err = json.Unmarshal(raw, &a.Cluster) - if err != nil { - return fmt.Errorf("error reading 'cluster': %w", err) - } - delete(object, "cluster") + response := &PostAlertingRuleIdUpdateApiKeyResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, 
fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } - } - return nil + return response, nil } -// Override default JSON handling for PackagePolicy_Elasticsearch_Privileges to handle AdditionalProperties -func (a PackagePolicy_Elasticsearch_Privileges) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) +// ParsePostAlertingRuleIdSnoozeScheduleResponse parses an HTTP response from a PostAlertingRuleIdSnoozeScheduleWithResponse call +func ParsePostAlertingRuleIdSnoozeScheduleResponse(rsp *http.Response) (*PostAlertingRuleIdSnoozeScheduleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - if a.Cluster != nil { - object["cluster"], err = json.Marshal(a.Cluster) - if err != nil { - return nil, fmt.Errorf("error marshaling 'cluster': %w", err) - } + response := &PostAlertingRuleIdSnoozeScheduleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Body struct { + Schedule struct { + Custom *struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom,omitempty"` + + // Id Identifier of the snooze schedule. + Id string `json:"id"` + } `json:"schedule"` + } `json:"body"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest + } - return json.Marshal(object) + + return response, nil } -// Getter for additional properties for PackagePolicy_Elasticsearch. 
Returns the specified -// element and whether it was found -func (a PackagePolicy_Elasticsearch) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] +// ParseDeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse parses an HTTP response from a DeleteAlertingRuleRuleidSnoozeScheduleScheduleidWithResponse call +func ParseDeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse(rsp *http.Response) (*DeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return -} -// Setter for additional properties for PackagePolicy_Elasticsearch -func (a *PackagePolicy_Elasticsearch) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) + response := &DeleteAlertingRuleRuleidSnoozeScheduleScheduleidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - a.AdditionalProperties[fieldName] = value + + return response, nil } -// Override default JSON handling for PackagePolicy_Elasticsearch to handle AdditionalProperties -func (a *PackagePolicy_Elasticsearch) UnmarshalJSON(b []byte) error { - object := make(map[string]json.RawMessage) - err := json.Unmarshal(b, &object) +// ParsePostAlertingRuleRuleIdAlertAlertIdMuteResponse parses an HTTP response from a PostAlertingRuleRuleIdAlertAlertIdMuteWithResponse call +func ParsePostAlertingRuleRuleIdAlertAlertIdMuteResponse(rsp *http.Response) (*PostAlertingRuleRuleIdAlertAlertIdMuteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - if raw, found := object["privileges"]; found { - err = json.Unmarshal(raw, &a.Privileges) - if err != nil { - return fmt.Errorf("error reading 'privileges': %w", err) - } - delete(object, "privileges") + response := &PostAlertingRuleRuleIdAlertAlertIdMuteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal - } - } - return nil + return response, nil } -// Override default JSON handling for PackagePolicy_Elasticsearch to handle AdditionalProperties -func (a PackagePolicy_Elasticsearch) MarshalJSON() ([]byte, error) { - var err error - object := make(map[string]json.RawMessage) - - if a.Privileges != nil { - object["privileges"], err = json.Marshal(a.Privileges) - if err != nil { - return nil, fmt.Errorf("error marshaling 'privileges': %w", err) - } +// ParsePostAlertingRuleRuleIdAlertAlertIdUnmuteResponse parses an HTTP response from a PostAlertingRuleRuleIdAlertAlertIdUnmuteWithResponse call +func ParsePostAlertingRuleRuleIdAlertAlertIdUnmuteResponse(rsp *http.Response) (*PostAlertingRuleRuleIdAlertAlertIdUnmuteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } + response := &PostAlertingRuleRuleIdAlertAlertIdUnmuteResponse{ + 
Body: bodyBytes, + HTTPResponse: rsp, } - return json.Marshal(object) -} -// Getter for additional properties for UpdateConnectorConfig. Returns the specified -// element and whether it was found -func (a UpdateConnectorConfig) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] - } - return + return response, nil } -// Setter for additional properties for UpdateConnectorConfig -func (a *UpdateConnectorConfig) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) +// ParseGetRuleTypesResponse parses an HTTP response from a GetRuleTypesWithResponse call +func ParseGetRuleTypesResponse(rsp *http.Response) (*GetRuleTypesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - a.AdditionalProperties[fieldName] = value -} -// Getter for additional properties for UpdateConnectorSecrets. Returns the specified -// element and whether it was found -func (a UpdateConnectorSecrets) Get(fieldName string) (value interface{}, found bool) { - if a.AdditionalProperties != nil { - value, found = a.AdditionalProperties[fieldName] + response := &GetRuleTypesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return -} -// Setter for additional properties for UpdateConnectorSecrets -func (a *UpdateConnectorSecrets) Set(fieldName string, value interface{}) { - if a.AdditionalProperties == nil { - a.AdditionalProperties = make(map[string]interface{}) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + // ActionGroups An explicit list of groups for which the rule type can schedule actions, each with the action group's unique ID and human readable name. Rule actions validation uses this configuration to ensure that groups are valid. + ActionGroups *[]struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"action_groups,omitempty"` + + // ActionVariables A list of action variables that the rule type makes available via context and state in action parameter templates, and a short human readable description. When you create a rule in Kibana, it uses this information to prompt you for these variables in action parameter editors. + ActionVariables *struct { + Context *[]struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + UseWithTripleBracesInTemplates *bool `json:"useWithTripleBracesInTemplates,omitempty"` + } `json:"context,omitempty"` + Params *[]struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"params,omitempty"` + State *[]struct { + Description *string `json:"description,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"state,omitempty"` + } `json:"action_variables,omitempty"` + + // Alerts Details for writing alerts as data documents for this rule type. + Alerts *struct { + // Context The namespace for this rule type. + Context *GetRuleTypes200AlertsContext `json:"context,omitempty"` + + // Dynamic Indicates whether new fields are added dynamically. + Dynamic *GetRuleTypes200AlertsDynamic `json:"dynamic,omitempty"` + + // IsSpaceAware Indicates whether the alerts are space-aware. If true, space-specific alert indices are used. 
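[Editorial note, not part of the patch] The Parse*Response helpers being added in these hunks all share the same oapi-codegen shape: the raw Body bytes, the underlying *http.Response, and a typed JSON200 pointer that is populated only for a JSON 200 reply. A minimal caller might look like the sketch below; the package name kbapi, the import path, the Kibana URL, and the credentials are illustrative assumptions, not something this patch defines.

package main

import (
	"fmt"
	"net/http"

	// Assumed import path for the generated client this diff belongs to.
	kbapi "github.com/elastic/terraform-provider-elasticstack/generated/kbapi"
)

func main() {
	// In real code the *http.Response comes from the generated *WithResponse
	// client methods; a hand-rolled GET keeps the sketch short.
	req, err := http.NewRequest(http.MethodGet, "https://kibana.example.com/api/alerting/rule_types", nil)
	if err != nil {
		panic(err)
	}
	req.SetBasicAuth("elastic", "changeme") // illustrative credentials
	rsp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}

	parsed, err := kbapi.ParseGetRuleTypesResponse(rsp) // helper added in this patch
	if err != nil {
		panic(err)
	}

	// JSON200 is non-nil only when Kibana returned HTTP 200 with a JSON body.
	if parsed.JSON200 == nil {
		fmt.Printf("unexpected status %d: %s\n", parsed.HTTPResponse.StatusCode, string(parsed.Body))
		return
	}
	for _, rt := range *parsed.JSON200 {
		if rt.Id != nil && rt.Name != nil {
			fmt.Printf("%s: %s\n", *rt.Id, *rt.Name)
		}
	}
}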
+ IsSpaceAware *bool `json:"isSpaceAware,omitempty"` + Mappings *struct { + // FieldMap Mapping information for each field supported in alerts as data documents for this rule type. For more information about mapping parameters, refer to the Elasticsearch documentation. + FieldMap *map[string]AlertingFieldmapProperties `json:"fieldMap,omitempty"` + } `json:"mappings,omitempty"` + + // SecondaryAlias A secondary alias. It is typically used to support the signals alias for detection rules. + SecondaryAlias *string `json:"secondaryAlias,omitempty"` + + // ShouldWrite Indicates whether the rule should write out alerts as data. + ShouldWrite *bool `json:"shouldWrite,omitempty"` + + // UseEcs Indicates whether to include the ECS component template for the alerts. + UseEcs *bool `json:"useEcs,omitempty"` + + // UseLegacyAlerts Indicates whether to include the legacy component template for the alerts. + UseLegacyAlerts *bool `json:"useLegacyAlerts,omitempty"` + } `json:"alerts,omitempty"` + + // AuthorizedConsumers The list of the plugins IDs that have access to the rule type. + AuthorizedConsumers *struct { + Alerts *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"alerts,omitempty"` + Apm *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"apm,omitempty"` + Discover *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"discover,omitempty"` + Infrastructure *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"infrastructure,omitempty"` + Logs *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"logs,omitempty"` + Ml *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"ml,omitempty"` + Monitoring *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"monitoring,omitempty"` + Siem *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"siem,omitempty"` + Slo *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"slo,omitempty"` + StackAlerts *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"stackAlerts,omitempty"` + Uptime *struct { + All *bool `json:"all,omitempty"` + Read *bool `json:"read,omitempty"` + } `json:"uptime,omitempty"` + } `json:"authorized_consumers,omitempty"` + + // Category The rule category, which is used by features such as category-specific maintenance windows. + Category *GetRuleTypes200Category `json:"category,omitempty"` + + // DefaultActionGroupId The default identifier for the rule type group. + DefaultActionGroupId *string `json:"default_action_group_id,omitempty"` + + // DoesSetRecoveryContext Indicates whether the rule passes context variables to its recovery action. + DoesSetRecoveryContext *bool `json:"does_set_recovery_context,omitempty"` + + // EnabledInLicense Indicates whether the rule type is enabled or disabled based on the subscription. + EnabledInLicense *bool `json:"enabled_in_license,omitempty"` + + // HasAlertsMappings Indicates whether the rule type has custom mappings for the alert data. + HasAlertsMappings *bool `json:"has_alerts_mappings,omitempty"` + HasFieldsForAAD *bool `json:"has_fields_for_a_a_d,omitempty"` + + // Id The unique identifier for the rule type. 
+ Id *string `json:"id,omitempty"` + + // IsExportable Indicates whether the rule type is exportable in **Stack Management > Saved Objects**. + IsExportable *bool `json:"is_exportable,omitempty"` + + // MinimumLicenseRequired The subscriptions required to use the rule type. + MinimumLicenseRequired *string `json:"minimum_license_required,omitempty"` + + // Name The descriptive name of the rule type. + Name *string `json:"name,omitempty"` + + // Producer An identifier for the application that produces this rule type. + Producer *string `json:"producer,omitempty"` + + // RecoveryActionGroup An action group to use when an alert goes from an active state to an inactive one. + RecoveryActionGroup *struct { + Id *string `json:"id,omitempty"` + Name *string `json:"name,omitempty"` + } `json:"recovery_action_group,omitempty"` + RuleTaskTimeout *string `json:"rule_task_timeout,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Alerting401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + } - a.AdditionalProperties[fieldName] = value -} - -// AsAgentPolicyPackagePolicies1Inputs1StreamsVars0 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars0 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars0() (AgentPolicyPackagePolicies1Inputs1StreamsVars0, error) { - var body AgentPolicyPackagePolicies1Inputs1StreamsVars0 - err := json.Unmarshal(t.union, &body) - return body, err -} -// FromAgentPolicyPackagePolicies1Inputs1StreamsVars0 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars0 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars0(v AgentPolicyPackagePolicies1Inputs1StreamsVars0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars0 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars0 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars0(v AgentPolicyPackagePolicies1Inputs1StreamsVars0) error { - b, err := json.Marshal(v) +// ParseGetAlertingRulesFindResponse parses an HTTP response from a GetAlertingRulesFindWithResponse call +func ParseGetAlertingRulesFindResponse(rsp *http.Response) (*GetAlertingRulesFindResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetAlertingRulesFindResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsAgentPolicyPackagePolicies1Inputs1StreamsVars1 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars1 -func (t 
AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars1() (AgentPolicyPackagePolicies1Inputs1StreamsVars1, error) { - var body AgentPolicyPackagePolicies1Inputs1StreamsVars1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Actions []struct { + // AlertsFilter Defines a period that limits whether the action runs. + AlertsFilter *struct { + Query *struct { + // Dsl A filter written in Elasticsearch Query Domain Specific Language (DSL). + Dsl *string `json:"dsl,omitempty"` + + // Filters A filter written in Elasticsearch Query Domain Specific Language (DSL) as defined in the `kbn-es-query` package. + Filters []struct { + State *struct { + // Store A filter can be either specific to an application context or applied globally. + Store GetAlertingRulesFind200ActionsAlertsFilterQueryFiltersStateStore `json:"store"` + } `json:"$state,omitempty"` + Meta map[string]interface{} `json:"meta"` + Query *map[string]interface{} `json:"query,omitempty"` + } `json:"filters"` -// FromAgentPolicyPackagePolicies1Inputs1StreamsVars1 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars1 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars1(v AgentPolicyPackagePolicies1Inputs1StreamsVars1) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query,omitempty"` + Timeframe *struct { + // Days Defines the days of the week that the action can run, represented as an array of numbers. For example, `1` represents Monday. An empty array is equivalent to specifying all the days of the week. + Days []GetAlertingRulesFind200ActionsAlertsFilterTimeframeDays `json:"days"` + Hours struct { + // End The end of the time frame in 24-hour notation (`hh:mm`). + End string `json:"end"` + + // Start The start of the time frame in 24-hour notation (`hh:mm`). + Start string `json:"start"` + } `json:"hours"` + + // Timezone The ISO time zone for the `hours` values. Values such as `UTC` and `UTC+1` also work but lack built-in daylight savings time support and are not recommended. + Timezone string `json:"timezone"` + } `json:"timeframe,omitempty"` + } `json:"alerts_filter,omitempty"` + + // ConnectorTypeId The type of connector. This property appears in responses but cannot be set in requests. + ConnectorTypeId string `json:"connector_type_id"` + Frequency *struct { + // NotifyWhen Indicates how often alerts generate actions. Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. 
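[Editorial note, not part of the patch] To make the notify_when / throttle interplay described in the comment above concrete, this is roughly what a per-action frequency object looks like on the wire when throttling is used. The values are illustrative and mirror the Frequency fields (NotifyWhen, Summary, Throttle) defined in this response type; nothing here is added by the patch itself.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// notify_when set to onThrottleInterval means the throttle value below is honored;
	// with onActiveAlert or onActionGroupChange the throttle field is not applicable.
	frequency := map[string]interface{}{
		"notify_when": "onThrottleInterval",
		"summary":     false,
		"throttle":    "1h", // seconds, minutes, hours, or days, e.g. "30s", "10m", "1h", "1d"
	}
	b, err := json.Marshal(frequency)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // {"notify_when":"onThrottleInterval","summary":false,"throttle":"1h"}
}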
+ NotifyWhen GetAlertingRulesFind200ActionsFrequencyNotifyWhen `json:"notify_when"` + + // Summary Indicates whether the action is a summary. + Summary bool `json:"summary"` + + // Throttle The throttle interval, which defines how often an alert generates repeated actions. It is specified in seconds, minutes, hours, or days and is applicable only if 'notify_when' is set to 'onThrottleInterval'. NOTE: You cannot specify the throttle interval at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + Throttle *string `json:"throttle"` + } `json:"frequency,omitempty"` + + // Group The group name, which affects when the action runs (for example, when the threshold is met or when the alert is recovered). Each rule type has a list of valid action group names. If you don't need to group actions, set to `default`. + Group *string `json:"group,omitempty"` + + // Id The identifier for the connector saved object. + Id string `json:"id"` + + // Params The parameters for the action, which are sent to the connector. The `params` are handled as Mustache templates and passed a default set of context. + Params map[string]interface{} `json:"params"` + + // UseAlertDataForTemplate Indicates whether to use alert data as a template. + UseAlertDataForTemplate *bool `json:"use_alert_data_for_template,omitempty"` + + // Uuid A universally unique identifier (UUID) for the action. + Uuid *string `json:"uuid,omitempty"` + } `json:"actions"` + ActiveSnoozes *[]string `json:"active_snoozes,omitempty"` + + // AlertDelay Indicates that an alert occurs only when the specified number of consecutive runs met the rule conditions. + AlertDelay *struct { + // Active The number of consecutive runs that must meet the rule conditions. + Active float32 `json:"active"` + } `json:"alert_delay,omitempty"` + + // ApiKeyCreatedByUser Indicates whether the API key that is associated with the rule was created by the user. + ApiKeyCreatedByUser *bool `json:"api_key_created_by_user"` + + // ApiKeyOwner The owner of the API key that is associated with the rule and used to run background tasks. + ApiKeyOwner *string `json:"api_key_owner"` + Artifacts *struct { + Dashboards *[]struct { + Id string `json:"id"` + } `json:"dashboards,omitempty"` + InvestigationGuide *struct { + // Blob User-created content that describes alert causes and remdiation. + Blob string `json:"blob"` + } `json:"investigation_guide,omitempty"` + } `json:"artifacts,omitempty"` + + // Consumer The name of the application or feature that owns the rule. For example: `alerts`, `apm`, `discover`, `infrastructure`, `logs`, `metrics`, `ml`, `monitoring`, `securitySolution`, `siem`, `stackAlerts`, or `uptime`. + Consumer string `json:"consumer"` + + // CreatedAt The date and time that the rule was created. + CreatedAt string `json:"created_at"` -// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars1 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars1 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars1(v AgentPolicyPackagePolicies1Inputs1StreamsVars1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + // CreatedBy The identifier for the user that created the rule. 
+ CreatedBy *string `json:"created_by"` - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + // Enabled Indicates whether you want to run the rule on an interval basis after it is created. + Enabled bool `json:"enabled"` + ExecutionStatus struct { + Error *struct { + // Message Error message. + Message string `json:"message"` -// AsAgentPolicyPackagePolicies1Inputs1StreamsVars2 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars2 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars2() (AgentPolicyPackagePolicies1Inputs1StreamsVars2, error) { - var body AgentPolicyPackagePolicies1Inputs1StreamsVars2 - err := json.Unmarshal(t.union, &body) - return body, err -} + // Reason Reason for error. + Reason GetAlertingRulesFind200ExecutionStatusErrorReason `json:"reason"` + } `json:"error,omitempty"` -// FromAgentPolicyPackagePolicies1Inputs1StreamsVars2 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars2 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars2(v AgentPolicyPackagePolicies1Inputs1StreamsVars2) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // LastDuration Duration of last execution of the rule. + LastDuration *float32 `json:"last_duration,omitempty"` -// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars2 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars2 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars2(v AgentPolicyPackagePolicies1Inputs1StreamsVars2) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + // LastExecutionDate The date and time when rule was executed last. + LastExecutionDate string `json:"last_execution_date"` - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + // Status Status of rule execution. + Status GetAlertingRulesFind200ExecutionStatusStatus `json:"status"` + Warning *struct { + // Message Warning message. + Message string `json:"message"` -// AsAgentPolicyPackagePolicies1Inputs1StreamsVars3 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars3 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars3() (AgentPolicyPackagePolicies1Inputs1StreamsVars3, error) { - var body AgentPolicyPackagePolicies1Inputs1StreamsVars3 - err := json.Unmarshal(t.union, &body) - return body, err -} + // Reason Reason for warning. 
+ Reason GetAlertingRulesFind200ExecutionStatusWarningReason `json:"reason"` + } `json:"warning,omitempty"` + } `json:"execution_status"` -// FromAgentPolicyPackagePolicies1Inputs1StreamsVars3 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars3 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars3(v AgentPolicyPackagePolicies1Inputs1StreamsVars3) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // Flapping When flapping detection is turned on, alerts that switch quickly between active and recovered states are identified as “flapping” and notifications are reduced. + Flapping *struct { + // LookBackWindow The minimum number of runs in which the threshold must be met. + LookBackWindow float32 `json:"look_back_window"` -// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars3 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars3 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars3(v AgentPolicyPackagePolicies1Inputs1StreamsVars3) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + // StatusChangeThreshold The minimum number of times an alert must switch states in the look back window. + StatusChangeThreshold float32 `json:"status_change_threshold"` + } `json:"flapping"` - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + // Id The identifier for the rule. + Id string `json:"id"` -// AsAgentPolicyPackagePolicies1Inputs1StreamsVars4 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars4 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars4() (AgentPolicyPackagePolicies1Inputs1StreamsVars4, error) { - var body AgentPolicyPackagePolicies1Inputs1StreamsVars4 - err := json.Unmarshal(t.union, &body) - return body, err -} + // IsSnoozedUntil The date when the rule will no longer be snoozed. + IsSnoozedUntil *string `json:"is_snoozed_until"` + LastRun *struct { + AlertsCount struct { + // Active Number of active alerts during last run. + Active *float32 `json:"active"` + + // Ignored Number of ignored alerts during last run. + Ignored *float32 `json:"ignored"` + + // New Number of new alerts during last run. + New *float32 `json:"new"` + + // Recovered Number of recovered alerts during last run. + Recovered *float32 `json:"recovered"` + } `json:"alerts_count"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome GetAlertingRulesFind200LastRunOutcome `json:"outcome"` + OutcomeMsg *[]string `json:"outcome_msg"` + + // OutcomeOrder Order of the outcome. + OutcomeOrder *float32 `json:"outcome_order,omitempty"` + + // Warning Warning of last rule execution. + Warning *GetAlertingRulesFind200LastRunWarning `json:"warning"` + } `json:"last_run"` + MappedParams *map[string]interface{} `json:"mapped_params,omitempty"` + + // Monitoring Monitoring details of the rule. + Monitoring *struct { + // Run Rule run details. 
+ Run struct { + // CalculatedMetrics Calculation of different percentiles and success ratio. + CalculatedMetrics struct { + P50 *float32 `json:"p50,omitempty"` + P95 *float32 `json:"p95,omitempty"` + P99 *float32 `json:"p99,omitempty"` + SuccessRatio float32 `json:"success_ratio"` + } `json:"calculated_metrics"` + + // History History of the rule run. + History []struct { + // Duration Duration of the rule run. + Duration *float32 `json:"duration,omitempty"` + + // Outcome Outcome of last run of the rule. Value could be succeeded, warning or failed. + Outcome *GetAlertingRulesFind200MonitoringRunHistoryOutcome `json:"outcome,omitempty"` + + // Success Indicates whether the rule run was successful. + Success bool `json:"success"` + + // Timestamp Time of rule run. + Timestamp float32 `json:"timestamp"` + } `json:"history"` + LastRun struct { + Metrics struct { + // Duration Duration of most recent rule run. + Duration *float32 `json:"duration,omitempty"` + + // GapDurationS Duration in seconds of rule run gap. + GapDurationS *float32 `json:"gap_duration_s"` + GapRange *struct { + // Gte End of the gap range. + Gte string `json:"gte"` + + // Lte Start of the gap range. + Lte string `json:"lte"` + } `json:"gap_range"` + + // TotalAlertsCreated Total number of alerts created during last rule run. + TotalAlertsCreated *float32 `json:"total_alerts_created"` + + // TotalAlertsDetected Total number of alerts detected during last rule run. + TotalAlertsDetected *float32 `json:"total_alerts_detected"` + + // TotalIndexingDurationMs Total time spent indexing documents during last rule run in milliseconds. + TotalIndexingDurationMs *float32 `json:"total_indexing_duration_ms"` + + // TotalSearchDurationMs Total time spent performing Elasticsearch searches as measured by Kibana; includes network latency and time spent serializing or deserializing the request and response. + TotalSearchDurationMs *float32 `json:"total_search_duration_ms"` + } `json:"metrics"` + + // Timestamp Time of the most recent rule run. + Timestamp string `json:"timestamp"` + } `json:"last_run"` + } `json:"run"` + } `json:"monitoring,omitempty"` + + // MuteAll Indicates whether all alerts are muted. + MuteAll bool `json:"mute_all"` + MutedAlertIds []string `json:"muted_alert_ids"` -// FromAgentPolicyPackagePolicies1Inputs1StreamsVars4 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars4 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars4(v AgentPolicyPackagePolicies1Inputs1StreamsVars4) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // Name The name of the rule. + Name string `json:"name"` -// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars4 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars4 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars4(v AgentPolicyPackagePolicies1Inputs1StreamsVars4) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + // NextRun Date and time of the next run of the rule. + NextRun *string `json:"next_run"` - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + // NotifyWhen Indicates how often alerts generate actions. 
Valid values include: `onActionGroupChange`: Actions run when the alert status changes; `onActiveAlert`: Actions run when the alert becomes active and at each check interval while the rule conditions are met; `onThrottleInterval`: Actions run when the alert becomes active and at the interval specified in the throttle property while the rule conditions are met. NOTE: You cannot specify `notify_when` at both the rule and action level. The recommended method is to set it for each action. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + NotifyWhen *GetAlertingRulesFind200NotifyWhen `json:"notify_when"` -// AsAgentPolicyPackagePolicies1Inputs1StreamsVars5 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1StreamsVars5 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1StreamsVars5() (AgentPolicyPackagePolicies1Inputs1StreamsVars5, error) { - var body AgentPolicyPackagePolicies1Inputs1StreamsVars5 - err := json.Unmarshal(t.union, &body) - return body, err -} + // Params The parameters for the rule. + Params map[string]interface{} `json:"params"` -// FromAgentPolicyPackagePolicies1Inputs1StreamsVars5 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1StreamsVars5 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1StreamsVars5(v AgentPolicyPackagePolicies1Inputs1StreamsVars5) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // Revision The rule revision number. + Revision float32 `json:"revision"` -// MergeAgentPolicyPackagePolicies1Inputs1StreamsVars5 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1StreamsVars5 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1StreamsVars5(v AgentPolicyPackagePolicies1Inputs1StreamsVars5) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + // RuleTypeId The rule type identifier. + RuleTypeId string `json:"rule_type_id"` - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + // Running Indicates whether the rule is running. + Running *bool `json:"running"` + Schedule struct { + // Interval The interval is specified in seconds, minutes, hours, or days. + Interval string `json:"interval"` + } `json:"schedule"` -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + // ScheduledTaskId Identifier of the scheduled task. + ScheduledTaskId *string `json:"scheduled_task_id,omitempty"` + SnoozeSchedule *[]struct { + // Duration Duration of the rule snooze schedule. + Duration float32 `json:"duration"` + + // Id Identifier of the rule snooze schedule. 
+ Id *string `json:"id,omitempty"` + RRule struct { + Byhour *[]float32 `json:"byhour"` + Byminute *[]float32 `json:"byminute"` + Bymonth *[]float32 `json:"bymonth"` + Bymonthday *[]float32 `json:"bymonthday"` + Bysecond *[]float32 `json:"bysecond"` + Bysetpos *[]float32 `json:"bysetpos"` + Byweekday *[]GetAlertingRulesFind_200_SnoozeSchedule_RRule_Byweekday_Item `json:"byweekday"` + Byweekno *[]float32 `json:"byweekno"` + Byyearday *[]float32 `json:"byyearday"` + + // Count Number of times the rule should recur until it stops. + Count *float32 `json:"count,omitempty"` + + // Dtstart Rule start date in Coordinated Universal Time (UTC). + Dtstart string `json:"dtstart"` + + // Freq Indicates frequency of the rule. Options are YEARLY, MONTHLY, WEEKLY, DAILY. + Freq *GetAlertingRulesFind200SnoozeScheduleRRuleFreq `json:"freq,omitempty"` + + // Interval Indicates the interval of frequency. For example, 1 and YEARLY is every 1 year, 2 and WEEKLY is every 2 weeks. + Interval *float32 `json:"interval,omitempty"` + + // Tzid Indicates timezone abbreviation. + Tzid string `json:"tzid"` + + // Until Recur the rule until this date. + Until *string `json:"until,omitempty"` + + // Wkst Indicates the start of week, defaults to Monday. + Wkst *GetAlertingRulesFind200SnoozeScheduleRRuleWkst `json:"wkst,omitempty"` + } `json:"rRule"` + SkipRecurrences *[]string `json:"skipRecurrences,omitempty"` + } `json:"snooze_schedule,omitempty"` + Tags []string `json:"tags"` + + // Throttle Deprecated in 8.13.0. Use the `throttle` property in the action `frequency` object instead. The throttle interval, which defines how often an alert generates repeated actions. NOTE: You cannot specify the throttle interval at both the rule and action level. If you set it at the rule level then update the rule in Kibana, it is automatically changed to use action-specific values. + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + Throttle *string `json:"throttle"` -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Streams_Vars_AdditionalProperties) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + // UpdatedAt The date and time that the rule was updated most recently. + UpdatedAt string `json:"updated_at"` -// AsAgentPolicyPackagePolicies1Inputs1Vars0 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars0 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars0() (AgentPolicyPackagePolicies1Inputs1Vars0, error) { - var body AgentPolicyPackagePolicies1Inputs1Vars0 - err := json.Unmarshal(t.union, &body) - return body, err -} + // UpdatedBy The identifier for the user that updated this rule most recently. + UpdatedBy *string `json:"updated_by"` -// FromAgentPolicyPackagePolicies1Inputs1Vars0 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars0 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars0(v AgentPolicyPackagePolicies1Inputs1Vars0) error { - b, err := json.Marshal(v) - t.union = b - return err + // ViewInAppRelativeUrl Relative URL to view rule in the app. 
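// A minimal caller-side sketch of how this generated "find rules" response might be
// consumed. It is illustrative only: the package name (kbapi), the client constructor,
// the GetAlertingRulesFindWithResponse method name, its params type, and the URL are
// assumptions based on the usual oapi-codegen conventions, not confirmed by this diff;
// the JSON200 fields (Name, Enabled, ExecutionStatus.Status) are the ones defined above.
//
//	client, err := kbapi.NewClientWithResponses("https://localhost:5601/api")
//	if err != nil {
//		log.Fatal(err)
//	}
//	resp, err := client.GetAlertingRulesFindWithResponse(ctx, &kbapi.GetAlertingRulesFindParams{})
//	if err != nil {
//		log.Fatal(err)
//	}
//	if rule := resp.JSON200; rule != nil {
//		fmt.Println(rule.Name, rule.Enabled, rule.ExecutionStatus.Status)
//	}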
+ ViewInAppRelativeUrl *string `json:"view_in_app_relative_url"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -// MergeAgentPolicyPackagePolicies1Inputs1Vars0 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars0 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars0(v AgentPolicyPackagePolicies1Inputs1Vars0) error { - b, err := json.Marshal(v) +// ParseCreateAgentKeyResponse parses an HTTP response from a CreateAgentKeyWithResponse call +func ParseCreateAgentKeyResponse(rsp *http.Response) (*CreateAgentKeyResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateAgentKeyResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsAgentPolicyPackagePolicies1Inputs1Vars1 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars1 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars1() (AgentPolicyPackagePolicies1Inputs1Vars1, error) { - var body AgentPolicyPackagePolicies1Inputs1Vars1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUIAgentKeysResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromAgentPolicyPackagePolicies1Inputs1Vars1 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars1 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars1(v AgentPolicyPackagePolicies1Inputs1Vars1) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeAgentPolicyPackagePolicies1Inputs1Vars1 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars1 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars1(v AgentPolicyPackagePolicies1Inputs1Vars1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APMUI403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsAgentPolicyPackagePolicies1Inputs1Vars2 returns the union data 
inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars2 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars2() (AgentPolicyPackagePolicies1Inputs1Vars2, error) { - var body AgentPolicyPackagePolicies1Inputs1Vars2 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest APMUI500Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// FromAgentPolicyPackagePolicies1Inputs1Vars2 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars2 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars2(v AgentPolicyPackagePolicies1Inputs1Vars2) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeAgentPolicyPackagePolicies1Inputs1Vars2 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars2 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars2(v AgentPolicyPackagePolicies1Inputs1Vars2) error { - b, err := json.Marshal(v) +// ParseSaveApmServerSchemaResponse parses an HTTP response from a SaveApmServerSchemaWithResponse call +func ParseSaveApmServerSchemaResponse(rsp *http.Response) (*SaveApmServerSchemaResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &SaveApmServerSchemaResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsAgentPolicyPackagePolicies1Inputs1Vars3 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars3 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars3() (AgentPolicyPackagePolicies1Inputs1Vars3, error) { - var body AgentPolicyPackagePolicies1Inputs1Vars3 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromAgentPolicyPackagePolicies1Inputs1Vars3 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars3 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars3(v AgentPolicyPackagePolicies1Inputs1Vars3) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, 
&dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APMUI403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APMUI404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + } + + return response, nil } -// MergeAgentPolicyPackagePolicies1Inputs1Vars3 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars3 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars3(v AgentPolicyPackagePolicies1Inputs1Vars3) error { - b, err := json.Marshal(v) +// ParseCreateAnnotationResponse parses an HTTP response from a CreateAnnotationWithResponse call +func ParseCreateAnnotationResponse(rsp *http.Response) (*CreateAnnotationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateAnnotationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsAgentPolicyPackagePolicies1Inputs1Vars4 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars4 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars4() (AgentPolicyPackagePolicies1Inputs1Vars4, error) { - var body AgentPolicyPackagePolicies1Inputs1Vars4 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUICreateAnnotationResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromAgentPolicyPackagePolicies1Inputs1Vars4 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars4 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars4(v AgentPolicyPackagePolicies1Inputs1Vars4) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APMUI403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APMUI404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + } + + return 
response, nil } -// MergeAgentPolicyPackagePolicies1Inputs1Vars4 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars4 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars4(v AgentPolicyPackagePolicies1Inputs1Vars4) error { - b, err := json.Marshal(v) +// ParseGetAnnotationResponse parses an HTTP response from a GetAnnotationWithResponse call +func ParseGetAnnotationResponse(rsp *http.Response) (*GetAnnotationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetAnnotationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsAgentPolicyPackagePolicies1Inputs1Vars5 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as a AgentPolicyPackagePolicies1Inputs1Vars5 -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) AsAgentPolicyPackagePolicies1Inputs1Vars5() (AgentPolicyPackagePolicies1Inputs1Vars5, error) { - var body AgentPolicyPackagePolicies1Inputs1Vars5 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUIAnnotationSearchResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromAgentPolicyPackagePolicies1Inputs1Vars5 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties as the provided AgentPolicyPackagePolicies1Inputs1Vars5 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) FromAgentPolicyPackagePolicies1Inputs1Vars5(v AgentPolicyPackagePolicies1Inputs1Vars5) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest APMUI500Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeAgentPolicyPackagePolicies1Inputs1Vars5 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Inputs1Vars5 -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MergeAgentPolicyPackagePolicies1Inputs1Vars5(v AgentPolicyPackagePolicies1Inputs1Vars5) error { - b, err := json.Marshal(v) +// ParseDeleteAgentConfigurationResponse parses an HTTP response from a DeleteAgentConfigurationWithResponse call +func ParseDeleteAgentConfigurationResponse(rsp *http.Response) (*DeleteAgentConfigurationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return 
nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteAgentConfigurationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUIDeleteAgentConfigurationsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *AgentPolicy_PackagePolicies_1_Inputs_1_Vars_AdditionalProperties) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsAgentPolicyPackagePolicies1Inputs0 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs as a AgentPolicyPackagePolicies1Inputs0 -func (t AgentPolicy_PackagePolicies_1_Inputs) AsAgentPolicyPackagePolicies1Inputs0() (AgentPolicyPackagePolicies1Inputs0, error) { - var body AgentPolicyPackagePolicies1Inputs0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromAgentPolicyPackagePolicies1Inputs0 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs as the provided AgentPolicyPackagePolicies1Inputs0 -func (t *AgentPolicy_PackagePolicies_1_Inputs) FromAgentPolicyPackagePolicies1Inputs0(v AgentPolicyPackagePolicies1Inputs0) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APMUI403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APMUI404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + } + + return response, nil } -// MergeAgentPolicyPackagePolicies1Inputs0 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs, using the provided AgentPolicyPackagePolicies1Inputs0 -func (t *AgentPolicy_PackagePolicies_1_Inputs) MergeAgentPolicyPackagePolicies1Inputs0(v AgentPolicyPackagePolicies1Inputs0) error { - b, err := json.Marshal(v) +// ParseGetAgentConfigurationsResponse parses an HTTP response from a GetAgentConfigurationsWithResponse call +func ParseGetAgentConfigurationsResponse(rsp *http.Response) (*GetAgentConfigurationsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetAgentConfigurationsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsAgentPolicyPackagePolicies1Inputs1 returns the union data inside the AgentPolicy_PackagePolicies_1_Inputs as a AgentPolicyPackagePolicies1Inputs1 -func (t AgentPolicy_PackagePolicies_1_Inputs) AsAgentPolicyPackagePolicies1Inputs1() 
(AgentPolicyPackagePolicies1Inputs1, error) { - var body AgentPolicyPackagePolicies1Inputs1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUIAgentConfigurationsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromAgentPolicyPackagePolicies1Inputs1 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Inputs as the provided AgentPolicyPackagePolicies1Inputs1 -func (t *AgentPolicy_PackagePolicies_1_Inputs) FromAgentPolicyPackagePolicies1Inputs1(v AgentPolicyPackagePolicies1Inputs1) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APMUI404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// MergeAgentPolicyPackagePolicies1Inputs1 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Inputs, using the provided AgentPolicyPackagePolicies1Inputs1 -func (t *AgentPolicy_PackagePolicies_1_Inputs) MergeAgentPolicyPackagePolicies1Inputs1(v AgentPolicyPackagePolicies1Inputs1) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err + return response, nil } -func (t AgentPolicy_PackagePolicies_1_Inputs) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} +// ParseCreateUpdateAgentConfigurationResponse parses an HTTP response from a CreateUpdateAgentConfigurationWithResponse call +func ParseCreateUpdateAgentConfigurationResponse(rsp *http.Response) (*CreateUpdateAgentConfigurationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } -func (t *AgentPolicy_PackagePolicies_1_Inputs) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + response := &CreateUpdateAgentConfigurationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsAgentPolicyPackagePolicies1Vars10 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars10 -func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars10() (AgentPolicyPackagePolicies1Vars10, error) { - var body AgentPolicyPackagePolicies1Vars10 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromAgentPolicyPackagePolicies1Vars10 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars10 -func (t 
*AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars10(v AgentPolicyPackagePolicies1Vars10) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeAgentPolicyPackagePolicies1Vars10 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars10 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars10(v AgentPolicyPackagePolicies1Vars10) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APMUI403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsAgentPolicyPackagePolicies1Vars11 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars11 -func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars11() (AgentPolicyPackagePolicies1Vars11, error) { - var body AgentPolicyPackagePolicies1Vars11 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APMUI404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// FromAgentPolicyPackagePolicies1Vars11 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars11 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars11(v AgentPolicyPackagePolicies1Vars11) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeAgentPolicyPackagePolicies1Vars11 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars11 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars11(v AgentPolicyPackagePolicies1Vars11) error { - b, err := json.Marshal(v) +// ParseGetAgentNameForServiceResponse parses an HTTP response from a GetAgentNameForServiceWithResponse call +func ParseGetAgentNameForServiceResponse(rsp *http.Response) (*GetAgentNameForServiceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetAgentNameForServiceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsAgentPolicyPackagePolicies1Vars12 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars12 -func (t 
AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars12() (AgentPolicyPackagePolicies1Vars12, error) { - var body AgentPolicyPackagePolicies1Vars12 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUIServiceAgentNameResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromAgentPolicyPackagePolicies1Vars12 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars12 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars12(v AgentPolicyPackagePolicies1Vars12) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeAgentPolicyPackagePolicies1Vars12 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars12 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars12(v AgentPolicyPackagePolicies1Vars12) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APMUI404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// AsAgentPolicyPackagePolicies1Vars13 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars13 -func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars13() (AgentPolicyPackagePolicies1Vars13, error) { - var body AgentPolicyPackagePolicies1Vars13 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromAgentPolicyPackagePolicies1Vars13 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars13 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars13(v AgentPolicyPackagePolicies1Vars13) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeAgentPolicyPackagePolicies1Vars13 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars13 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars13(v AgentPolicyPackagePolicies1Vars13) error { - b, err := json.Marshal(v) +// ParseGetEnvironmentsForServiceResponse parses an HTTP response from a GetEnvironmentsForServiceWithResponse call +func ParseGetEnvironmentsForServiceResponse(rsp *http.Response) (*GetEnvironmentsForServiceResponse, error) { + bodyBytes, 
err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetEnvironmentsForServiceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsAgentPolicyPackagePolicies1Vars14 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars14 -func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars14() (AgentPolicyPackagePolicies1Vars14, error) { - var body AgentPolicyPackagePolicies1Vars14 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUIServiceEnvironmentsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromAgentPolicyPackagePolicies1Vars14 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars14 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars14(v AgentPolicyPackagePolicies1Vars14) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeAgentPolicyPackagePolicies1Vars14 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, using the provided AgentPolicyPackagePolicies1Vars14 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars14(v AgentPolicyPackagePolicies1Vars14) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APMUI404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// AsAgentPolicyPackagePolicies1Vars15 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as a AgentPolicyPackagePolicies1Vars15 -func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) AsAgentPolicyPackagePolicies1Vars15() (AgentPolicyPackagePolicies1Vars15, error) { - var body AgentPolicyPackagePolicies1Vars15 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromAgentPolicyPackagePolicies1Vars15 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties as the provided AgentPolicyPackagePolicies1Vars15 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) FromAgentPolicyPackagePolicies1Vars15(v AgentPolicyPackagePolicies1Vars15) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeAgentPolicyPackagePolicies1Vars15 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties, 
using the provided AgentPolicyPackagePolicies1Vars15 -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MergeAgentPolicyPackagePolicies1Vars15(v AgentPolicyPackagePolicies1Vars15) error { - b, err := json.Marshal(v) +// ParseSearchSingleConfigurationResponse parses an HTTP response from a SearchSingleConfigurationWithResponse call +func ParseSearchSingleConfigurationResponse(rsp *http.Response) (*SearchSingleConfigurationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &SearchSingleConfigurationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUISearchAgentConfigurationResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *AgentPolicy_PackagePolicies_1_Vars_1_AdditionalProperties) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsAgentPolicyPackagePolicies1Vars0 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars as a AgentPolicyPackagePolicies1Vars0 -func (t AgentPolicy_PackagePolicies_1_Vars) AsAgentPolicyPackagePolicies1Vars0() (AgentPolicyPackagePolicies1Vars0, error) { - var body AgentPolicyPackagePolicies1Vars0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromAgentPolicyPackagePolicies1Vars0 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars as the provided AgentPolicyPackagePolicies1Vars0 -func (t *AgentPolicy_PackagePolicies_1_Vars) FromAgentPolicyPackagePolicies1Vars0(v AgentPolicyPackagePolicies1Vars0) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APMUI404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// MergeAgentPolicyPackagePolicies1Vars0 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars, using the provided AgentPolicyPackagePolicies1Vars0 -func (t *AgentPolicy_PackagePolicies_1_Vars) MergeAgentPolicyPackagePolicies1Vars0(v AgentPolicyPackagePolicies1Vars0) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsAgentPolicyPackagePolicies1Vars1 returns the union data inside the AgentPolicy_PackagePolicies_1_Vars as a AgentPolicyPackagePolicies1Vars1 -func (t AgentPolicy_PackagePolicies_1_Vars) AsAgentPolicyPackagePolicies1Vars1() (AgentPolicyPackagePolicies1Vars1, error) { - var body AgentPolicyPackagePolicies1Vars1 - err := json.Unmarshal(t.union, &body) - return 
body, err -} - -// FromAgentPolicyPackagePolicies1Vars1 overwrites any union data inside the AgentPolicy_PackagePolicies_1_Vars as the provided AgentPolicyPackagePolicies1Vars1 -func (t *AgentPolicy_PackagePolicies_1_Vars) FromAgentPolicyPackagePolicies1Vars1(v AgentPolicyPackagePolicies1Vars1) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeAgentPolicyPackagePolicies1Vars1 performs a merge with any union data inside the AgentPolicy_PackagePolicies_1_Vars, using the provided AgentPolicyPackagePolicies1Vars1 -func (t *AgentPolicy_PackagePolicies_1_Vars) MergeAgentPolicyPackagePolicies1Vars1(v AgentPolicyPackagePolicies1Vars1) error { - b, err := json.Marshal(v) +// ParseGetSingleAgentConfigurationResponse parses an HTTP response from a GetSingleAgentConfigurationWithResponse call +func ParseGetSingleAgentConfigurationResponse(rsp *http.Response) (*GetSingleAgentConfigurationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetSingleAgentConfigurationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t AgentPolicy_PackagePolicies_1_Vars) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUISingleAgentConfigurationResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *AgentPolicy_PackagePolicies_1_Vars) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsAgentPolicyPackagePolicies0 returns the union data inside the AgentPolicy_PackagePolicies as a AgentPolicyPackagePolicies0 -func (t AgentPolicy_PackagePolicies) AsAgentPolicyPackagePolicies0() (AgentPolicyPackagePolicies0, error) { - var body AgentPolicyPackagePolicies0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromAgentPolicyPackagePolicies0 overwrites any union data inside the AgentPolicy_PackagePolicies as the provided AgentPolicyPackagePolicies0 -func (t *AgentPolicy_PackagePolicies) FromAgentPolicyPackagePolicies0(v AgentPolicyPackagePolicies0) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest APMUI404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// MergeAgentPolicyPackagePolicies0 performs a merge with any union data inside the AgentPolicy_PackagePolicies, using the provided AgentPolicyPackagePolicies0 -func (t *AgentPolicy_PackagePolicies) MergeAgentPolicyPackagePolicies0(v AgentPolicyPackagePolicies0) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// 
AsAgentPolicyPackagePolicies1 returns the union data inside the AgentPolicy_PackagePolicies as a AgentPolicyPackagePolicies1 -func (t AgentPolicy_PackagePolicies) AsAgentPolicyPackagePolicies1() (AgentPolicyPackagePolicies1, error) { - var body AgentPolicyPackagePolicies1 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromAgentPolicyPackagePolicies1 overwrites any union data inside the AgentPolicy_PackagePolicies as the provided AgentPolicyPackagePolicies1 -func (t *AgentPolicy_PackagePolicies) FromAgentPolicyPackagePolicies1(v AgentPolicyPackagePolicies1) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeAgentPolicyPackagePolicies1 performs a merge with any union data inside the AgentPolicy_PackagePolicies, using the provided AgentPolicyPackagePolicies1 -func (t *AgentPolicy_PackagePolicies) MergeAgentPolicyPackagePolicies1(v AgentPolicyPackagePolicies1) error { - b, err := json.Marshal(v) +// ParseGetSourceMapsResponse parses an HTTP response from a GetSourceMapsWithResponse call +func ParseGetSourceMapsResponse(rsp *http.Response) (*GetSourceMapsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t AgentPolicy_PackagePolicies) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + response := &GetSourceMapsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t *AgentPolicy_PackagePolicies) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUISourceMapsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsAgentPolicyGlobalDataTagsItemValue0 returns the union data inside the AgentPolicyGlobalDataTagsItem_Value as a AgentPolicyGlobalDataTagsItemValue0 -func (t AgentPolicyGlobalDataTagsItem_Value) AsAgentPolicyGlobalDataTagsItemValue0() (AgentPolicyGlobalDataTagsItemValue0, error) { - var body AgentPolicyGlobalDataTagsItemValue0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// FromAgentPolicyGlobalDataTagsItemValue0 overwrites any union data inside the AgentPolicyGlobalDataTagsItem_Value as the provided AgentPolicyGlobalDataTagsItemValue0 -func (t *AgentPolicyGlobalDataTagsItem_Value) FromAgentPolicyGlobalDataTagsItemValue0(v AgentPolicyGlobalDataTagsItemValue0) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// MergeAgentPolicyGlobalDataTagsItemValue0 performs a merge with any union data inside the AgentPolicyGlobalDataTagsItem_Value, using the provided AgentPolicyGlobalDataTagsItemValue0 -func (t *AgentPolicyGlobalDataTagsItem_Value) MergeAgentPolicyGlobalDataTagsItemValue0(v AgentPolicyGlobalDataTagsItemValue0) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest APMUI500Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 501: + var dest APMUI501Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON501 = &dest -// AsAgentPolicyGlobalDataTagsItemValue1 returns the union data inside the AgentPolicyGlobalDataTagsItem_Value as a AgentPolicyGlobalDataTagsItemValue1 -func (t AgentPolicyGlobalDataTagsItem_Value) AsAgentPolicyGlobalDataTagsItemValue1() (AgentPolicyGlobalDataTagsItemValue1, error) { - var body AgentPolicyGlobalDataTagsItemValue1 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromAgentPolicyGlobalDataTagsItemValue1 overwrites any union data inside the AgentPolicyGlobalDataTagsItem_Value as the provided AgentPolicyGlobalDataTagsItemValue1 -func (t *AgentPolicyGlobalDataTagsItem_Value) FromAgentPolicyGlobalDataTagsItemValue1(v AgentPolicyGlobalDataTagsItemValue1) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeAgentPolicyGlobalDataTagsItemValue1 performs a merge with any union data inside the AgentPolicyGlobalDataTagsItem_Value, using the provided AgentPolicyGlobalDataTagsItemValue1 -func (t *AgentPolicyGlobalDataTagsItem_Value) MergeAgentPolicyGlobalDataTagsItemValue1(v AgentPolicyGlobalDataTagsItemValue1) error { - b, err := json.Marshal(v) +// ParseUploadSourceMapResponse parses an HTTP response from a UploadSourceMapWithResponse call +func ParseUploadSourceMapResponse(rsp *http.Response) (*UploadSourceMapResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &UploadSourceMapResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t AgentPolicyGlobalDataTagsItem_Value) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest APMUIUploadSourceMapsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *AgentPolicyGlobalDataTagsItem_Value) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsBedrockConfig returns the union data inside the CreateConnectorConfig as a BedrockConfig -func (t CreateConnectorConfig) AsBedrockConfig() (BedrockConfig, error) { - var body BedrockConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromBedrockConfig overwrites any union data inside the CreateConnectorConfig as the provided BedrockConfig -func (t *CreateConnectorConfig) FromBedrockConfig(v BedrockConfig) error { - b, 
err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APMUI403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// MergeBedrockConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided BedrockConfig -func (t *CreateConnectorConfig) MergeBedrockConfig(v BedrockConfig) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest APMUI500Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 501: + var dest APMUI501Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON501 = &dest -// AsCrowdstrikeConfig returns the union data inside the CreateConnectorConfig as a CrowdstrikeConfig -func (t CreateConnectorConfig) AsCrowdstrikeConfig() (CrowdstrikeConfig, error) { - var body CrowdstrikeConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromCrowdstrikeConfig overwrites any union data inside the CreateConnectorConfig as the provided CrowdstrikeConfig -func (t *CreateConnectorConfig) FromCrowdstrikeConfig(v CrowdstrikeConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeCrowdstrikeConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided CrowdstrikeConfig -func (t *CreateConnectorConfig) MergeCrowdstrikeConfig(v CrowdstrikeConfig) error { - b, err := json.Marshal(v) +// ParseDeleteSourceMapResponse parses an HTTP response from a DeleteSourceMapWithResponse call +func ParseDeleteSourceMapResponse(rsp *http.Response) (*DeleteSourceMapResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteSourceMapResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsD3securityConfig returns the union data inside the CreateConnectorConfig as a D3securityConfig -func (t CreateConnectorConfig) AsD3securityConfig() (D3securityConfig, error) { - var body D3securityConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromD3securityConfig overwrites any union data inside the CreateConnectorConfig as the provided D3securityConfig -func (t *CreateConnectorConfig) FromD3securityConfig(v D3securityConfig) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest APMUI400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeD3securityConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided D3securityConfig -func (t *CreateConnectorConfig) 
MergeD3securityConfig(v D3securityConfig) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest APMUI401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest APMUI403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsEmailConfig returns the union data inside the CreateConnectorConfig as a EmailConfig -func (t CreateConnectorConfig) AsEmailConfig() (EmailConfig, error) { - var body EmailConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest APMUI500Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// FromEmailConfig overwrites any union data inside the CreateConnectorConfig as the provided EmailConfig -func (t *CreateConnectorConfig) FromEmailConfig(v EmailConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 501: + var dest APMUI501Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON501 = &dest + + } + + return response, nil } -// MergeEmailConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided EmailConfig -func (t *CreateConnectorConfig) MergeEmailConfig(v EmailConfig) error { - b, err := json.Marshal(v) +// ParseDeleteAssetCriticalityRecordResponse parses an HTTP response from a DeleteAssetCriticalityRecordWithResponse call +func ParseDeleteAssetCriticalityRecordResponse(rsp *http.Response) (*DeleteAssetCriticalityRecordResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteAssetCriticalityRecordResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsGeminiConfig returns the union data inside the CreateConnectorConfig as a GeminiConfig -func (t CreateConnectorConfig) AsGeminiConfig() (GeminiConfig, error) { - var body GeminiConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Deleted True if the record was deleted or false if the record did not exist. 
+ Deleted bool `json:"deleted"` + Record *SecurityEntityAnalyticsAPIAssetCriticalityRecord `json:"record,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromGeminiConfig overwrites any union data inside the CreateConnectorConfig as the provided GeminiConfig -func (t *CreateConnectorConfig) FromGeminiConfig(v GeminiConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeGeminiConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided GeminiConfig -func (t *CreateConnectorConfig) MergeGeminiConfig(v GeminiConfig) error { - b, err := json.Marshal(v) +// ParseGetAssetCriticalityRecordResponse parses an HTTP response from a GetAssetCriticalityRecordWithResponse call +func ParseGetAssetCriticalityRecordResponse(rsp *http.Response) (*GetAssetCriticalityRecordResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetAssetCriticalityRecordResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsResilientConfig returns the union data inside the CreateConnectorConfig as a ResilientConfig -func (t CreateConnectorConfig) AsResilientConfig() (ResilientConfig, error) { - var body ResilientConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEntityAnalyticsAPIAssetCriticalityRecord + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromResilientConfig overwrites any union data inside the CreateConnectorConfig as the provided ResilientConfig -func (t *CreateConnectorConfig) FromResilientConfig(v ResilientConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeResilientConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ResilientConfig -func (t *CreateConnectorConfig) MergeResilientConfig(v ResilientConfig) error { - b, err := json.Marshal(v) +// ParseCreateAssetCriticalityRecordResponse parses an HTTP response from a CreateAssetCriticalityRecordWithResponse call +func ParseCreateAssetCriticalityRecordResponse(rsp *http.Response) (*CreateAssetCriticalityRecordResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateAssetCriticalityRecordResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsIndexConfig returns the union data inside the CreateConnectorConfig as a IndexConfig -func (t CreateConnectorConfig) AsIndexConfig() (IndexConfig, error) { - var body IndexConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEntityAnalyticsAPIAssetCriticalityRecord + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromIndexConfig overwrites any union data inside the CreateConnectorConfig as the provided IndexConfig -func (t *CreateConnectorConfig) FromIndexConfig(v 
IndexConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeIndexConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided IndexConfig -func (t *CreateConnectorConfig) MergeIndexConfig(v IndexConfig) error { - b, err := json.Marshal(v) +// ParseBulkUpsertAssetCriticalityRecordsResponse parses an HTTP response from a BulkUpsertAssetCriticalityRecordsWithResponse call +func ParseBulkUpsertAssetCriticalityRecordsResponse(rsp *http.Response) (*BulkUpsertAssetCriticalityRecordsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &BulkUpsertAssetCriticalityRecordsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsJiraConfig returns the union data inside the CreateConnectorConfig as a JiraConfig -func (t CreateConnectorConfig) AsJiraConfig() (JiraConfig, error) { - var body JiraConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Errors []SecurityEntityAnalyticsAPIAssetCriticalityBulkUploadErrorItem `json:"errors"` + Stats SecurityEntityAnalyticsAPIAssetCriticalityBulkUploadStats `json:"stats"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromJiraConfig overwrites any union data inside the CreateConnectorConfig as the provided JiraConfig -func (t *CreateConnectorConfig) FromJiraConfig(v JiraConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeJiraConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided JiraConfig -func (t *CreateConnectorConfig) MergeJiraConfig(v JiraConfig) error { - b, err := json.Marshal(v) +// ParseFindAssetCriticalityRecordsResponse parses an HTTP response from a FindAssetCriticalityRecordsWithResponse call +func ParseFindAssetCriticalityRecordsResponse(rsp *http.Response) (*FindAssetCriticalityRecordsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &FindAssetCriticalityRecordsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsGenaiAzureConfig returns the union data inside the CreateConnectorConfig as a GenaiAzureConfig -func (t CreateConnectorConfig) AsGenaiAzureConfig() (GenaiAzureConfig, error) { - var body GenaiAzureConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Page int `json:"page"` + PerPage int `json:"per_page"` + Records []SecurityEntityAnalyticsAPIAssetCriticalityRecord `json:"records"` + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromGenaiAzureConfig overwrites any union data inside the CreateConnectorConfig as the provided GenaiAzureConfig -func (t *CreateConnectorConfig) FromGenaiAzureConfig(v GenaiAzureConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// 
MergeGenaiAzureConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided GenaiAzureConfig -func (t *CreateConnectorConfig) MergeGenaiAzureConfig(v GenaiAzureConfig) error { - b, err := json.Marshal(v) +// ParseDeleteCaseDefaultSpaceResponse parses an HTTP response from a DeleteCaseDefaultSpaceWithResponse call +func ParseDeleteCaseDefaultSpaceResponse(rsp *http.Response) (*DeleteCaseDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteCaseDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsGenaiOpenaiConfig returns the union data inside the CreateConnectorConfig as a GenaiOpenaiConfig -func (t CreateConnectorConfig) AsGenaiOpenaiConfig() (GenaiOpenaiConfig, error) { - var body GenaiOpenaiConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromGenaiOpenaiConfig overwrites any union data inside the CreateConnectorConfig as the provided GenaiOpenaiConfig -func (t *CreateConnectorConfig) FromGenaiOpenaiConfig(v GenaiOpenaiConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeGenaiOpenaiConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided GenaiOpenaiConfig -func (t *CreateConnectorConfig) MergeGenaiOpenaiConfig(v GenaiOpenaiConfig) error { - b, err := json.Marshal(v) +// ParseUpdateCaseDefaultSpaceResponse parses an HTTP response from a UpdateCaseDefaultSpaceWithResponse call +func ParseUpdateCaseDefaultSpaceResponse(rsp *http.Response) (*UpdateCaseDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &UpdateCaseDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsGenaiOpenaiOtherConfig returns the union data inside the CreateConnectorConfig as a GenaiOpenaiOtherConfig -func (t CreateConnectorConfig) AsGenaiOpenaiOtherConfig() (GenaiOpenaiOtherConfig, error) { - var body GenaiOpenaiOtherConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []CasesCaseResponseProperties + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromGenaiOpenaiOtherConfig overwrites any union data inside the CreateConnectorConfig as the provided GenaiOpenaiOtherConfig -func (t *CreateConnectorConfig) FromGenaiOpenaiOtherConfig(v GenaiOpenaiOtherConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeGenaiOpenaiOtherConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided 
GenaiOpenaiOtherConfig -func (t *CreateConnectorConfig) MergeGenaiOpenaiOtherConfig(v GenaiOpenaiOtherConfig) error { - b, err := json.Marshal(v) +// ParseCreateCaseDefaultSpaceResponse parses an HTTP response from a CreateCaseDefaultSpaceWithResponse call +func ParseCreateCaseDefaultSpaceResponse(rsp *http.Response) (*CreateCaseDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateCaseDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsOpsgenieConfig returns the union data inside the CreateConnectorConfig as a OpsgenieConfig -func (t CreateConnectorConfig) AsOpsgenieConfig() (OpsgenieConfig, error) { - var body OpsgenieConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CasesCaseResponseProperties + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromOpsgenieConfig overwrites any union data inside the CreateConnectorConfig as the provided OpsgenieConfig -func (t *CreateConnectorConfig) FromOpsgenieConfig(v OpsgenieConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeOpsgenieConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided OpsgenieConfig -func (t *CreateConnectorConfig) MergeOpsgenieConfig(v OpsgenieConfig) error { - b, err := json.Marshal(v) +// ParseFindCasesDefaultSpaceResponse parses an HTTP response from a FindCasesDefaultSpaceWithResponse call +func ParseFindCasesDefaultSpaceResponse(rsp *http.Response) (*FindCasesDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &FindCasesDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Cases *[]CasesCaseResponseProperties `json:"cases,omitempty"` + CountClosedCases *int `json:"count_closed_cases,omitempty"` + CountInProgressCases *int `json:"count_in_progress_cases,omitempty"` + CountOpenCases *int `json:"count_open_cases,omitempty"` + Page *int `json:"page,omitempty"` + PerPage *int `json:"per_page,omitempty"` + Total *int `json:"total,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// AsPagerdutyConfig returns the union data inside the CreateConnectorConfig as a PagerdutyConfig -func (t CreateConnectorConfig) AsPagerdutyConfig() (PagerdutyConfig, error) { - var body PagerdutyConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// 
FromPagerdutyConfig overwrites any union data inside the CreateConnectorConfig as the provided PagerdutyConfig -func (t *CreateConnectorConfig) FromPagerdutyConfig(v PagerdutyConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergePagerdutyConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided PagerdutyConfig -func (t *CreateConnectorConfig) MergePagerdutyConfig(v PagerdutyConfig) error { - b, err := json.Marshal(v) +// ParseGetCasesByAlertDefaultSpaceResponse parses an HTTP response from a GetCasesByAlertDefaultSpaceWithResponse call +func ParseGetCasesByAlertDefaultSpaceResponse(rsp *http.Response) (*GetCasesByAlertDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetCasesByAlertDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsSentineloneConfig returns the union data inside the CreateConnectorConfig as a SentineloneConfig -func (t CreateConnectorConfig) AsSentineloneConfig() (SentineloneConfig, error) { - var body SentineloneConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + // Id The case identifier. + Id *string `json:"id,omitempty"` -// FromSentineloneConfig overwrites any union data inside the CreateConnectorConfig as the provided SentineloneConfig -func (t *CreateConnectorConfig) FromSentineloneConfig(v SentineloneConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + // Title The case title. 
+ Title *string `json:"title,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeSentineloneConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided SentineloneConfig -func (t *CreateConnectorConfig) MergeSentineloneConfig(v SentineloneConfig) error { - b, err := json.Marshal(v) +// ParseGetCaseConfigurationDefaultSpaceResponse parses an HTTP response from a GetCaseConfigurationDefaultSpaceWithResponse call +func ParseGetCaseConfigurationDefaultSpaceResponse(rsp *http.Response) (*GetCaseConfigurationDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetCaseConfigurationDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsServicenowConfig returns the union data inside the CreateConnectorConfig as a ServicenowConfig -func (t CreateConnectorConfig) AsServicenowConfig() (ServicenowConfig, error) { - var body ServicenowConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + // ClosureType Indicates whether a case is automatically closed when it is pushed to external systems (`close-by-pushing`) or not automatically closed (`close-by-user`). + ClosureType *CasesClosureTypes `json:"closure_type,omitempty"` + Connector *struct { + // Fields The fields specified in the case configuration are not used and are not propagated to individual cases, therefore it is recommended to set it to `null`. + Fields *map[string]interface{} `json:"fields"` -// FromServicenowConfig overwrites any union data inside the CreateConnectorConfig as the provided ServicenowConfig -func (t *CreateConnectorConfig) FromServicenowConfig(v ServicenowConfig) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // Id The identifier for the connector. If you do not want a default connector, use `none`. To retrieve connector IDs, use the find connectors API. + Id *string `json:"id,omitempty"` -// MergeServicenowConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ServicenowConfig -func (t *CreateConnectorConfig) MergeServicenowConfig(v ServicenowConfig) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + // Name The name of the connector. If you do not want a default connector, use `none`. To retrieve connector names, use the find connectors API. + Name *string `json:"name,omitempty"` - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + // Type The type of connector. + Type *CasesConnectorTypes `json:"type,omitempty"` + } `json:"connector,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + CreatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"created_by,omitempty"` + + // CustomFields Custom fields configuration details. 
+ CustomFields *[]struct { + // DefaultValue A default value for the custom field. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. + DefaultValue *GetCaseConfigurationDefaultSpace_200_CustomFields_DefaultValue `json:"defaultValue,omitempty"` + + // Key A unique key for the custom field. Must be lower case and composed only of a-z, 0-9, '_', and '-' characters. It is used in API calls to refer to a specific custom field. + Key *string `json:"key,omitempty"` + + // Label The custom field label that is displayed in the case. + Label *string `json:"label,omitempty"` + + // Required Indicates whether the field is required. If `false`, the custom field can be set to null or omitted when a case is created or updated. + Required *bool `json:"required,omitempty"` + + // Type The type of the custom field. + Type *GetCaseConfigurationDefaultSpace200CustomFieldsType `json:"type,omitempty"` + } `json:"customFields,omitempty"` + Error *string `json:"error"` + Id *string `json:"id,omitempty"` + Mappings *[]struct { + ActionType *string `json:"action_type,omitempty"` + Source *string `json:"source,omitempty"` + Target *string `json:"target,omitempty"` + } `json:"mappings,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner *CasesOwner `json:"owner,omitempty"` + Templates *CasesTemplates `json:"templates,omitempty"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"updated_by"` + Version *string `json:"version,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsServicenowItomConfig returns the union data inside the CreateConnectorConfig as a ServicenowItomConfig -func (t CreateConnectorConfig) AsServicenowItomConfig() (ServicenowItomConfig, error) { - var body ServicenowItomConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromServicenowItomConfig overwrites any union data inside the CreateConnectorConfig as the provided ServicenowItomConfig -func (t *CreateConnectorConfig) FromServicenowItomConfig(v ServicenowItomConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeServicenowItomConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ServicenowItomConfig -func (t *CreateConnectorConfig) MergeServicenowItomConfig(v ServicenowItomConfig) error { - b, err := json.Marshal(v) +// ParseSetCaseConfigurationDefaultSpaceResponse parses an HTTP response from a SetCaseConfigurationDefaultSpaceWithResponse call +func ParseSetCaseConfigurationDefaultSpaceResponse(rsp *http.Response) (*SetCaseConfigurationDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &SetCaseConfigurationDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsSlackApiConfig returns the 
union data inside the CreateConnectorConfig as a SlackApiConfig -func (t CreateConnectorConfig) AsSlackApiConfig() (SlackApiConfig, error) { - var body SlackApiConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // ClosureType Indicates whether a case is automatically closed when it is pushed to external systems (`close-by-pushing`) or not automatically closed (`close-by-user`). + ClosureType *CasesClosureTypes `json:"closure_type,omitempty"` + Connector *struct { + // Fields The fields specified in the case configuration are not used and are not propagated to individual cases, therefore it is recommended to set it to `null`. + Fields *map[string]interface{} `json:"fields"` -// FromSlackApiConfig overwrites any union data inside the CreateConnectorConfig as the provided SlackApiConfig -func (t *CreateConnectorConfig) FromSlackApiConfig(v SlackApiConfig) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // Id The identifier for the connector. If you do not want a default connector, use `none`. To retrieve connector IDs, use the find connectors API. + Id *string `json:"id,omitempty"` -// MergeSlackApiConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided SlackApiConfig -func (t *CreateConnectorConfig) MergeSlackApiConfig(v SlackApiConfig) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + // Name The name of the connector. If you do not want a default connector, use `none`. To retrieve connector names, use the find connectors API. + Name *string `json:"name,omitempty"` - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + // Type The type of connector. + Type *CasesConnectorTypes `json:"type,omitempty"` + } `json:"connector,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + CreatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"created_by,omitempty"` + + // CustomFields Custom fields configuration details. + CustomFields *[]struct { + // DefaultValue A default value for the custom field. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. + DefaultValue *SetCaseConfigurationDefaultSpace_200_CustomFields_DefaultValue `json:"defaultValue,omitempty"` + + // Key A unique key for the custom field. Must be lower case and composed only of a-z, 0-9, '_', and '-' characters. It is used in API calls to refer to a specific custom field. + Key *string `json:"key,omitempty"` + + // Label The custom field label that is displayed in the case. + Label *string `json:"label,omitempty"` + + // Required Indicates whether the field is required. If `false`, the custom field can be set to null or omitted when a case is created or updated. + Required *bool `json:"required,omitempty"` + + // Type The type of the custom field. 
+ Type *SetCaseConfigurationDefaultSpace200CustomFieldsType `json:"type,omitempty"` + } `json:"customFields,omitempty"` + Error *string `json:"error"` + Id *string `json:"id,omitempty"` + Mappings *[]struct { + ActionType *string `json:"action_type,omitempty"` + Source *string `json:"source,omitempty"` + Target *string `json:"target,omitempty"` + } `json:"mappings,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. + Owner *CasesOwner `json:"owner,omitempty"` + Templates *CasesTemplates `json:"templates,omitempty"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"updated_by"` + Version *string `json:"version,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsSwimlaneConfig returns the union data inside the CreateConnectorConfig as a SwimlaneConfig -func (t CreateConnectorConfig) AsSwimlaneConfig() (SwimlaneConfig, error) { - var body SwimlaneConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromSwimlaneConfig overwrites any union data inside the CreateConnectorConfig as the provided SwimlaneConfig -func (t *CreateConnectorConfig) FromSwimlaneConfig(v SwimlaneConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeSwimlaneConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided SwimlaneConfig -func (t *CreateConnectorConfig) MergeSwimlaneConfig(v SwimlaneConfig) error { - b, err := json.Marshal(v) +// ParseFindCaseConnectorsDefaultSpaceResponse parses an HTTP response from a FindCaseConnectorsDefaultSpaceWithResponse call +func ParseFindCaseConnectorsDefaultSpaceResponse(rsp *http.Response) (*FindCaseConnectorsDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &FindCaseConnectorsDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsThehiveConfig returns the union data inside the CreateConnectorConfig as a ThehiveConfig -func (t CreateConnectorConfig) AsThehiveConfig() (ThehiveConfig, error) { - var body ThehiveConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + // ActionTypeId The type of connector. 
+ ActionTypeId *CasesConnectorTypes `json:"actionTypeId,omitempty"` + Config *FindCaseConnectorsDefaultSpace_200_Config `json:"config,omitempty"` + Id *string `json:"id,omitempty"` + IsDeprecated *bool `json:"isDeprecated,omitempty"` + IsMissingSecrets *bool `json:"isMissingSecrets,omitempty"` + IsPreconfigured *bool `json:"isPreconfigured,omitempty"` + Name *string `json:"name,omitempty"` + ReferencedByCount *int `json:"referencedByCount,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromThehiveConfig overwrites any union data inside the CreateConnectorConfig as the provided ThehiveConfig -func (t *CreateConnectorConfig) FromThehiveConfig(v ThehiveConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeThehiveConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided ThehiveConfig -func (t *CreateConnectorConfig) MergeThehiveConfig(v ThehiveConfig) error { - b, err := json.Marshal(v) +// ParseUpdateCaseConfigurationDefaultSpaceResponse parses an HTTP response from a UpdateCaseConfigurationDefaultSpaceWithResponse call +func ParseUpdateCaseConfigurationDefaultSpaceResponse(rsp *http.Response) (*UpdateCaseConfigurationDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &UpdateCaseConfigurationDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsTinesConfig returns the union data inside the CreateConnectorConfig as a TinesConfig -func (t CreateConnectorConfig) AsTinesConfig() (TinesConfig, error) { - var body TinesConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // ClosureType Indicates whether a case is automatically closed when it is pushed to external systems (`close-by-pushing`) or not automatically closed (`close-by-user`). + ClosureType *CasesClosureTypes `json:"closure_type,omitempty"` + Connector *struct { + // Fields The fields specified in the case configuration are not used and are not propagated to individual cases, therefore it is recommended to set it to `null`. + Fields *map[string]interface{} `json:"fields"` -// FromTinesConfig overwrites any union data inside the CreateConnectorConfig as the provided TinesConfig -func (t *CreateConnectorConfig) FromTinesConfig(v TinesConfig) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // Id The identifier for the connector. If you do not want a default connector, use `none`. To retrieve connector IDs, use the find connectors API. + Id *string `json:"id,omitempty"` -// MergeTinesConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided TinesConfig -func (t *CreateConnectorConfig) MergeTinesConfig(v TinesConfig) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + // Name The name of the connector. If you do not want a default connector, use `none`. To retrieve connector names, use the find connectors API. 
+ Name *string `json:"name,omitempty"` - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + // Type The type of connector. + Type *CasesConnectorTypes `json:"type,omitempty"` + } `json:"connector,omitempty"` + CreatedAt *time.Time `json:"created_at,omitempty"` + CreatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"created_by,omitempty"` + + // CustomFields Custom fields configuration details. + CustomFields *[]struct { + // DefaultValue A default value for the custom field. If the `type` is `text`, the default value must be a string. If the `type` is `toggle`, the default value must be boolean. + DefaultValue *UpdateCaseConfigurationDefaultSpace_200_CustomFields_DefaultValue `json:"defaultValue,omitempty"` + + // Key A unique key for the custom field. Must be lower case and composed only of a-z, 0-9, '_', and '-' characters. It is used in API calls to refer to a specific custom field. + Key *string `json:"key,omitempty"` + + // Label The custom field label that is displayed in the case. + Label *string `json:"label,omitempty"` + + // Required Indicates whether the field is required. If `false`, the custom field can be set to null or omitted when a case is created or updated. + Required *bool `json:"required,omitempty"` + + // Type The type of the custom field. + Type *UpdateCaseConfigurationDefaultSpace200CustomFieldsType `json:"type,omitempty"` + } `json:"customFields,omitempty"` + Error *string `json:"error"` + Id *string `json:"id,omitempty"` + Mappings *[]struct { + ActionType *string `json:"action_type,omitempty"` + Source *string `json:"source,omitempty"` + Target *string `json:"target,omitempty"` + } `json:"mappings,omitempty"` + + // Owner The application that owns the cases: Stack Management, Observability, or Elastic Security. 
+ Owner *CasesOwner `json:"owner,omitempty"` + Templates *CasesTemplates `json:"templates,omitempty"` + UpdatedAt *time.Time `json:"updated_at"` + UpdatedBy *struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } `json:"updated_by"` + Version *string `json:"version,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsTorqConfig returns the union data inside the CreateConnectorConfig as a TorqConfig -func (t CreateConnectorConfig) AsTorqConfig() (TorqConfig, error) { - var body TorqConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromTorqConfig overwrites any union data inside the CreateConnectorConfig as the provided TorqConfig -func (t *CreateConnectorConfig) FromTorqConfig(v TorqConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeTorqConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided TorqConfig -func (t *CreateConnectorConfig) MergeTorqConfig(v TorqConfig) error { - b, err := json.Marshal(v) +// ParseGetCaseReportersDefaultSpaceResponse parses an HTTP response from a GetCaseReportersDefaultSpaceWithResponse call +func ParseGetCaseReportersDefaultSpaceResponse(rsp *http.Response) (*GetCaseReportersDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetCaseReportersDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsWebhookConfig returns the union data inside the CreateConnectorConfig as a WebhookConfig -func (t CreateConnectorConfig) AsWebhookConfig() (WebhookConfig, error) { - var body WebhookConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + Email *string `json:"email"` + FullName *string `json:"full_name"` + ProfileUid *string `json:"profile_uid,omitempty"` + Username *string `json:"username"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromWebhookConfig overwrites any union data inside the CreateConnectorConfig as the provided WebhookConfig -func (t *CreateConnectorConfig) FromWebhookConfig(v WebhookConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeWebhookConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided WebhookConfig -func (t *CreateConnectorConfig) MergeWebhookConfig(v WebhookConfig) error { - b, err := json.Marshal(v) +// ParseGetCaseTagsDefaultSpaceResponse parses an HTTP response from a GetCaseTagsDefaultSpaceWithResponse call +func ParseGetCaseTagsDefaultSpaceResponse(rsp 
*http.Response) (*GetCaseTagsDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetCaseTagsDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsCasesWebhookConfig returns the union data inside the CreateConnectorConfig as a CasesWebhookConfig -func (t CreateConnectorConfig) AsCasesWebhookConfig() (CasesWebhookConfig, error) { - var body CasesWebhookConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []string + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromCasesWebhookConfig overwrites any union data inside the CreateConnectorConfig as the provided CasesWebhookConfig -func (t *CreateConnectorConfig) FromCasesWebhookConfig(v CasesWebhookConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeCasesWebhookConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided CasesWebhookConfig -func (t *CreateConnectorConfig) MergeCasesWebhookConfig(v CasesWebhookConfig) error { - b, err := json.Marshal(v) +// ParseGetCaseDefaultSpaceResponse parses an HTTP response from a GetCaseDefaultSpaceWithResponse call +func ParseGetCaseDefaultSpaceResponse(rsp *http.Response) (*GetCaseDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetCaseDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsXmattersConfig returns the union data inside the CreateConnectorConfig as a XmattersConfig -func (t CreateConnectorConfig) AsXmattersConfig() (XmattersConfig, error) { - var body XmattersConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CasesCaseResponseProperties + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromXmattersConfig overwrites any union data inside the CreateConnectorConfig as the provided XmattersConfig -func (t *CreateConnectorConfig) FromXmattersConfig(v XmattersConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeXmattersConfig performs a merge with any union data inside the CreateConnectorConfig, using the provided XmattersConfig -func (t *CreateConnectorConfig) MergeXmattersConfig(v XmattersConfig) error { - b, err := json.Marshal(v) +// ParseGetCaseAlertsDefaultSpaceResponse parses an HTTP response from a GetCaseAlertsDefaultSpaceWithResponse call +func 
ParseGetCaseAlertsDefaultSpaceResponse(rsp *http.Response) (*GetCaseAlertsDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetCaseAlertsDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsBedrockSecrets returns the union data inside the CreateConnectorSecrets as a BedrockSecrets -func (t CreateConnectorSecrets) AsBedrockSecrets() (BedrockSecrets, error) { - var body BedrockSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []CasesAlertResponseProperties + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromBedrockSecrets overwrites any union data inside the CreateConnectorSecrets as the provided BedrockSecrets -func (t *CreateConnectorSecrets) FromBedrockSecrets(v BedrockSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeBedrockSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided BedrockSecrets -func (t *CreateConnectorSecrets) MergeBedrockSecrets(v BedrockSecrets) error { - b, err := json.Marshal(v) +// ParseDeleteCaseCommentsDefaultSpaceResponse parses an HTTP response from a DeleteCaseCommentsDefaultSpaceWithResponse call +func ParseDeleteCaseCommentsDefaultSpaceResponse(rsp *http.Response) (*DeleteCaseCommentsDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteCaseCommentsDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsCrowdstrikeSecrets returns the union data inside the CreateConnectorSecrets as a CrowdstrikeSecrets -func (t CreateConnectorSecrets) AsCrowdstrikeSecrets() (CrowdstrikeSecrets, error) { - var body CrowdstrikeSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromCrowdstrikeSecrets overwrites any union data inside the CreateConnectorSecrets as the provided CrowdstrikeSecrets -func (t *CreateConnectorSecrets) FromCrowdstrikeSecrets(v CrowdstrikeSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeCrowdstrikeSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided CrowdstrikeSecrets -func (t *CreateConnectorSecrets) MergeCrowdstrikeSecrets(v CrowdstrikeSecrets) error { - b, err := json.Marshal(v) +// ParseUpdateCaseCommentDefaultSpaceResponse parses an HTTP response from a UpdateCaseCommentDefaultSpaceWithResponse call +func ParseUpdateCaseCommentDefaultSpaceResponse(rsp *http.Response) (*UpdateCaseCommentDefaultSpaceResponse, error) { + 
bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &UpdateCaseCommentDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CasesCaseResponseProperties + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// AsD3securitySecrets returns the union data inside the CreateConnectorSecrets as a D3securitySecrets -func (t CreateConnectorSecrets) AsD3securitySecrets() (D3securitySecrets, error) { - var body D3securitySecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromD3securitySecrets overwrites any union data inside the CreateConnectorSecrets as the provided D3securitySecrets -func (t *CreateConnectorSecrets) FromD3securitySecrets(v D3securitySecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeD3securitySecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided D3securitySecrets -func (t *CreateConnectorSecrets) MergeD3securitySecrets(v D3securitySecrets) error { - b, err := json.Marshal(v) +// ParseAddCaseCommentDefaultSpaceResponse parses an HTTP response from a AddCaseCommentDefaultSpaceWithResponse call +func ParseAddCaseCommentDefaultSpaceResponse(rsp *http.Response) (*AddCaseCommentDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &AddCaseCommentDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsEmailSecrets returns the union data inside the CreateConnectorSecrets as a EmailSecrets -func (t CreateConnectorSecrets) AsEmailSecrets() (EmailSecrets, error) { - var body EmailSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CasesCaseResponseProperties + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromEmailSecrets overwrites any union data inside the CreateConnectorSecrets as the provided EmailSecrets -func (t *CreateConnectorSecrets) FromEmailSecrets(v EmailSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeEmailSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided EmailSecrets -func (t *CreateConnectorSecrets) MergeEmailSecrets(v EmailSecrets) error { - b, err := json.Marshal(v) +// ParseFindCaseCommentsDefaultSpaceResponse parses an HTTP response from a FindCaseCommentsDefaultSpaceWithResponse call +func ParseFindCaseCommentsDefaultSpaceResponse(rsp 
*http.Response) (*FindCaseCommentsDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &FindCaseCommentsDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsGeminiSecrets returns the union data inside the CreateConnectorSecrets as a GeminiSecrets -func (t CreateConnectorSecrets) AsGeminiSecrets() (GeminiSecrets, error) { - var body GeminiSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CasesCaseResponseProperties + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromGeminiSecrets overwrites any union data inside the CreateConnectorSecrets as the provided GeminiSecrets -func (t *CreateConnectorSecrets) FromGeminiSecrets(v GeminiSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeGeminiSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided GeminiSecrets -func (t *CreateConnectorSecrets) MergeGeminiSecrets(v GeminiSecrets) error { - b, err := json.Marshal(v) +// ParseDeleteCaseCommentDefaultSpaceResponse parses an HTTP response from a DeleteCaseCommentDefaultSpaceWithResponse call +func ParseDeleteCaseCommentDefaultSpaceResponse(rsp *http.Response) (*DeleteCaseCommentDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteCaseCommentDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsResilientSecrets returns the union data inside the CreateConnectorSecrets as a ResilientSecrets -func (t CreateConnectorSecrets) AsResilientSecrets() (ResilientSecrets, error) { - var body ResilientSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromResilientSecrets overwrites any union data inside the CreateConnectorSecrets as the provided ResilientSecrets -func (t *CreateConnectorSecrets) FromResilientSecrets(v ResilientSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeResilientSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided ResilientSecrets -func (t *CreateConnectorSecrets) MergeResilientSecrets(v ResilientSecrets) error { - b, err := json.Marshal(v) +// ParseGetCaseCommentDefaultSpaceResponse parses an HTTP response from a GetCaseCommentDefaultSpaceWithResponse call +func ParseGetCaseCommentDefaultSpaceResponse(rsp *http.Response) (*GetCaseCommentDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - 
return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetCaseCommentDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsJiraSecrets returns the union data inside the CreateConnectorSecrets as a JiraSecrets -func (t CreateConnectorSecrets) AsJiraSecrets() (JiraSecrets, error) { - var body JiraSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromJiraSecrets overwrites any union data inside the CreateConnectorSecrets as the provided JiraSecrets -func (t *CreateConnectorSecrets) FromJiraSecrets(v JiraSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeJiraSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided JiraSecrets -func (t *CreateConnectorSecrets) MergeJiraSecrets(v JiraSecrets) error { - b, err := json.Marshal(v) +// ParsePushCaseDefaultSpaceResponse parses an HTTP response from a PushCaseDefaultSpaceWithResponse call +func ParsePushCaseDefaultSpaceResponse(rsp *http.Response) (*PushCaseDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &PushCaseDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsDefenderSecrets returns the union data inside the CreateConnectorSecrets as a DefenderSecrets -func (t CreateConnectorSecrets) AsDefenderSecrets() (DefenderSecrets, error) { - var body DefenderSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CasesCaseResponseProperties + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromDefenderSecrets overwrites any union data inside the CreateConnectorSecrets as the provided DefenderSecrets -func (t *CreateConnectorSecrets) FromDefenderSecrets(v DefenderSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeDefenderSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided DefenderSecrets -func (t *CreateConnectorSecrets) MergeDefenderSecrets(v DefenderSecrets) error { - b, err := json.Marshal(v) +// ParseAddCaseFileDefaultSpaceResponse parses an HTTP response from a AddCaseFileDefaultSpaceWithResponse call +func ParseAddCaseFileDefaultSpaceResponse(rsp *http.Response) (*AddCaseFileDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + 
return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &AddCaseFileDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsTeamsSecrets returns the union data inside the CreateConnectorSecrets as a TeamsSecrets -func (t CreateConnectorSecrets) AsTeamsSecrets() (TeamsSecrets, error) { - var body TeamsSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CasesCaseResponseProperties + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromTeamsSecrets overwrites any union data inside the CreateConnectorSecrets as the provided TeamsSecrets -func (t *CreateConnectorSecrets) FromTeamsSecrets(v TeamsSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeTeamsSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided TeamsSecrets -func (t *CreateConnectorSecrets) MergeTeamsSecrets(v TeamsSecrets) error { - b, err := json.Marshal(v) +// ParseFindCaseActivityDefaultSpaceResponse parses an HTTP response from a FindCaseActivityDefaultSpaceWithResponse call +func ParseFindCaseActivityDefaultSpaceResponse(rsp *http.Response) (*FindCaseActivityDefaultSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &FindCaseActivityDefaultSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsGenaiSecrets returns the union data inside the CreateConnectorSecrets as a GenaiSecrets -func (t CreateConnectorSecrets) AsGenaiSecrets() (GenaiSecrets, error) { - var body GenaiSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Page *int `json:"page,omitempty"` + PerPage *int `json:"perPage,omitempty"` + Total *int `json:"total,omitempty"` + UserActions *[]CasesUserActionsFindResponseProperties `json:"userActions,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromGenaiSecrets overwrites any union data inside the CreateConnectorSecrets as the provided GenaiSecrets -func (t *CreateConnectorSecrets) FromGenaiSecrets(v GenaiSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest Cases4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + } + + return response, nil } -// MergeGenaiSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided GenaiSecrets -func (t *CreateConnectorSecrets) MergeGenaiSecrets(v GenaiSecrets) error { - b, err := json.Marshal(v) +// ParseUpdateFieldsMetadataDefaultResponse parses an HTTP response from a UpdateFieldsMetadataDefaultWithResponse call +func 
ParseUpdateFieldsMetadataDefaultResponse(rsp *http.Response) (*UpdateFieldsMetadataDefaultResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &UpdateFieldsMetadataDefaultResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsOpsgenieSecrets returns the union data inside the CreateConnectorSecrets as a OpsgenieSecrets -func (t CreateConnectorSecrets) AsOpsgenieSecrets() (OpsgenieSecrets, error) { - var body OpsgenieSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Acknowledged *bool `json:"acknowledged,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromOpsgenieSecrets overwrites any union data inside the CreateConnectorSecrets as the provided OpsgenieSecrets -func (t *CreateConnectorSecrets) FromOpsgenieSecrets(v OpsgenieSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest DataViews400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil } -// MergeOpsgenieSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided OpsgenieSecrets -func (t *CreateConnectorSecrets) MergeOpsgenieSecrets(v OpsgenieSecrets) error { - b, err := json.Marshal(v) +// ParseCreateRuntimeFieldDefaultResponse parses an HTTP response from a CreateRuntimeFieldDefaultWithResponse call +func ParseCreateRuntimeFieldDefaultResponse(rsp *http.Response) (*CreateRuntimeFieldDefaultResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateRuntimeFieldDefaultResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPagerdutySecrets returns the union data inside the CreateConnectorSecrets as a PagerdutySecrets -func (t CreateConnectorSecrets) AsPagerdutySecrets() (PagerdutySecrets, error) { - var body PagerdutySecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPagerdutySecrets overwrites any union data inside the CreateConnectorSecrets as the provided PagerdutySecrets -func (t *CreateConnectorSecrets) FromPagerdutySecrets(v PagerdutySecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePagerdutySecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided PagerdutySecrets -func (t *CreateConnectorSecrets) MergePagerdutySecrets(v PagerdutySecrets) error { - b, err := json.Marshal(v) +// ParseCreateUpdateRuntimeFieldDefaultResponse parses an HTTP response from a CreateUpdateRuntimeFieldDefaultWithResponse call +func ParseCreateUpdateRuntimeFieldDefaultResponse(rsp *http.Response) 
(*CreateUpdateRuntimeFieldDefaultResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateUpdateRuntimeFieldDefaultResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsSentineloneSecrets returns the union data inside the CreateConnectorSecrets as a SentineloneSecrets -func (t CreateConnectorSecrets) AsSentineloneSecrets() (SentineloneSecrets, error) { - var body SentineloneSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + DataView *map[string]interface{} `json:"data_view,omitempty"` + Fields *[]map[string]interface{} `json:"fields,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromSentineloneSecrets overwrites any union data inside the CreateConnectorSecrets as the provided SentineloneSecrets -func (t *CreateConnectorSecrets) FromSentineloneSecrets(v SentineloneSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest DataViews400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil } -// MergeSentineloneSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided SentineloneSecrets -func (t *CreateConnectorSecrets) MergeSentineloneSecrets(v SentineloneSecrets) error { - b, err := json.Marshal(v) +// ParseDeleteRuntimeFieldDefaultResponse parses an HTTP response from a DeleteRuntimeFieldDefaultWithResponse call +func ParseDeleteRuntimeFieldDefaultResponse(rsp *http.Response) (*DeleteRuntimeFieldDefaultResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteRuntimeFieldDefaultResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsServicenowSecrets returns the union data inside the CreateConnectorSecrets as a ServicenowSecrets -func (t CreateConnectorSecrets) AsServicenowSecrets() (ServicenowSecrets, error) { - var body ServicenowSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest DataViews404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// FromServicenowSecrets overwrites any union data inside the CreateConnectorSecrets as the provided ServicenowSecrets -func (t *CreateConnectorSecrets) FromServicenowSecrets(v ServicenowSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeServicenowSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided ServicenowSecrets -func (t *CreateConnectorSecrets) MergeServicenowSecrets(v ServicenowSecrets) error { - b, err := json.Marshal(v) +// ParseGetRuntimeFieldDefaultResponse parses an HTTP response from a GetRuntimeFieldDefaultWithResponse call +func ParseGetRuntimeFieldDefaultResponse(rsp 
*http.Response) (*GetRuntimeFieldDefaultResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetRuntimeFieldDefaultResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsSlackApiSecrets returns the union data inside the CreateConnectorSecrets as a SlackApiSecrets -func (t CreateConnectorSecrets) AsSlackApiSecrets() (SlackApiSecrets, error) { - var body SlackApiSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + DataView *map[string]interface{} `json:"data_view,omitempty"` + Fields *[]map[string]interface{} `json:"fields,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromSlackApiSecrets overwrites any union data inside the CreateConnectorSecrets as the provided SlackApiSecrets -func (t *CreateConnectorSecrets) FromSlackApiSecrets(v SlackApiSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest DataViews404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + } + + return response, nil } -// MergeSlackApiSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided SlackApiSecrets -func (t *CreateConnectorSecrets) MergeSlackApiSecrets(v SlackApiSecrets) error { - b, err := json.Marshal(v) +// ParseUpdateRuntimeFieldDefaultResponse parses an HTTP response from a UpdateRuntimeFieldDefaultWithResponse call +func ParseUpdateRuntimeFieldDefaultResponse(rsp *http.Response) (*UpdateRuntimeFieldDefaultResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &UpdateRuntimeFieldDefaultResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest DataViews400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsSwimlaneSecrets returns the union data inside the CreateConnectorSecrets as a SwimlaneSecrets -func (t CreateConnectorSecrets) AsSwimlaneSecrets() (SwimlaneSecrets, error) { - var body SwimlaneSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromSwimlaneSecrets overwrites any union data inside the CreateConnectorSecrets as the provided SwimlaneSecrets -func (t *CreateConnectorSecrets) FromSwimlaneSecrets(v SwimlaneSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeSwimlaneSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided SwimlaneSecrets -func (t *CreateConnectorSecrets) MergeSwimlaneSecrets(v SwimlaneSecrets) error { - b, err := json.Marshal(v) +// ParseGetDefaultDataViewDefaultResponse parses an HTTP response from a GetDefaultDataViewDefaultWithResponse call +func ParseGetDefaultDataViewDefaultResponse(rsp *http.Response) (*GetDefaultDataViewDefaultResponse, error) { + 
bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetDefaultDataViewDefaultResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsThehiveSecrets returns the union data inside the CreateConnectorSecrets as a ThehiveSecrets -func (t CreateConnectorSecrets) AsThehiveSecrets() (ThehiveSecrets, error) { - var body ThehiveSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + DataViewId *string `json:"data_view_id,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromThehiveSecrets overwrites any union data inside the CreateConnectorSecrets as the provided ThehiveSecrets -func (t *CreateConnectorSecrets) FromThehiveSecrets(v ThehiveSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest DataViews400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil } -// MergeThehiveSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided ThehiveSecrets -func (t *CreateConnectorSecrets) MergeThehiveSecrets(v ThehiveSecrets) error { - b, err := json.Marshal(v) +// ParseSetDefaultDatailViewDefaultResponse parses an HTTP response from a SetDefaultDatailViewDefaultWithResponse call +func ParseSetDefaultDatailViewDefaultResponse(rsp *http.Response) (*SetDefaultDatailViewDefaultResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &SetDefaultDatailViewDefaultResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsTinesSecrets returns the union data inside the CreateConnectorSecrets as a TinesSecrets -func (t CreateConnectorSecrets) AsTinesSecrets() (TinesSecrets, error) { - var body TinesSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Acknowledged *bool `json:"acknowledged,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromTinesSecrets overwrites any union data inside the CreateConnectorSecrets as the provided TinesSecrets -func (t *CreateConnectorSecrets) FromTinesSecrets(v TinesSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest DataViews400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil } -// MergeTinesSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided TinesSecrets -func (t *CreateConnectorSecrets) MergeTinesSecrets(v TinesSecrets) error { - b, err := json.Marshal(v) +// ParseSwapDataViewsDefaultResponse parses an HTTP response from a SwapDataViewsDefaultWithResponse call +func 
ParseSwapDataViewsDefaultResponse(rsp *http.Response) (*SwapDataViewsDefaultResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &SwapDataViewsDefaultResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsTorqSecrets returns the union data inside the CreateConnectorSecrets as a TorqSecrets -func (t CreateConnectorSecrets) AsTorqSecrets() (TorqSecrets, error) { - var body TorqSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + DeleteStatus *struct { + DeletePerformed *bool `json:"deletePerformed,omitempty"` + RemainingRefs *int `json:"remainingRefs,omitempty"` + } `json:"deleteStatus,omitempty"` + Result *[]struct { + // Id A saved object identifier. + Id *string `json:"id,omitempty"` + + // Type The saved object type. + Type *string `json:"type,omitempty"` + } `json:"result,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromTorqSecrets overwrites any union data inside the CreateConnectorSecrets as the provided TorqSecrets -func (t *CreateConnectorSecrets) FromTorqSecrets(v TorqSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeTorqSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided TorqSecrets -func (t *CreateConnectorSecrets) MergeTorqSecrets(v TorqSecrets) error { - b, err := json.Marshal(v) +// ParsePreviewSwapDataViewsDefaultResponse parses an HTTP response from a PreviewSwapDataViewsDefaultWithResponse call +func ParsePreviewSwapDataViewsDefaultResponse(rsp *http.Response) (*PreviewSwapDataViewsDefaultResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &PreviewSwapDataViewsDefaultResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsWebhookSecrets returns the union data inside the CreateConnectorSecrets as a WebhookSecrets -func (t CreateConnectorSecrets) AsWebhookSecrets() (WebhookSecrets, error) { - var body WebhookSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Result *[]struct { + // Id A saved object identifier. + Id *string `json:"id,omitempty"` -// FromWebhookSecrets overwrites any union data inside the CreateConnectorSecrets as the provided WebhookSecrets -func (t *CreateConnectorSecrets) FromWebhookSecrets(v WebhookSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + // Type The saved object type. 
+ Type *string `json:"type,omitempty"` + } `json:"result,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -// MergeWebhookSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided WebhookSecrets -func (t *CreateConnectorSecrets) MergeWebhookSecrets(v WebhookSecrets) error { - b, err := json.Marshal(v) +// ParseDeleteAlertsIndexResponse parses an HTTP response from a DeleteAlertsIndexWithResponse call +func ParseDeleteAlertsIndexResponse(rsp *http.Response) (*DeleteAlertsIndexResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteAlertsIndexResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsCasesWebhookSecrets returns the union data inside the CreateConnectorSecrets as a CasesWebhookSecrets -func (t CreateConnectorSecrets) AsCasesWebhookSecrets() (CasesWebhookSecrets, error) { - var body CasesWebhookSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Acknowledged bool `json:"acknowledged"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromCasesWebhookSecrets overwrites any union data inside the CreateConnectorSecrets as the provided CasesWebhookSecrets -func (t *CreateConnectorSecrets) FromCasesWebhookSecrets(v CasesWebhookSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// MergeCasesWebhookSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided CasesWebhookSecrets -func (t *CreateConnectorSecrets) MergeCasesWebhookSecrets(v CasesWebhookSecrets) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest string + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// AsXmattersSecrets returns the union data inside the CreateConnectorSecrets as a XmattersSecrets -func (t CreateConnectorSecrets) AsXmattersSecrets() (XmattersSecrets, error) { - var body XmattersSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// FromXmattersSecrets overwrites any union data inside the CreateConnectorSecrets as the provided XmattersSecrets -func (t *CreateConnectorSecrets) 
FromXmattersSecrets(v XmattersSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeXmattersSecrets performs a merge with any union data inside the CreateConnectorSecrets, using the provided XmattersSecrets -func (t *CreateConnectorSecrets) MergeXmattersSecrets(v XmattersSecrets) error { - b, err := json.Marshal(v) +// ParseReadAlertsIndexResponse parses an HTTP response from a ReadAlertsIndexWithResponse call +func ParseReadAlertsIndexResponse(rsp *http.Response) (*ReadAlertsIndexResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ReadAlertsIndexResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsCreateParamResponse0 returns the union data inside the CreateParamResponse as a CreateParamResponse0 -func (t CreateParamResponse) AsCreateParamResponse0() (CreateParamResponse0, error) { - var body CreateParamResponse0 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + IndexMappingOutdated *bool `json:"index_mapping_outdated"` + Name string `json:"name"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromCreateParamResponse0 overwrites any union data inside the CreateParamResponse as the provided CreateParamResponse0 -func (t *CreateParamResponse) FromCreateParamResponse0(v CreateParamResponse0) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// MergeCreateParamResponse0 performs a merge with any union data inside the CreateParamResponse, using the provided CreateParamResponse0 -func (t *CreateParamResponse) MergeCreateParamResponse0(v CreateParamResponse0) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// AsSyntheticsPostParameterResponse returns the union data inside the CreateParamResponse as a SyntheticsPostParameterResponse -func (t CreateParamResponse) AsSyntheticsPostParameterResponse() (SyntheticsPostParameterResponse, error) { - var body SyntheticsPostParameterResponse - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// FromSyntheticsPostParameterResponse overwrites any union data inside the CreateParamResponse as 
the provided SyntheticsPostParameterResponse -func (t *CreateParamResponse) FromSyntheticsPostParameterResponse(v SyntheticsPostParameterResponse) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeSyntheticsPostParameterResponse performs a merge with any union data inside the CreateParamResponse, using the provided SyntheticsPostParameterResponse -func (t *CreateParamResponse) MergeSyntheticsPostParameterResponse(v SyntheticsPostParameterResponse) error { - b, err := json.Marshal(v) +// ParseCreateAlertsIndexResponse parses an HTTP response from a CreateAlertsIndexWithResponse call +func ParseCreateAlertsIndexResponse(rsp *http.Response) (*CreateAlertsIndexResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateAlertsIndexResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t CreateParamResponse) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Acknowledged bool `json:"acknowledged"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *CreateParamResponse) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// AsNewOutputElasticsearchSecretsSslKey0 returns the union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as a NewOutputElasticsearchSecretsSslKey0 -func (t NewOutputElasticsearch_Secrets_Ssl_Key) AsNewOutputElasticsearchSecretsSslKey0() (NewOutputElasticsearchSecretsSslKey0, error) { - var body NewOutputElasticsearchSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// FromNewOutputElasticsearchSecretsSslKey0 overwrites any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as the provided NewOutputElasticsearchSecretsSslKey0 -func (t *NewOutputElasticsearch_Secrets_Ssl_Key) FromNewOutputElasticsearchSecretsSslKey0(v NewOutputElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeNewOutputElasticsearchSecretsSslKey0 performs a merge with any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key, using the provided NewOutputElasticsearchSecretsSslKey0 -func (t 
*NewOutputElasticsearch_Secrets_Ssl_Key) MergeNewOutputElasticsearchSecretsSslKey0(v NewOutputElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) +// ParseReadPrivilegesResponse parses an HTTP response from a ReadPrivilegesWithResponse call +func ParseReadPrivilegesResponse(rsp *http.Response) (*ReadPrivilegesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ReadPrivilegesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsNewOutputElasticsearchSecretsSslKey1 returns the union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as a NewOutputElasticsearchSecretsSslKey1 -func (t NewOutputElasticsearch_Secrets_Ssl_Key) AsNewOutputElasticsearchSecretsSslKey1() (NewOutputElasticsearchSecretsSslKey1, error) { - var body NewOutputElasticsearchSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + HasEncryptionKey bool `json:"has_encryption_key"` + IsAuthenticated bool `json:"is_authenticated"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromNewOutputElasticsearchSecretsSslKey1 overwrites any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key as the provided NewOutputElasticsearchSecretsSslKey1 -func (t *NewOutputElasticsearch_Secrets_Ssl_Key) FromNewOutputElasticsearchSecretsSslKey1(v NewOutputElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// MergeNewOutputElasticsearchSecretsSslKey1 performs a merge with any union data inside the NewOutputElasticsearch_Secrets_Ssl_Key, using the provided NewOutputElasticsearchSecretsSslKey1 -func (t *NewOutputElasticsearch_Secrets_Ssl_Key) MergeNewOutputElasticsearchSecretsSslKey1(v NewOutputElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err + return response, nil } -func (t NewOutputElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} +// ParseDeleteRuleResponse parses an HTTP response from a DeleteRuleWithResponse call +func ParseDeleteRuleResponse(rsp *http.Response) (*DeleteRuleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } -func (t *NewOutputElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + response := &DeleteRuleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsNewOutputKafkaSecretsPassword0 returns the union data inside the NewOutputKafka_Secrets_Password as a NewOutputKafkaSecretsPassword0 -func (t 
NewOutputKafka_Secrets_Password) AsNewOutputKafkaSecretsPassword0() (NewOutputKafkaSecretsPassword0, error) { - var body NewOutputKafkaSecretsPassword0 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityDetectionsAPIRuleResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromNewOutputKafkaSecretsPassword0 overwrites any union data inside the NewOutputKafka_Secrets_Password as the provided NewOutputKafkaSecretsPassword0 -func (t *NewOutputKafka_Secrets_Password) FromNewOutputKafkaSecretsPassword0(v NewOutputKafkaSecretsPassword0) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeNewOutputKafkaSecretsPassword0 performs a merge with any union data inside the NewOutputKafka_Secrets_Password, using the provided NewOutputKafkaSecretsPassword0 -func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword0(v NewOutputKafkaSecretsPassword0) error { - b, err := json.Marshal(v) +// ParseReadRuleResponse parses an HTTP response from a ReadRuleWithResponse call +func ParseReadRuleResponse(rsp *http.Response) (*ReadRuleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ReadRuleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsNewOutputKafkaSecretsPassword1 returns the union data inside the NewOutputKafka_Secrets_Password as a NewOutputKafkaSecretsPassword1 -func (t NewOutputKafka_Secrets_Password) AsNewOutputKafkaSecretsPassword1() (NewOutputKafkaSecretsPassword1, error) { - var body NewOutputKafkaSecretsPassword1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityDetectionsAPIRuleResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromNewOutputKafkaSecretsPassword1 overwrites any union data inside the NewOutputKafka_Secrets_Password as the provided NewOutputKafkaSecretsPassword1 -func (t *NewOutputKafka_Secrets_Password) FromNewOutputKafkaSecretsPassword1(v NewOutputKafkaSecretsPassword1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeNewOutputKafkaSecretsPassword1 performs a merge with any union data inside the NewOutputKafka_Secrets_Password, using the provided NewOutputKafkaSecretsPassword1 -func (t *NewOutputKafka_Secrets_Password) MergeNewOutputKafkaSecretsPassword1(v NewOutputKafkaSecretsPassword1) error { - b, err := json.Marshal(v) +// ParsePatchRuleResponse parses an HTTP response from a PatchRuleWithResponse call +func ParsePatchRuleResponse(rsp *http.Response) (*PatchRuleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t NewOutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + response := &PatchRuleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t *NewOutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error 
{ - err := t.union.UnmarshalJSON(b) - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityDetectionsAPIRuleResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsNewOutputKafkaSecretsSslKey0 returns the union data inside the NewOutputKafka_Secrets_Ssl_Key as a NewOutputKafkaSecretsSslKey0 -func (t NewOutputKafka_Secrets_Ssl_Key) AsNewOutputKafkaSecretsSslKey0() (NewOutputKafkaSecretsSslKey0, error) { - var body NewOutputKafkaSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromNewOutputKafkaSecretsSslKey0 overwrites any union data inside the NewOutputKafka_Secrets_Ssl_Key as the provided NewOutputKafkaSecretsSslKey0 -func (t *NewOutputKafka_Secrets_Ssl_Key) FromNewOutputKafkaSecretsSslKey0(v NewOutputKafkaSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeNewOutputKafkaSecretsSslKey0 performs a merge with any union data inside the NewOutputKafka_Secrets_Ssl_Key, using the provided NewOutputKafkaSecretsSslKey0 -func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey0(v NewOutputKafkaSecretsSslKey0) error { - b, err := json.Marshal(v) +// ParseCreateRuleResponse parses an HTTP response from a CreateRuleWithResponse call +func ParseCreateRuleResponse(rsp *http.Response) (*CreateRuleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateRuleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsNewOutputKafkaSecretsSslKey1 returns the union data inside the NewOutputKafka_Secrets_Ssl_Key as a NewOutputKafkaSecretsSslKey1 -func (t NewOutputKafka_Secrets_Ssl_Key) AsNewOutputKafkaSecretsSslKey1() (NewOutputKafkaSecretsSslKey1, error) { - var body NewOutputKafkaSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityDetectionsAPIRuleResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromNewOutputKafkaSecretsSslKey1 overwrites any union data inside the NewOutputKafka_Secrets_Ssl_Key as the provided NewOutputKafkaSecretsSslKey1 -func (t *NewOutputKafka_Secrets_Ssl_Key) FromNewOutputKafkaSecretsSslKey1(v NewOutputKafkaSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeNewOutputKafkaSecretsSslKey1 performs a merge with any union data inside the NewOutputKafka_Secrets_Ssl_Key, using the provided NewOutputKafkaSecretsSslKey1 -func (t *NewOutputKafka_Secrets_Ssl_Key) MergeNewOutputKafkaSecretsSslKey1(v NewOutputKafkaSecretsSslKey1) error { - b, err := json.Marshal(v) +// ParseUpdateRuleResponse parses an HTTP response from a UpdateRuleWithResponse call +func ParseUpdateRuleResponse(rsp *http.Response) (*UpdateRuleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t NewOutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + 
response := &UpdateRuleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t *NewOutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityDetectionsAPIRuleResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsNewOutputLogstashSecretsSslKey0 returns the union data inside the NewOutputLogstash_Secrets_Ssl_Key as a NewOutputLogstashSecretsSslKey0 -func (t NewOutputLogstash_Secrets_Ssl_Key) AsNewOutputLogstashSecretsSslKey0() (NewOutputLogstashSecretsSslKey0, error) { - var body NewOutputLogstashSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromNewOutputLogstashSecretsSslKey0 overwrites any union data inside the NewOutputLogstash_Secrets_Ssl_Key as the provided NewOutputLogstashSecretsSslKey0 -func (t *NewOutputLogstash_Secrets_Ssl_Key) FromNewOutputLogstashSecretsSslKey0(v NewOutputLogstashSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeNewOutputLogstashSecretsSslKey0 performs a merge with any union data inside the NewOutputLogstash_Secrets_Ssl_Key, using the provided NewOutputLogstashSecretsSslKey0 -func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey0(v NewOutputLogstashSecretsSslKey0) error { - b, err := json.Marshal(v) +// ParsePerformRulesBulkActionResponse parses an HTTP response from a PerformRulesBulkActionWithResponse call +func ParsePerformRulesBulkActionResponse(rsp *http.Response) (*PerformRulesBulkActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &PerformRulesBulkActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsNewOutputLogstashSecretsSslKey1 returns the union data inside the NewOutputLogstash_Secrets_Ssl_Key as a NewOutputLogstashSecretsSslKey1 -func (t NewOutputLogstash_Secrets_Ssl_Key) AsNewOutputLogstashSecretsSslKey1() (NewOutputLogstashSecretsSslKey1, error) { - var body NewOutputLogstashSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromNewOutputLogstashSecretsSslKey1 overwrites any union data inside the NewOutputLogstash_Secrets_Ssl_Key as the provided NewOutputLogstashSecretsSslKey1 -func (t *NewOutputLogstash_Secrets_Ssl_Key) FromNewOutputLogstashSecretsSslKey1(v NewOutputLogstashSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeNewOutputLogstashSecretsSslKey1 performs a merge with any union data inside the NewOutputLogstash_Secrets_Ssl_Key, using the provided NewOutputLogstashSecretsSslKey1 -func (t *NewOutputLogstash_Secrets_Ssl_Key) MergeNewOutputLogstashSecretsSslKey1(v NewOutputLogstashSecretsSslKey1) error { - b, err := json.Marshal(v) +// ParseExportRulesResponse parses an HTTP response from a ExportRulesWithResponse call +func ParseExportRulesResponse(rsp *http.Response) (*ExportRulesResponse, error) { + bodyBytes, 
err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ExportRulesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t NewOutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err + return response, nil } -func (t *NewOutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} +// ParseFindRulesResponse parses an HTTP response from a FindRulesWithResponse call +func ParseFindRulesResponse(rsp *http.Response) (*FindRulesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } -// AsNewOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as a NewOutputRemoteElasticsearchSecretsServiceToken0 -func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) AsNewOutputRemoteElasticsearchSecretsServiceToken0() (NewOutputRemoteElasticsearchSecretsServiceToken0, error) { - var body NewOutputRemoteElasticsearchSecretsServiceToken0 - err := json.Unmarshal(t.union, &body) - return body, err -} + response := &FindRulesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// FromNewOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as the provided NewOutputRemoteElasticsearchSecretsServiceToken0 -func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) FromNewOutputRemoteElasticsearchSecretsServiceToken0(v NewOutputRemoteElasticsearchSecretsServiceToken0) error { - b, err := json.Marshal(v) - t.union = b - return err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Data []SecurityDetectionsAPIRuleResponse `json:"data"` + Page int `json:"page"` + PerPage int `json:"perPage"` + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -// MergeNewOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided NewOutputRemoteElasticsearchSecretsServiceToken0 -func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemoteElasticsearchSecretsServiceToken0(v NewOutputRemoteElasticsearchSecretsServiceToken0) error { - b, err := json.Marshal(v) +// ParseImportRulesResponse parses an HTTP response from a ImportRulesWithResponse call +func ParseImportRulesResponse(rsp *http.Response) (*ImportRulesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ImportRulesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsNewOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as a NewOutputRemoteElasticsearchSecretsServiceToken1 -func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) AsNewOutputRemoteElasticsearchSecretsServiceToken1() (NewOutputRemoteElasticsearchSecretsServiceToken1, error) { - var body 
NewOutputRemoteElasticsearchSecretsServiceToken1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + ActionConnectorsErrors []SecurityDetectionsAPIErrorSchema `json:"action_connectors_errors"` + ActionConnectorsSuccess bool `json:"action_connectors_success"` + ActionConnectorsSuccessCount int `json:"action_connectors_success_count"` + ActionConnectorsWarnings []SecurityDetectionsAPIWarningSchema `json:"action_connectors_warnings"` + Errors []SecurityDetectionsAPIErrorSchema `json:"errors"` + ExceptionsErrors []SecurityDetectionsAPIErrorSchema `json:"exceptions_errors"` + ExceptionsSuccess bool `json:"exceptions_success"` + ExceptionsSuccessCount int `json:"exceptions_success_count"` + RulesCount int `json:"rules_count"` + Success bool `json:"success"` + SuccessCount int `json:"success_count"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromNewOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken as the provided NewOutputRemoteElasticsearchSecretsServiceToken1 -func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) FromNewOutputRemoteElasticsearchSecretsServiceToken1(v NewOutputRemoteElasticsearchSecretsServiceToken1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeNewOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided NewOutputRemoteElasticsearchSecretsServiceToken1 -func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) MergeNewOutputRemoteElasticsearchSecretsServiceToken1(v NewOutputRemoteElasticsearchSecretsServiceToken1) error { - b, err := json.Marshal(v) +// ParseInstallPrebuiltRulesAndTimelinesResponse parses an HTTP response from a InstallPrebuiltRulesAndTimelinesWithResponse call +func ParseInstallPrebuiltRulesAndTimelinesResponse(rsp *http.Response) (*InstallPrebuiltRulesAndTimelinesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &InstallPrebuiltRulesAndTimelinesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t NewOutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // RulesInstalled The number of rules installed + RulesInstalled int `json:"rules_installed"` -func (t *NewOutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + // RulesUpdated The number of rules updated + RulesUpdated int `json:"rules_updated"` -// AsNewOutputRemoteElasticsearchSecretsSslKey0 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as a NewOutputRemoteElasticsearchSecretsSslKey0 -func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) AsNewOutputRemoteElasticsearchSecretsSslKey0() (NewOutputRemoteElasticsearchSecretsSslKey0, error) { - var body NewOutputRemoteElasticsearchSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err 
-} + // TimelinesInstalled The number of timelines installed + TimelinesInstalled int `json:"timelines_installed"` -// FromNewOutputRemoteElasticsearchSecretsSslKey0 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided NewOutputRemoteElasticsearchSecretsSslKey0 -func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) FromNewOutputRemoteElasticsearchSecretsSslKey0(v NewOutputRemoteElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // TimelinesUpdated The number of timelines updated + TimelinesUpdated int `json:"timelines_updated"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// MergeNewOutputRemoteElasticsearchSecretsSslKey0 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided NewOutputRemoteElasticsearchSecretsSslKey0 -func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeNewOutputRemoteElasticsearchSecretsSslKey0(v NewOutputRemoteElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsNewOutputRemoteElasticsearchSecretsSslKey1 returns the union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as a NewOutputRemoteElasticsearchSecretsSslKey1 -func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) AsNewOutputRemoteElasticsearchSecretsSslKey1() (NewOutputRemoteElasticsearchSecretsSslKey1, error) { - var body NewOutputRemoteElasticsearchSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromNewOutputRemoteElasticsearchSecretsSslKey1 overwrites any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided NewOutputRemoteElasticsearchSecretsSslKey1 -func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) FromNewOutputRemoteElasticsearchSecretsSslKey1(v NewOutputRemoteElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeNewOutputRemoteElasticsearchSecretsSslKey1 performs a merge with any union data inside the NewOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided NewOutputRemoteElasticsearchSecretsSslKey1 -func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeNewOutputRemoteElasticsearchSecretsSslKey1(v NewOutputRemoteElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) +// ParseReadPrebuiltRulesAndTimelinesStatusResponse parses an HTTP response from a ReadPrebuiltRulesAndTimelinesStatusWithResponse call +func ParseReadPrebuiltRulesAndTimelinesStatusResponse(rsp *http.Response) (*ReadPrebuiltRulesAndTimelinesStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ReadPrebuiltRulesAndTimelinesStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t NewOutputRemoteElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // RulesCustomInstalled The total number of custom rules + RulesCustomInstalled int `json:"rules_custom_installed"` -func (t *NewOutputRemoteElasticsearch_Secrets_Ssl_Key) 
UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + // RulesInstalled The total number of installed prebuilt rules + RulesInstalled int `json:"rules_installed"` -// AsNewOutputElasticsearch returns the union data inside the NewOutputUnion as a NewOutputElasticsearch -func (t NewOutputUnion) AsNewOutputElasticsearch() (NewOutputElasticsearch, error) { - var body NewOutputElasticsearch - err := json.Unmarshal(t.union, &body) - return body, err -} + // RulesNotInstalled The total number of available prebuilt rules that are not installed + RulesNotInstalled int `json:"rules_not_installed"` -// FromNewOutputElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputElasticsearch -func (t *NewOutputUnion) FromNewOutputElasticsearch(v NewOutputElasticsearch) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // RulesNotUpdated The total number of outdated prebuilt rules + RulesNotUpdated int `json:"rules_not_updated"` -// MergeNewOutputElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputElasticsearch -func (t *NewOutputUnion) MergeNewOutputElasticsearch(v NewOutputElasticsearch) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + // TimelinesInstalled The total number of installed prebuilt timelines + TimelinesInstalled int `json:"timelines_installed"` - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + // TimelinesNotInstalled The total number of available prebuilt timelines that are not installed + TimelinesNotInstalled int `json:"timelines_not_installed"` -// AsNewOutputRemoteElasticsearch returns the union data inside the NewOutputUnion as a NewOutputRemoteElasticsearch -func (t NewOutputUnion) AsNewOutputRemoteElasticsearch() (NewOutputRemoteElasticsearch, error) { - var body NewOutputRemoteElasticsearch - err := json.Unmarshal(t.union, &body) - return body, err -} + // TimelinesNotUpdated The total number of outdated prebuilt timelines + TimelinesNotUpdated int `json:"timelines_not_updated"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromNewOutputRemoteElasticsearch overwrites any union data inside the NewOutputUnion as the provided NewOutputRemoteElasticsearch -func (t *NewOutputUnion) FromNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeNewOutputRemoteElasticsearch performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputRemoteElasticsearch -func (t *NewOutputUnion) MergeNewOutputRemoteElasticsearch(v NewOutputRemoteElasticsearch) error { - b, err := json.Marshal(v) +// ParseRulePreviewResponse parses an HTTP response from a RulePreviewWithResponse call +func ParseRulePreviewResponse(rsp *http.Response) (*RulePreviewResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &RulePreviewResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsNewOutputLogstash returns the union data inside the NewOutputUnion as a NewOutputLogstash -func (t NewOutputUnion) AsNewOutputLogstash() (NewOutputLogstash, error) { - var body NewOutputLogstash - err := json.Unmarshal(t.union, &body) - return body, err -} + 
switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + IsAborted *bool `json:"isAborted,omitempty"` + Logs []SecurityDetectionsAPIRulePreviewLogs `json:"logs"` -// FromNewOutputLogstash overwrites any union data inside the NewOutputUnion as the provided NewOutputLogstash -func (t *NewOutputUnion) FromNewOutputLogstash(v NewOutputLogstash) error { - b, err := json.Marshal(v) - t.union = b - return err -} + // PreviewId A string that does not contain only whitespace characters + PreviewId *SecurityDetectionsAPINonEmptyString `json:"previewId,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// MergeNewOutputLogstash performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputLogstash -func (t *NewOutputUnion) MergeNewOutputLogstash(v NewOutputLogstash) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// AsNewOutputKafka returns the union data inside the NewOutputUnion as a NewOutputKafka -func (t NewOutputUnion) AsNewOutputKafka() (NewOutputKafka, error) { - var body NewOutputKafka - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// FromNewOutputKafka overwrites any union data inside the NewOutputUnion as the provided NewOutputKafka -func (t *NewOutputUnion) FromNewOutputKafka(v NewOutputKafka) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeNewOutputKafka performs a merge with any union data inside the NewOutputUnion, using the provided NewOutputKafka -func (t *NewOutputUnion) MergeNewOutputKafka(v NewOutputKafka) error { - b, err := json.Marshal(v) +// ParseCreateRuleExceptionListItemsResponse parses an HTTP response from a CreateRuleExceptionListItemsWithResponse call +func ParseCreateRuleExceptionListItemsResponse(rsp *http.Response) (*CreateRuleExceptionListItemsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateRuleExceptionListItemsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t NewOutputUnion) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []SecurityExceptionsAPIExceptionListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *NewOutputUnion) UnmarshalJSON(b []byte) error { - err := 
t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsOutputElasticsearchSecretsSslKey0 returns the union data inside the OutputElasticsearch_Secrets_Ssl_Key as a OutputElasticsearchSecretsSslKey0 -func (t OutputElasticsearch_Secrets_Ssl_Key) AsOutputElasticsearchSecretsSslKey0() (OutputElasticsearchSecretsSslKey0, error) { - var body OutputElasticsearchSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromOutputElasticsearchSecretsSslKey0 overwrites any union data inside the OutputElasticsearch_Secrets_Ssl_Key as the provided OutputElasticsearchSecretsSslKey0 -func (t *OutputElasticsearch_Secrets_Ssl_Key) FromOutputElasticsearchSecretsSslKey0(v OutputElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// MergeOutputElasticsearchSecretsSslKey0 performs a merge with any union data inside the OutputElasticsearch_Secrets_Ssl_Key, using the provided OutputElasticsearchSecretsSslKey0 -func (t *OutputElasticsearch_Secrets_Ssl_Key) MergeOutputElasticsearchSecretsSslKey0(v OutputElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err + return response, nil } -// AsOutputElasticsearchSecretsSslKey1 returns the union data inside the OutputElasticsearch_Secrets_Ssl_Key as a OutputElasticsearchSecretsSslKey1 -func (t OutputElasticsearch_Secrets_Ssl_Key) AsOutputElasticsearchSecretsSslKey1() (OutputElasticsearchSecretsSslKey1, error) { - var body OutputElasticsearchSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} +// ParseSetAlertAssigneesResponse parses an HTTP response from a SetAlertAssigneesWithResponse call +func ParseSetAlertAssigneesResponse(rsp *http.Response) (*SetAlertAssigneesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } -// FromOutputElasticsearchSecretsSslKey1 overwrites any union data inside the OutputElasticsearch_Secrets_Ssl_Key as the provided OutputElasticsearchSecretsSslKey1 -func (t *OutputElasticsearch_Secrets_Ssl_Key) FromOutputElasticsearchSecretsSslKey1(v OutputElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err + response := &SetAlertAssigneesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// MergeOutputElasticsearchSecretsSslKey1 performs a merge with any union data inside the OutputElasticsearch_Secrets_Ssl_Key, 
using the provided OutputElasticsearchSecretsSslKey1 -func (t *OutputElasticsearch_Secrets_Ssl_Key) MergeOutputElasticsearchSecretsSslKey1(v OutputElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) +// ParseFinalizeAlertsMigrationResponse parses an HTTP response from a FinalizeAlertsMigrationWithResponse call +func ParseFinalizeAlertsMigrationResponse(rsp *http.Response) (*FinalizeAlertsMigrationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &FinalizeAlertsMigrationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t OutputElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []SecurityDetectionsAPIMigrationFinalizationResult + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *OutputElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsOutputKafkaSecretsPassword0 returns the union data inside the OutputKafka_Secrets_Password as a OutputKafkaSecretsPassword0 -func (t OutputKafka_Secrets_Password) AsOutputKafkaSecretsPassword0() (OutputKafkaSecretsPassword0, error) { - var body OutputKafkaSecretsPassword0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromOutputKafkaSecretsPassword0 overwrites any union data inside the OutputKafka_Secrets_Password as the provided OutputKafkaSecretsPassword0 -func (t *OutputKafka_Secrets_Password) FromOutputKafkaSecretsPassword0(v OutputKafkaSecretsPassword0) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// MergeOutputKafkaSecretsPassword0 performs a merge with any union data inside the OutputKafka_Secrets_Password, using the provided OutputKafkaSecretsPassword0 -func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword0(v OutputKafkaSecretsPassword0) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsOutputKafkaSecretsPassword1 returns the union data inside the OutputKafka_Secrets_Password as a OutputKafkaSecretsPassword1 -func (t OutputKafka_Secrets_Password) AsOutputKafkaSecretsPassword1() (OutputKafkaSecretsPassword1, error) { - var body OutputKafkaSecretsPassword1 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromOutputKafkaSecretsPassword1 overwrites any union data inside the OutputKafka_Secrets_Password as the provided 
OutputKafkaSecretsPassword1 -func (t *OutputKafka_Secrets_Password) FromOutputKafkaSecretsPassword1(v OutputKafkaSecretsPassword1) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeOutputKafkaSecretsPassword1 performs a merge with any union data inside the OutputKafka_Secrets_Password, using the provided OutputKafkaSecretsPassword1 -func (t *OutputKafka_Secrets_Password) MergeOutputKafkaSecretsPassword1(v OutputKafkaSecretsPassword1) error { - b, err := json.Marshal(v) +// ParseAlertsMigrationCleanupResponse parses an HTTP response from a AlertsMigrationCleanupWithResponse call +func ParseAlertsMigrationCleanupResponse(rsp *http.Response) (*AlertsMigrationCleanupResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &AlertsMigrationCleanupResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t OutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []SecurityDetectionsAPIMigrationCleanupResult + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *OutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsOutputKafkaSecretsSslKey0 returns the union data inside the OutputKafka_Secrets_Ssl_Key as a OutputKafkaSecretsSslKey0 -func (t OutputKafka_Secrets_Ssl_Key) AsOutputKafkaSecretsSslKey0() (OutputKafkaSecretsSslKey0, error) { - var body OutputKafkaSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromOutputKafkaSecretsSslKey0 overwrites any union data inside the OutputKafka_Secrets_Ssl_Key as the provided OutputKafkaSecretsSslKey0 -func (t *OutputKafka_Secrets_Ssl_Key) FromOutputKafkaSecretsSslKey0(v OutputKafkaSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// MergeOutputKafkaSecretsSslKey0 performs a merge with any union data inside the OutputKafka_Secrets_Ssl_Key, using the provided OutputKafkaSecretsSslKey0 -func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey0(v OutputKafkaSecretsSslKey0) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsOutputKafkaSecretsSslKey1 returns the union data inside the OutputKafka_Secrets_Ssl_Key as a OutputKafkaSecretsSslKey1 -func (t OutputKafka_Secrets_Ssl_Key) AsOutputKafkaSecretsSslKey1() 
(OutputKafkaSecretsSslKey1, error) { - var body OutputKafkaSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromOutputKafkaSecretsSslKey1 overwrites any union data inside the OutputKafka_Secrets_Ssl_Key as the provided OutputKafkaSecretsSslKey1 -func (t *OutputKafka_Secrets_Ssl_Key) FromOutputKafkaSecretsSslKey1(v OutputKafkaSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeOutputKafkaSecretsSslKey1 performs a merge with any union data inside the OutputKafka_Secrets_Ssl_Key, using the provided OutputKafkaSecretsSslKey1 -func (t *OutputKafka_Secrets_Ssl_Key) MergeOutputKafkaSecretsSslKey1(v OutputKafkaSecretsSslKey1) error { - b, err := json.Marshal(v) +// ParseCreateAlertsMigrationResponse parses an HTTP response from a CreateAlertsMigrationWithResponse call +func ParseCreateAlertsMigrationResponse(rsp *http.Response) (*CreateAlertsMigrationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateAlertsMigrationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t OutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Indices []CreateAlertsMigration_200_Indices_Item `json:"indices"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *OutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsOutputLogstashSecretsSslKey0 returns the union data inside the OutputLogstash_Secrets_Ssl_Key as a OutputLogstashSecretsSslKey0 -func (t OutputLogstash_Secrets_Ssl_Key) AsOutputLogstashSecretsSslKey0() (OutputLogstashSecretsSslKey0, error) { - var body OutputLogstashSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromOutputLogstashSecretsSslKey0 overwrites any union data inside the OutputLogstash_Secrets_Ssl_Key as the provided OutputLogstashSecretsSslKey0 -func (t *OutputLogstash_Secrets_Ssl_Key) FromOutputLogstashSecretsSslKey0(v OutputLogstashSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeOutputLogstashSecretsSslKey0 performs a merge with any union data inside the OutputLogstash_Secrets_Ssl_Key, using the provided OutputLogstashSecretsSslKey0 -func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey0(v OutputLogstashSecretsSslKey0) error { - b, 
err := json.Marshal(v) +// ParseReadAlertsMigrationStatusResponse parses an HTTP response from a ReadAlertsMigrationStatusWithResponse call +func ParseReadAlertsMigrationStatusResponse(rsp *http.Response) (*ReadAlertsMigrationStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ReadAlertsMigrationStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsOutputLogstashSecretsSslKey1 returns the union data inside the OutputLogstash_Secrets_Ssl_Key as a OutputLogstashSecretsSslKey1 -func (t OutputLogstash_Secrets_Ssl_Key) AsOutputLogstashSecretsSslKey1() (OutputLogstashSecretsSslKey1, error) { - var body OutputLogstashSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Indices []SecurityDetectionsAPIIndexMigrationStatus `json:"indices"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromOutputLogstashSecretsSslKey1 overwrites any union data inside the OutputLogstash_Secrets_Ssl_Key as the provided OutputLogstashSecretsSslKey1 -func (t *OutputLogstash_Secrets_Ssl_Key) FromOutputLogstashSecretsSslKey1(v OutputLogstashSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeOutputLogstashSecretsSslKey1 performs a merge with any union data inside the OutputLogstash_Secrets_Ssl_Key, using the provided OutputLogstashSecretsSslKey1 -func (t *OutputLogstash_Secrets_Ssl_Key) MergeOutputLogstashSecretsSslKey1(v OutputLogstashSecretsSslKey1) error { - b, err := json.Marshal(v) +// ParseSearchAlertsResponse parses an HTTP response from a SearchAlertsWithResponse call +func ParseSearchAlertsResponse(rsp *http.Response) (*SearchAlertsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &SearchAlertsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t OutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *OutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := 
t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as a OutputRemoteElasticsearchSecretsServiceToken0 -func (t OutputRemoteElasticsearch_Secrets_ServiceToken) AsOutputRemoteElasticsearchSecretsServiceToken0() (OutputRemoteElasticsearchSecretsServiceToken0, error) { - var body OutputRemoteElasticsearchSecretsServiceToken0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as the provided OutputRemoteElasticsearchSecretsServiceToken0 -func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) FromOutputRemoteElasticsearchSecretsServiceToken0(v OutputRemoteElasticsearchSecretsServiceToken0) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken, using the provided OutputRemoteElasticsearchSecretsServiceToken0 -func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasticsearchSecretsServiceToken0(v OutputRemoteElasticsearchSecretsServiceToken0) error { - b, err := json.Marshal(v) +// ParseSetAlertsStatusResponse parses an HTTP response from a SetAlertsStatusWithResponse call +func ParseSetAlertsStatusResponse(rsp *http.Response) (*SetAlertsStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &SetAlertsStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as a OutputRemoteElasticsearchSecretsServiceToken1 -func (t OutputRemoteElasticsearch_Secrets_ServiceToken) AsOutputRemoteElasticsearchSecretsServiceToken1() (OutputRemoteElasticsearchSecretsServiceToken1, error) { - var body OutputRemoteElasticsearchSecretsServiceToken1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken as the provided OutputRemoteElasticsearchSecretsServiceToken1 -func (t 
*OutputRemoteElasticsearch_Secrets_ServiceToken) FromOutputRemoteElasticsearchSecretsServiceToken1(v OutputRemoteElasticsearchSecretsServiceToken1) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_ServiceToken, using the provided OutputRemoteElasticsearchSecretsServiceToken1 -func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) MergeOutputRemoteElasticsearchSecretsServiceToken1(v OutputRemoteElasticsearchSecretsServiceToken1) error { - b, err := json.Marshal(v) +// ParseSetAlertTagsResponse parses an HTTP response from a SetAlertTagsWithResponse call +func ParseSetAlertTagsResponse(rsp *http.Response) (*SetAlertTagsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &SetAlertTagsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t OutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *OutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsOutputRemoteElasticsearchSecretsSslKey0 returns the union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as a OutputRemoteElasticsearchSecretsSslKey0 -func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) AsOutputRemoteElasticsearchSecretsSslKey0() (OutputRemoteElasticsearchSecretsSslKey0, error) { - var body OutputRemoteElasticsearchSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityDetectionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromOutputRemoteElasticsearchSecretsSslKey0 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as the provided OutputRemoteElasticsearchSecretsSslKey0 -func (t 
*OutputRemoteElasticsearch_Secrets_Ssl_Key) FromOutputRemoteElasticsearchSecretsSslKey0(v OutputRemoteElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityDetectionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeOutputRemoteElasticsearchSecretsSslKey0 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided OutputRemoteElasticsearchSecretsSslKey0 -func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) MergeOutputRemoteElasticsearchSecretsSslKey0(v OutputRemoteElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) +// ParseReadTagsResponse parses an HTTP response from a ReadTagsWithResponse call +func ParseReadTagsResponse(rsp *http.Response) (*ReadTagsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ReadTagsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsOutputRemoteElasticsearchSecretsSslKey1 returns the union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as a OutputRemoteElasticsearchSecretsSslKey1 -func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) AsOutputRemoteElasticsearchSecretsSslKey1() (OutputRemoteElasticsearchSecretsSslKey1, error) { - var body OutputRemoteElasticsearchSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityDetectionsAPIRuleTagArray + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromOutputRemoteElasticsearchSecretsSslKey1 overwrites any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key as the provided OutputRemoteElasticsearchSecretsSslKey1 -func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) FromOutputRemoteElasticsearchSecretsSslKey1(v OutputRemoteElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeOutputRemoteElasticsearchSecretsSslKey1 performs a merge with any union data inside the OutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided OutputRemoteElasticsearchSecretsSslKey1 -func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) MergeOutputRemoteElasticsearchSecretsSslKey1(v OutputRemoteElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) +// ParseRotateEncryptionKeyResponse parses an HTTP response from a RotateEncryptionKeyWithResponse call +func ParseRotateEncryptionKeyResponse(rsp *http.Response) (*RotateEncryptionKeyResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err + } + + response := &RotateEncryptionKeyResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Failed Indicates the number of the saved objects that were still encrypted with one of the old encryption keys that Kibana failed to re-encrypt 
with the primary key. + Failed *float32 `json:"failed,omitempty"` -func (t OutputRemoteElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + // Successful Indicates the total number of all encrypted saved objects (optionally filtered by the requested `type`), regardless of the key Kibana used for encryption. + // + // NOTE: In most cases, `total` will be greater than `successful` even if `failed` is zero. The reason is that Kibana may not need or may not be able to rotate encryption keys for all encrypted saved objects. + Successful *float32 `json:"successful,omitempty"` -func (t *OutputRemoteElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + // Total Indicates the total number of all encrypted saved objects (optionally filtered by the requested `type`), regardless of the key Kibana used for encryption. + Total *float32 `json:"total,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsOutputElasticsearch returns the union data inside the OutputUnion as a OutputElasticsearch -func (t OutputUnion) AsOutputElasticsearch() (OutputElasticsearch, error) { - var body OutputElasticsearch - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SavedObjects400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// FromOutputElasticsearch overwrites any union data inside the OutputUnion as the provided OutputElasticsearch -func (t *OutputUnion) FromOutputElasticsearch(v OutputElasticsearch) error { - v.Type = "elasticsearch" - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 429: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON429 = &dest + + } + + return response, nil } -// MergeOutputElasticsearch performs a merge with any union data inside the OutputUnion, using the provided OutputElasticsearch -func (t *OutputUnion) MergeOutputElasticsearch(v OutputElasticsearch) error { - v.Type = "elasticsearch" - b, err := json.Marshal(v) +// ParseEndpointGetActionsListResponse parses an HTTP response from a EndpointGetActionsListWithResponse call +func ParseEndpointGetActionsListResponse(rsp *http.Response) (*EndpointGetActionsListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointGetActionsListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsOutputRemoteElasticsearch returns the union data inside the OutputUnion as a OutputRemoteElasticsearch -func (t OutputUnion) AsOutputRemoteElasticsearch() (OutputRemoteElasticsearch, error) { - var body OutputRemoteElasticsearch - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIGetEndpointActionListResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromOutputRemoteElasticsearch overwrites any union data 
inside the OutputUnion as the provided OutputRemoteElasticsearch -func (t *OutputUnion) FromOutputRemoteElasticsearch(v OutputRemoteElasticsearch) error { - v.Type = "remote_elasticsearch" - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeOutputRemoteElasticsearch performs a merge with any union data inside the OutputUnion, using the provided OutputRemoteElasticsearch -func (t *OutputUnion) MergeOutputRemoteElasticsearch(v OutputRemoteElasticsearch) error { - v.Type = "remote_elasticsearch" - b, err := json.Marshal(v) +// ParseEndpointExecuteActionResponse parses an HTTP response from a EndpointExecuteActionWithResponse call +func ParseEndpointExecuteActionResponse(rsp *http.Response) (*EndpointExecuteActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointExecuteActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsOutputLogstash returns the union data inside the OutputUnion as a OutputLogstash -func (t OutputUnion) AsOutputLogstash() (OutputLogstash, error) { - var body OutputLogstash - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIExecuteRouteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromOutputLogstash overwrites any union data inside the OutputUnion as the provided OutputLogstash -func (t *OutputUnion) FromOutputLogstash(v OutputLogstash) error { - v.Type = "logstash" - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeOutputLogstash performs a merge with any union data inside the OutputUnion, using the provided OutputLogstash -func (t *OutputUnion) MergeOutputLogstash(v OutputLogstash) error { - v.Type = "logstash" - b, err := json.Marshal(v) +// ParseEndpointGetFileActionResponse parses an HTTP response from a EndpointGetFileActionWithResponse call +func ParseEndpointGetFileActionResponse(rsp *http.Response) (*EndpointGetFileActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointGetFileActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsOutputKafka returns the union data inside the OutputUnion as a OutputKafka -func (t OutputUnion) AsOutputKafka() (OutputKafka, error) { - var body OutputKafka - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIGetFileRouteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromOutputKafka overwrites any union data inside the OutputUnion as the provided OutputKafka -func (t *OutputUnion) FromOutputKafka(v OutputKafka) error { - v.Type = "kafka" - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeOutputKafka performs a merge with any union data inside the OutputUnion, using the provided OutputKafka -func (t *OutputUnion) MergeOutputKafka(v 
OutputKafka) error { - v.Type = "kafka" - b, err := json.Marshal(v) +// ParseEndpointIsolateActionResponse parses an HTTP response from a EndpointIsolateActionWithResponse call +func ParseEndpointIsolateActionResponse(rsp *http.Response) (*EndpointIsolateActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointIsolateActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIIsolateRouteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t OutputUnion) Discriminator() (string, error) { - var discriminator struct { - Discriminator string `json:"type"` } - err := json.Unmarshal(t.union, &discriminator) - return discriminator.Discriminator, err + + return response, nil } -func (t OutputUnion) ValueByDiscriminator() (interface{}, error) { - discriminator, err := t.Discriminator() +// ParseEndpointKillProcessActionResponse parses an HTTP response from a EndpointKillProcessActionWithResponse call +func ParseEndpointKillProcessActionResponse(rsp *http.Response) (*EndpointKillProcessActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - switch discriminator { - case "elasticsearch": - return t.AsOutputElasticsearch() - case "kafka": - return t.AsOutputKafka() - case "logstash": - return t.AsOutputLogstash() - case "remote_elasticsearch": - return t.AsOutputRemoteElasticsearch() - default: - return nil, errors.New("unknown discriminator value: " + discriminator) - } -} -func (t OutputUnion) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + response := &EndpointKillProcessActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t *OutputUnion) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIKillProcessRouteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 returns the union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0() (PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0, error) { - var body PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 overwrites any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0) error { - b, 
err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 performs a merge with any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType0) error { - b, err := json.Marshal(v) +// ParseEndpointGetProcessesActionResponse parses an HTTP response from a EndpointGetProcessesActionWithResponse call +func ParseEndpointGetProcessesActionResponse(rsp *http.Response) (*EndpointGetProcessesActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointGetProcessesActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 returns the union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1() (PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1, error) { - var body PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIGetProcessesRouteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 overwrites any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 performs a merge with any union data inside the PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageInfoInstallationInfoAdditionalSpacesInstalledKibanaType1) error { - b, err := json.Marshal(v) +// ParseRunScriptActionResponse parses an HTTP response from a RunScriptActionWithResponse call +func ParseRunScriptActionResponse(rsp *http.Response) (*RunScriptActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t 
PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + response := &RunScriptActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t *PackageInfo_InstallationInfo_AdditionalSpacesInstalledKibana_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIResponseActionCreateSuccessResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsPackageInfoInstallationInfoInstalledKibanaType0 returns the union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as a PackageInfoInstallationInfoInstalledKibanaType0 -func (t PackageInfo_InstallationInfo_InstalledKibana_Type) AsPackageInfoInstallationInfoInstalledKibanaType0() (PackageInfoInstallationInfoInstalledKibanaType0, error) { - var body PackageInfoInstallationInfoInstalledKibanaType0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromPackageInfoInstallationInfoInstalledKibanaType0 overwrites any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as the provided PackageInfoInstallationInfoInstalledKibanaType0 -func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) FromPackageInfoInstallationInfoInstalledKibanaType0(v PackageInfoInstallationInfoInstalledKibanaType0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergePackageInfoInstallationInfoInstalledKibanaType0 performs a merge with any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type, using the provided PackageInfoInstallationInfoInstalledKibanaType0 -func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) MergePackageInfoInstallationInfoInstalledKibanaType0(v PackageInfoInstallationInfoInstalledKibanaType0) error { - b, err := json.Marshal(v) +// ParseEndpointScanActionResponse parses an HTTP response from a EndpointScanActionWithResponse call +func ParseEndpointScanActionResponse(rsp *http.Response) (*EndpointScanActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointScanActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPackageInfoInstallationInfoInstalledKibanaType1 returns the union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as a PackageInfoInstallationInfoInstalledKibanaType1 -func (t PackageInfo_InstallationInfo_InstalledKibana_Type) AsPackageInfoInstallationInfoInstalledKibanaType1() (PackageInfoInstallationInfoInstalledKibanaType1, error) { - var body PackageInfoInstallationInfoInstalledKibanaType1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIScanRouteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPackageInfoInstallationInfoInstalledKibanaType1 overwrites any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type as the provided PackageInfoInstallationInfoInstalledKibanaType1 -func (t 
*PackageInfo_InstallationInfo_InstalledKibana_Type) FromPackageInfoInstallationInfoInstalledKibanaType1(v PackageInfoInstallationInfoInstalledKibanaType1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePackageInfoInstallationInfoInstalledKibanaType1 performs a merge with any union data inside the PackageInfo_InstallationInfo_InstalledKibana_Type, using the provided PackageInfoInstallationInfoInstalledKibanaType1 -func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) MergePackageInfoInstallationInfoInstalledKibanaType1(v PackageInfoInstallationInfoInstalledKibanaType1) error { - b, err := json.Marshal(v) +// ParseEndpointGetActionsStateResponse parses an HTTP response from a EndpointGetActionsStateWithResponse call +func ParseEndpointGetActionsStateResponse(rsp *http.Response) (*EndpointGetActionsStateResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t PackageInfo_InstallationInfo_InstalledKibana_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + response := &EndpointGetActionsStateResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t *PackageInfo_InstallationInfo_InstalledKibana_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIActionStateSuccessResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsPackageInfoType0 returns the union data inside the PackageInfo_Type as a PackageInfoType0 -func (t PackageInfo_Type) AsPackageInfoType0() (PackageInfoType0, error) { - var body PackageInfoType0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromPackageInfoType0 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType0 -func (t *PackageInfo_Type) FromPackageInfoType0(v PackageInfoType0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergePackageInfoType0 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType0 -func (t *PackageInfo_Type) MergePackageInfoType0(v PackageInfoType0) error { - b, err := json.Marshal(v) +// ParseEndpointSuspendProcessActionResponse parses an HTTP response from a EndpointSuspendProcessActionWithResponse call +func ParseEndpointSuspendProcessActionResponse(rsp *http.Response) (*EndpointSuspendProcessActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointSuspendProcessActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPackageInfoType1 returns the union data inside the PackageInfo_Type as a PackageInfoType1 -func (t PackageInfo_Type) AsPackageInfoType1() (PackageInfoType1, error) { - var body PackageInfoType1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPISuspendProcessRouteResponse + if err := json.Unmarshal(bodyBytes, 
&dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPackageInfoType1 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType1 -func (t *PackageInfo_Type) FromPackageInfoType1(v PackageInfoType1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePackageInfoType1 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType1 -func (t *PackageInfo_Type) MergePackageInfoType1(v PackageInfoType1) error { - b, err := json.Marshal(v) +// ParseEndpointUnisolateActionResponse parses an HTTP response from a EndpointUnisolateActionWithResponse call +func ParseEndpointUnisolateActionResponse(rsp *http.Response) (*EndpointUnisolateActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointUnisolateActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPackageInfoType2 returns the union data inside the PackageInfo_Type as a PackageInfoType2 -func (t PackageInfo_Type) AsPackageInfoType2() (PackageInfoType2, error) { - var body PackageInfoType2 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIUnisolateRouteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPackageInfoType2 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType2 -func (t *PackageInfo_Type) FromPackageInfoType2(v PackageInfoType2) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePackageInfoType2 performs a merge with any union data inside the PackageInfo_Type, using the provided PackageInfoType2 -func (t *PackageInfo_Type) MergePackageInfoType2(v PackageInfoType2) error { - b, err := json.Marshal(v) +// ParseEndpointUploadActionResponse parses an HTTP response from a EndpointUploadActionWithResponse call +func ParseEndpointUploadActionResponse(rsp *http.Response) (*EndpointUploadActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointUploadActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPackageInfoType3 returns the union data inside the PackageInfo_Type as a PackageInfoType3 -func (t PackageInfo_Type) AsPackageInfoType3() (PackageInfoType3, error) { - var body PackageInfoType3 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIUploadRouteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPackageInfoType3 overwrites any union data inside the PackageInfo_Type as the provided PackageInfoType3 -func (t *PackageInfo_Type) FromPackageInfoType3(v PackageInfoType3) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePackageInfoType3 performs a merge with any union data inside the 
PackageInfo_Type, using the provided PackageInfoType3 -func (t *PackageInfo_Type) MergePackageInfoType3(v PackageInfoType3) error { - b, err := json.Marshal(v) +// ParseEndpointGetActionsDetailsResponse parses an HTTP response from a EndpointGetActionsDetailsWithResponse call +func ParseEndpointGetActionsDetailsResponse(rsp *http.Response) (*EndpointGetActionsDetailsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t PackageInfo_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + response := &EndpointGetActionsDetailsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t *PackageInfo_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIGetEndpointActionResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 returns the union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0() (PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0, error) { - var body PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 overwrites any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 performs a merge with any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0 -func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType0) error { - b, err := json.Marshal(v) +// ParseEndpointFileInfoResponse parses an HTTP response from a EndpointFileInfoWithResponse call +func ParseEndpointFileInfoResponse(rsp *http.Response) (*EndpointFileInfoResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointFileInfoResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 returns the union data inside the 
PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as a PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) AsPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1() (PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1, error) { - var body PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPISuccessResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 overwrites any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type as the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) FromPackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 performs a merge with any union data inside the PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type, using the provided PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1 -func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MergePackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1(v PackageListItemInstallationInfoAdditionalSpacesInstalledKibanaType1) error { - b, err := json.Marshal(v) +// ParseEndpointFileDownloadResponse parses an HTTP response from a EndpointFileDownloadWithResponse call +func ParseEndpointFileDownloadResponse(rsp *http.Response) (*EndpointFileDownloadResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + response := &EndpointFileDownloadResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t *PackageListItem_InstallationInfo_AdditionalSpacesInstalledKibana_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPISuccessResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsPackageListItemInstallationInfoInstalledKibanaType0 returns the union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as a PackageListItemInstallationInfoInstalledKibanaType0 -func (t PackageListItem_InstallationInfo_InstalledKibana_Type) AsPackageListItemInstallationInfoInstalledKibanaType0() (PackageListItemInstallationInfoInstalledKibanaType0, error) { - var body PackageListItemInstallationInfoInstalledKibanaType0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// 
FromPackageListItemInstallationInfoInstalledKibanaType0 overwrites any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as the provided PackageListItemInstallationInfoInstalledKibanaType0 -func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) FromPackageListItemInstallationInfoInstalledKibanaType0(v PackageListItemInstallationInfoInstalledKibanaType0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergePackageListItemInstallationInfoInstalledKibanaType0 performs a merge with any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type, using the provided PackageListItemInstallationInfoInstalledKibanaType0 -func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) MergePackageListItemInstallationInfoInstalledKibanaType0(v PackageListItemInstallationInfoInstalledKibanaType0) error { - b, err := json.Marshal(v) +// ParseEndpointGetActionsStatusResponse parses an HTTP response from a EndpointGetActionsStatusWithResponse call +func ParseEndpointGetActionsStatusResponse(rsp *http.Response) (*EndpointGetActionsStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &EndpointGetActionsStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPackageListItemInstallationInfoInstalledKibanaType1 returns the union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as a PackageListItemInstallationInfoInstalledKibanaType1 -func (t PackageListItem_InstallationInfo_InstalledKibana_Type) AsPackageListItemInstallationInfoInstalledKibanaType1() (PackageListItemInstallationInfoInstalledKibanaType1, error) { - var body PackageListItemInstallationInfoInstalledKibanaType1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIActionStatusSuccessResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPackageListItemInstallationInfoInstalledKibanaType1 overwrites any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type as the provided PackageListItemInstallationInfoInstalledKibanaType1 -func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) FromPackageListItemInstallationInfoInstalledKibanaType1(v PackageListItemInstallationInfoInstalledKibanaType1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePackageListItemInstallationInfoInstalledKibanaType1 performs a merge with any union data inside the PackageListItem_InstallationInfo_InstalledKibana_Type, using the provided PackageListItemInstallationInfoInstalledKibanaType1 -func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) MergePackageListItemInstallationInfoInstalledKibanaType1(v PackageListItemInstallationInfoInstalledKibanaType1) error { - b, err := json.Marshal(v) +// ParseGetEndpointMetadataListResponse parses an HTTP response from a GetEndpointMetadataListWithResponse call +func ParseGetEndpointMetadataListResponse(rsp *http.Response) (*GetEndpointMetadataListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := 
runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t PackageListItem_InstallationInfo_InstalledKibana_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + response := &GetEndpointMetadataListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t *PackageListItem_InstallationInfo_InstalledKibana_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIMetadataListResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsPackageListItemType0 returns the union data inside the PackageListItem_Type as a PackageListItemType0 -func (t PackageListItem_Type) AsPackageListItemType0() (PackageListItemType0, error) { - var body PackageListItemType0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromPackageListItemType0 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType0 -func (t *PackageListItem_Type) FromPackageListItemType0(v PackageListItemType0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergePackageListItemType0 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType0 -func (t *PackageListItem_Type) MergePackageListItemType0(v PackageListItemType0) error { - b, err := json.Marshal(v) +// ParseGetEndpointMetadataResponse parses an HTTP response from a GetEndpointMetadataWithResponse call +func ParseGetEndpointMetadataResponse(rsp *http.Response) (*GetEndpointMetadataResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetEndpointMetadataResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPackageListItemType1 returns the union data inside the PackageListItem_Type as a PackageListItemType1 -func (t PackageListItem_Type) AsPackageListItemType1() (PackageListItemType1, error) { - var body PackageListItemType1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIEndpointMetadataResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPackageListItemType1 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType1 -func (t *PackageListItem_Type) FromPackageListItemType1(v PackageListItemType1) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePackageListItemType1 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType1 -func (t *PackageListItem_Type) MergePackageListItemType1(v PackageListItemType1) error { - b, err := json.Marshal(v) +// ParseGetPolicyResponseResponse parses an HTTP response from a GetPolicyResponseWithResponse call +func ParseGetPolicyResponseResponse(rsp *http.Response) (*GetPolicyResponseResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := 
runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetPolicyResponseResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPackageListItemType2 returns the union data inside the PackageListItem_Type as a PackageListItemType2 -func (t PackageListItem_Type) AsPackageListItemType2() (PackageListItemType2, error) { - var body PackageListItemType2 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPISuccessResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPackageListItemType2 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType2 -func (t *PackageListItem_Type) FromPackageListItemType2(v PackageListItemType2) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePackageListItemType2 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType2 -func (t *PackageListItem_Type) MergePackageListItemType2(v PackageListItemType2) error { - b, err := json.Marshal(v) +// ParseGetProtectionUpdatesNoteResponse parses an HTTP response from a GetProtectionUpdatesNoteWithResponse call +func ParseGetProtectionUpdatesNoteResponse(rsp *http.Response) (*GetProtectionUpdatesNoteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetProtectionUpdatesNoteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPackageListItemType3 returns the union data inside the PackageListItem_Type as a PackageListItemType3 -func (t PackageListItem_Type) AsPackageListItemType3() (PackageListItemType3, error) { - var body PackageListItemType3 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIProtectionUpdatesNoteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPackageListItemType3 overwrites any union data inside the PackageListItem_Type as the provided PackageListItemType3 -func (t *PackageListItem_Type) FromPackageListItemType3(v PackageListItemType3) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergePackageListItemType3 performs a merge with any union data inside the PackageListItem_Type, using the provided PackageListItemType3 -func (t *PackageListItem_Type) MergePackageListItemType3(v PackageListItemType3) error { - b, err := json.Marshal(v) +// ParseCreateUpdateProtectionUpdatesNoteResponse parses an HTTP response from a CreateUpdateProtectionUpdatesNoteWithResponse call +func ParseCreateUpdateProtectionUpdatesNoteResponse(rsp *http.Response) (*CreateUpdateProtectionUpdatesNoteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -func (t PackageListItem_Type) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + response := 
&CreateUpdateProtectionUpdatesNoteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t *PackageListItem_Type) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointManagementAPIProtectionUpdatesNoteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// AsServerHostSecretsSslEsKey0 returns the union data inside the ServerHost_Secrets_Ssl_EsKey as a ServerHostSecretsSslEsKey0 -func (t ServerHost_Secrets_Ssl_EsKey) AsServerHostSecretsSslEsKey0() (ServerHostSecretsSslEsKey0, error) { - var body ServerHostSecretsSslEsKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromServerHostSecretsSslEsKey0 overwrites any union data inside the ServerHost_Secrets_Ssl_EsKey as the provided ServerHostSecretsSslEsKey0 -func (t *ServerHost_Secrets_Ssl_EsKey) FromServerHostSecretsSslEsKey0(v ServerHostSecretsSslEsKey0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeServerHostSecretsSslEsKey0 performs a merge with any union data inside the ServerHost_Secrets_Ssl_EsKey, using the provided ServerHostSecretsSslEsKey0 -func (t *ServerHost_Secrets_Ssl_EsKey) MergeServerHostSecretsSslEsKey0(v ServerHostSecretsSslEsKey0) error { - b, err := json.Marshal(v) +// ParseCreateEndpointListResponse parses an HTTP response from a CreateEndpointListWithResponse call +func ParseCreateEndpointListResponse(rsp *http.Response) (*CreateEndpointListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateEndpointListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsServerHostSecretsSslEsKey1 returns the union data inside the ServerHost_Secrets_Ssl_EsKey as a ServerHostSecretsSslEsKey1 -func (t ServerHost_Secrets_Ssl_EsKey) AsServerHostSecretsSslEsKey1() (ServerHostSecretsSslEsKey1, error) { - var body ServerHostSecretsSslEsKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointExceptionsAPIEndpointList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromServerHostSecretsSslEsKey1 overwrites any union data inside the ServerHost_Secrets_Ssl_EsKey as the provided ServerHostSecretsSslEsKey1 -func (t *ServerHost_Secrets_Ssl_EsKey) FromServerHostSecretsSslEsKey1(v ServerHostSecretsSslEsKey1) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeServerHostSecretsSslEsKey1 performs a merge with any union data inside the ServerHost_Secrets_Ssl_EsKey, using the provided ServerHostSecretsSslEsKey1 -func (t *ServerHost_Secrets_Ssl_EsKey) MergeServerHostSecretsSslEsKey1(v ServerHostSecretsSslEsKey1) error { - b, err := json.Marshal(v) +// ParseDeleteEndpointListItemResponse parses an HTTP response from a DeleteEndpointListItemWithResponse call +func ParseDeleteEndpointListItemResponse(rsp *http.Response) (*DeleteEndpointListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteEndpointListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t ServerHost_Secrets_Ssl_EsKey) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointExceptionsAPIEndpointListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *ServerHost_Secrets_Ssl_EsKey) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// AsServerHostSecretsSslKey0 returns the union data inside the ServerHost_Secrets_Ssl_Key as a ServerHostSecretsSslKey0 -func (t ServerHost_Secrets_Ssl_Key) AsServerHostSecretsSslKey0() (ServerHostSecretsSslKey0, error) { - var body ServerHostSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// FromServerHostSecretsSslKey0 overwrites any union data inside the ServerHost_Secrets_Ssl_Key as the provided ServerHostSecretsSslKey0 -func (t *ServerHost_Secrets_Ssl_Key) FromServerHostSecretsSslKey0(v ServerHostSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// MergeServerHostSecretsSslKey0 performs a merge with any union data inside the ServerHost_Secrets_Ssl_Key, using the provided ServerHostSecretsSslKey0 -func (t *ServerHost_Secrets_Ssl_Key) MergeServerHostSecretsSslKey0(v ServerHostSecretsSslKey0) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest 
SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// AsServerHostSecretsSslKey1 returns the union data inside the ServerHost_Secrets_Ssl_Key as a ServerHostSecretsSslKey1 -func (t ServerHost_Secrets_Ssl_Key) AsServerHostSecretsSslKey1() (ServerHostSecretsSslKey1, error) { - var body ServerHostSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromServerHostSecretsSslKey1 overwrites any union data inside the ServerHost_Secrets_Ssl_Key as the provided ServerHostSecretsSslKey1 -func (t *ServerHost_Secrets_Ssl_Key) FromServerHostSecretsSslKey1(v ServerHostSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeServerHostSecretsSslKey1 performs a merge with any union data inside the ServerHost_Secrets_Ssl_Key, using the provided ServerHostSecretsSslKey1 -func (t *ServerHost_Secrets_Ssl_Key) MergeServerHostSecretsSslKey1(v ServerHostSecretsSslKey1) error { - b, err := json.Marshal(v) +// ParseReadEndpointListItemResponse parses an HTTP response from a ReadEndpointListItemWithResponse call +func ParseReadEndpointListItemResponse(rsp *http.Response) (*ReadEndpointListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ReadEndpointListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t ServerHost_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []SecurityEndpointExceptionsAPIEndpointListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *ServerHost_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && 
rsp.StatusCode == 500: + var dest SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// AsBedrockConfig returns the union data inside the UpdateConnectorConfig as a BedrockConfig -func (t UpdateConnectorConfig) AsBedrockConfig() (BedrockConfig, error) { - var body BedrockConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromBedrockConfig overwrites any union data inside the UpdateConnectorConfig as the provided BedrockConfig -func (t *UpdateConnectorConfig) FromBedrockConfig(v BedrockConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeBedrockConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided BedrockConfig -func (t *UpdateConnectorConfig) MergeBedrockConfig(v BedrockConfig) error { - b, err := json.Marshal(v) +// ParseCreateEndpointListItemResponse parses an HTTP response from a CreateEndpointListItemWithResponse call +func ParseCreateEndpointListItemResponse(rsp *http.Response) (*CreateEndpointListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateEndpointListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsCrowdstrikeConfig returns the union data inside the UpdateConnectorConfig as a CrowdstrikeConfig -func (t UpdateConnectorConfig) AsCrowdstrikeConfig() (CrowdstrikeConfig, error) { - var body CrowdstrikeConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointExceptionsAPIEndpointListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromCrowdstrikeConfig overwrites any union data inside the UpdateConnectorConfig as the provided CrowdstrikeConfig -func (t *UpdateConnectorConfig) FromCrowdstrikeConfig(v CrowdstrikeConfig) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeCrowdstrikeConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided CrowdstrikeConfig -func (t *UpdateConnectorConfig) MergeCrowdstrikeConfig(v CrowdstrikeConfig) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsD3securityConfig returns the union data inside the UpdateConnectorConfig as a D3securityConfig -func (t UpdateConnectorConfig) 
AsD3securityConfig() (D3securityConfig, error) { - var body D3securityConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest -// FromD3securityConfig overwrites any union data inside the UpdateConnectorConfig as the provided D3securityConfig -func (t *UpdateConnectorConfig) FromD3securityConfig(v D3securityConfig) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// MergeD3securityConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided D3securityConfig -func (t *UpdateConnectorConfig) MergeD3securityConfig(v D3securityConfig) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsEmailConfig returns the union data inside the UpdateConnectorConfig as a EmailConfig -func (t UpdateConnectorConfig) AsEmailConfig() (EmailConfig, error) { - var body EmailConfig - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromEmailConfig overwrites any union data inside the UpdateConnectorConfig as the provided EmailConfig -func (t *UpdateConnectorConfig) FromEmailConfig(v EmailConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeEmailConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided EmailConfig -func (t *UpdateConnectorConfig) MergeEmailConfig(v EmailConfig) error { - b, err := json.Marshal(v) +// ParseUpdateEndpointListItemResponse parses an HTTP response from a UpdateEndpointListItemWithResponse call +func ParseUpdateEndpointListItemResponse(rsp *http.Response) (*UpdateEndpointListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &UpdateEndpointListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsGeminiConfig returns the union data inside the UpdateConnectorConfig as a GeminiConfig -func (t UpdateConnectorConfig) AsGeminiConfig() (GeminiConfig, error) { - var body GeminiConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEndpointExceptionsAPIEndpointListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromGeminiConfig overwrites any union data inside the UpdateConnectorConfig as the provided GeminiConfig -func (t *UpdateConnectorConfig) FromGeminiConfig(v GeminiConfig) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeGeminiConfig 
performs a merge with any union data inside the UpdateConnectorConfig, using the provided GeminiConfig -func (t *UpdateConnectorConfig) MergeGeminiConfig(v GeminiConfig) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsResilientConfig returns the union data inside the UpdateConnectorConfig as a ResilientConfig -func (t UpdateConnectorConfig) AsResilientConfig() (ResilientConfig, error) { - var body ResilientConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// FromResilientConfig overwrites any union data inside the UpdateConnectorConfig as the provided ResilientConfig -func (t *UpdateConnectorConfig) FromResilientConfig(v ResilientConfig) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// MergeResilientConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ResilientConfig -func (t *UpdateConnectorConfig) MergeResilientConfig(v ResilientConfig) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsIndexConfig returns the union data inside the UpdateConnectorConfig as a IndexConfig -func (t UpdateConnectorConfig) AsIndexConfig() (IndexConfig, error) { - var body IndexConfig - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromIndexConfig overwrites any union data inside the UpdateConnectorConfig as the provided IndexConfig -func (t *UpdateConnectorConfig) FromIndexConfig(v IndexConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeIndexConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided IndexConfig -func (t *UpdateConnectorConfig) MergeIndexConfig(v IndexConfig) error { - b, err := json.Marshal(v) +// ParseFindEndpointListItemsResponse parses an HTTP response from a FindEndpointListItemsWithResponse call +func ParseFindEndpointListItemsResponse(rsp *http.Response) (*FindEndpointListItemsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &FindEndpointListItemsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsJiraConfig returns the union data inside the UpdateConnectorConfig 
as a JiraConfig -func (t UpdateConnectorConfig) AsJiraConfig() (JiraConfig, error) { - var body JiraConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Data []SecurityEndpointExceptionsAPIEndpointListItem `json:"data"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Pit *string `json:"pit,omitempty"` + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromJiraConfig overwrites any union data inside the UpdateConnectorConfig as the provided JiraConfig -func (t *UpdateConnectorConfig) FromJiraConfig(v JiraConfig) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeJiraConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided JiraConfig -func (t *UpdateConnectorConfig) MergeJiraConfig(v JiraConfig) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityEndpointExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsDefenderConfig returns the union data inside the UpdateConnectorConfig as a DefenderConfig -func (t UpdateConnectorConfig) AsDefenderConfig() (DefenderConfig, error) { - var body DefenderConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// FromDefenderConfig overwrites any union data inside the UpdateConnectorConfig as the provided DefenderConfig -func (t *UpdateConnectorConfig) FromDefenderConfig(v DefenderConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityEndpointExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeDefenderConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided DefenderConfig -func (t *UpdateConnectorConfig) MergeDefenderConfig(v DefenderConfig) error { - b, err := json.Marshal(v) +// ParseDeleteMonitoringEngineResponse parses an HTTP response from a DeleteMonitoringEngineWithResponse call +func ParseDeleteMonitoringEngineResponse(rsp *http.Response) (*DeleteMonitoringEngineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return 
nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteMonitoringEngineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsGenaiAzureConfig returns the union data inside the UpdateConnectorConfig as a GenaiAzureConfig -func (t UpdateConnectorConfig) AsGenaiAzureConfig() (GenaiAzureConfig, error) { - var body GenaiAzureConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Deleted bool `json:"deleted"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromGenaiAzureConfig overwrites any union data inside the UpdateConnectorConfig as the provided GenaiAzureConfig -func (t *UpdateConnectorConfig) FromGenaiAzureConfig(v GenaiAzureConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeGenaiAzureConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided GenaiAzureConfig -func (t *UpdateConnectorConfig) MergeGenaiAzureConfig(v GenaiAzureConfig) error { - b, err := json.Marshal(v) +// ParseDisableMonitoringEngineResponse parses an HTTP response from a DisableMonitoringEngineWithResponse call +func ParseDisableMonitoringEngineResponse(rsp *http.Response) (*DisableMonitoringEngineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DisableMonitoringEngineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsGenaiOpenaiConfig returns the union data inside the UpdateConnectorConfig as a GenaiOpenaiConfig -func (t UpdateConnectorConfig) AsGenaiOpenaiConfig() (GenaiOpenaiConfig, error) { - var body GenaiOpenaiConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEntityAnalyticsAPIMonitoringEngineDescriptor + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromGenaiOpenaiConfig overwrites any union data inside the UpdateConnectorConfig as the provided GenaiOpenaiConfig -func (t *UpdateConnectorConfig) FromGenaiOpenaiConfig(v GenaiOpenaiConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeGenaiOpenaiConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided GenaiOpenaiConfig -func (t *UpdateConnectorConfig) MergeGenaiOpenaiConfig(v GenaiOpenaiConfig) error { - b, err := json.Marshal(v) +// ParseInitMonitoringEngineResponse parses an HTTP response from a InitMonitoringEngineWithResponse call +func ParseInitMonitoringEngineResponse(rsp *http.Response) (*InitMonitoringEngineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &InitMonitoringEngineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsOpsgenieConfig returns the union data inside the UpdateConnectorConfig as a OpsgenieConfig -func (t UpdateConnectorConfig) AsOpsgenieConfig() (OpsgenieConfig, error) { - var 
body OpsgenieConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEntityAnalyticsAPIMonitoringEngineDescriptor + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromOpsgenieConfig overwrites any union data inside the UpdateConnectorConfig as the provided OpsgenieConfig -func (t *UpdateConnectorConfig) FromOpsgenieConfig(v OpsgenieConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeOpsgenieConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided OpsgenieConfig -func (t *UpdateConnectorConfig) MergeOpsgenieConfig(v OpsgenieConfig) error { - b, err := json.Marshal(v) +// ParseScheduleMonitoringEngineResponse parses an HTTP response from a ScheduleMonitoringEngineWithResponse call +func ParseScheduleMonitoringEngineResponse(rsp *http.Response) (*ScheduleMonitoringEngineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ScheduleMonitoringEngineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPagerdutyConfig returns the union data inside the UpdateConnectorConfig as a PagerdutyConfig -func (t UpdateConnectorConfig) AsPagerdutyConfig() (PagerdutyConfig, error) { - var body PagerdutyConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Success Indicates the scheduling was successful + Success *bool `json:"success,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPagerdutyConfig overwrites any union data inside the UpdateConnectorConfig as the provided PagerdutyConfig -func (t *UpdateConnectorConfig) FromPagerdutyConfig(v PagerdutyConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest struct { + // Message Error message indicating the engine is already running + Message *string `json:"message,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest + + } + + return response, nil } -// MergePagerdutyConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided PagerdutyConfig -func (t *UpdateConnectorConfig) MergePagerdutyConfig(v PagerdutyConfig) error { - b, err := json.Marshal(v) +// ParsePrivMonHealthResponse parses an HTTP response from a PrivMonHealthWithResponse call +func ParsePrivMonHealthResponse(rsp *http.Response) (*PrivMonHealthResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &PrivMonHealthResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsSentineloneConfig returns the union data inside the UpdateConnectorConfig as a SentineloneConfig -func (t UpdateConnectorConfig) AsSentineloneConfig() (SentineloneConfig, error) { - var body SentineloneConfig - err := 
json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Error *struct { + Message *string `json:"message,omitempty"` + } `json:"error,omitempty"` + Status SecurityEntityAnalyticsAPIPrivilegeMonitoringEngineStatus `json:"status"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromSentineloneConfig overwrites any union data inside the UpdateConnectorConfig as the provided SentineloneConfig -func (t *UpdateConnectorConfig) FromSentineloneConfig(v SentineloneConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeSentineloneConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided SentineloneConfig -func (t *UpdateConnectorConfig) MergeSentineloneConfig(v SentineloneConfig) error { - b, err := json.Marshal(v) +// ParsePrivMonPrivilegesResponse parses an HTTP response from a PrivMonPrivilegesWithResponse call +func ParsePrivMonPrivilegesResponse(rsp *http.Response) (*PrivMonPrivilegesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &PrivMonPrivilegesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsServicenowConfig returns the union data inside the UpdateConnectorConfig as a ServicenowConfig -func (t UpdateConnectorConfig) AsServicenowConfig() (ServicenowConfig, error) { - var body ServicenowConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEntityAnalyticsAPIEntityAnalyticsPrivileges + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } -// FromServicenowConfig overwrites any union data inside the UpdateConnectorConfig as the provided ServicenowConfig -func (t *UpdateConnectorConfig) FromServicenowConfig(v ServicenowConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeServicenowConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ServicenowConfig -func (t *UpdateConnectorConfig) MergeServicenowConfig(v ServicenowConfig) error { - b, err := json.Marshal(v) +// ParseCreatePrivMonUserResponse parses an HTTP response from a CreatePrivMonUserWithResponse call +func ParseCreatePrivMonUserResponse(rsp *http.Response) (*CreatePrivMonUserResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreatePrivMonUserResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsServicenowItomConfig returns the union data inside the UpdateConnectorConfig as a ServicenowItomConfig -func (t UpdateConnectorConfig) AsServicenowItomConfig() (ServicenowItomConfig, error) { - var body ServicenowItomConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEntityAnalyticsAPIMonitoredUserDoc + if err := json.Unmarshal(bodyBytes, 
&dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromServicenowItomConfig overwrites any union data inside the UpdateConnectorConfig as the provided ServicenowItomConfig -func (t *UpdateConnectorConfig) FromServicenowItomConfig(v ServicenowItomConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeServicenowItomConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ServicenowItomConfig -func (t *UpdateConnectorConfig) MergeServicenowItomConfig(v ServicenowItomConfig) error { - b, err := json.Marshal(v) +// ParsePrivmonBulkUploadUsersCSVResponse parses an HTTP response from a PrivmonBulkUploadUsersCSVWithResponse call +func ParsePrivmonBulkUploadUsersCSVResponse(rsp *http.Response) (*PrivmonBulkUploadUsersCSVResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &PrivmonBulkUploadUsersCSVResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsSlackApiConfig returns the union data inside the UpdateConnectorConfig as a SlackApiConfig -func (t UpdateConnectorConfig) AsSlackApiConfig() (SlackApiConfig, error) { - var body SlackApiConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Errors []SecurityEntityAnalyticsAPIPrivmonUserCsvUploadErrorItem `json:"errors"` + Stats SecurityEntityAnalyticsAPIPrivmonUserCsvUploadStats `json:"stats"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromSlackApiConfig overwrites any union data inside the UpdateConnectorConfig as the provided SlackApiConfig -func (t *UpdateConnectorConfig) FromSlackApiConfig(v SlackApiConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeSlackApiConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided SlackApiConfig -func (t *UpdateConnectorConfig) MergeSlackApiConfig(v SlackApiConfig) error { - b, err := json.Marshal(v) +// ParseListPrivMonUsersResponse parses an HTTP response from a ListPrivMonUsersWithResponse call +func ParseListPrivMonUsersResponse(rsp *http.Response) (*ListPrivMonUsersResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ListPrivMonUsersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsSwimlaneConfig returns the union data inside the UpdateConnectorConfig as a SwimlaneConfig -func (t UpdateConnectorConfig) AsSwimlaneConfig() (SwimlaneConfig, error) { - var body SwimlaneConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []SecurityEntityAnalyticsAPIMonitoredUserDoc + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromSwimlaneConfig overwrites any union data inside the UpdateConnectorConfig as the provided SwimlaneConfig -func (t *UpdateConnectorConfig) FromSwimlaneConfig(v SwimlaneConfig) error { - b, err := 
json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeSwimlaneConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided SwimlaneConfig -func (t *UpdateConnectorConfig) MergeSwimlaneConfig(v SwimlaneConfig) error { - b, err := json.Marshal(v) +// ParseDeletePrivMonUserResponse parses an HTTP response from a DeletePrivMonUserWithResponse call +func ParseDeletePrivMonUserResponse(rsp *http.Response) (*DeletePrivMonUserResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeletePrivMonUserResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsThehiveConfig returns the union data inside the UpdateConnectorConfig as a ThehiveConfig -func (t UpdateConnectorConfig) AsThehiveConfig() (ThehiveConfig, error) { - var body ThehiveConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Acknowledged Indicates if the deletion was successful + Acknowledged *bool `json:"acknowledged,omitempty"` -// FromThehiveConfig overwrites any union data inside the UpdateConnectorConfig as the provided ThehiveConfig -func (t *UpdateConnectorConfig) FromThehiveConfig(v ThehiveConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + // Message A message providing additional information about the deletion status + Message *string `json:"message,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -// MergeThehiveConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided ThehiveConfig -func (t *UpdateConnectorConfig) MergeThehiveConfig(v ThehiveConfig) error { - b, err := json.Marshal(v) +// ParseUpdatePrivMonUserResponse parses an HTTP response from a UpdatePrivMonUserWithResponse call +func ParseUpdatePrivMonUserResponse(rsp *http.Response) (*UpdatePrivMonUserResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &UpdatePrivMonUserResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsTinesConfig returns the union data inside the UpdateConnectorConfig as a TinesConfig -func (t UpdateConnectorConfig) AsTinesConfig() (TinesConfig, error) { - var body TinesConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEntityAnalyticsAPIMonitoredUserDoc + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromTinesConfig overwrites any union data inside the UpdateConnectorConfig as the provided TinesConfig -func (t *UpdateConnectorConfig) FromTinesConfig(v TinesConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeTinesConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided TinesConfig -func (t *UpdateConnectorConfig) MergeTinesConfig(v TinesConfig) error { - b, err := json.Marshal(v) +// 
ParseInstallPrivilegedAccessDetectionPackageResponse parses an HTTP response from a InstallPrivilegedAccessDetectionPackageWithResponse call +func ParseInstallPrivilegedAccessDetectionPackageResponse(rsp *http.Response) (*InstallPrivilegedAccessDetectionPackageResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &InstallPrivilegedAccessDetectionPackageResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsTorqConfig returns the union data inside the UpdateConnectorConfig as a TorqConfig -func (t UpdateConnectorConfig) AsTorqConfig() (TorqConfig, error) { - var body TorqConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Message string `json:"message"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromTorqConfig overwrites any union data inside the UpdateConnectorConfig as the provided TorqConfig -func (t *UpdateConnectorConfig) FromTorqConfig(v TorqConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeTorqConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided TorqConfig -func (t *UpdateConnectorConfig) MergeTorqConfig(v TorqConfig) error { - b, err := json.Marshal(v) +// ParseGetPrivilegedAccessDetectionPackageStatusResponse parses an HTTP response from a GetPrivilegedAccessDetectionPackageStatusWithResponse call +func ParseGetPrivilegedAccessDetectionPackageStatusResponse(rsp *http.Response) (*GetPrivilegedAccessDetectionPackageStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetPrivilegedAccessDetectionPackageStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsWebhookConfig returns the union data inside the UpdateConnectorConfig as a WebhookConfig -func (t UpdateConnectorConfig) AsWebhookConfig() (WebhookConfig, error) { - var body WebhookConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Jobs []struct { + Description *string `json:"description,omitempty"` + JobId string `json:"job_id"` + State GetPrivilegedAccessDetectionPackageStatus200JobsState `json:"state"` + } `json:"jobs"` + MlModuleSetupStatus GetPrivilegedAccessDetectionPackageStatus200MlModuleSetupStatus `json:"ml_module_setup_status"` + PackageInstallationStatus GetPrivilegedAccessDetectionPackageStatus200PackageInstallationStatus `json:"package_installation_status"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromWebhookConfig overwrites any union data inside the UpdateConnectorConfig as the provided WebhookConfig -func (t *UpdateConnectorConfig) FromWebhookConfig(v WebhookConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeWebhookConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided WebhookConfig -func 
(t *UpdateConnectorConfig) MergeWebhookConfig(v WebhookConfig) error { - b, err := json.Marshal(v) +// ParseInitEntityStoreResponse parses an HTTP response from a InitEntityStoreWithResponse call +func ParseInitEntityStoreResponse(rsp *http.Response) (*InitEntityStoreResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &InitEntityStoreResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsCasesWebhookConfig returns the union data inside the UpdateConnectorConfig as a CasesWebhookConfig -func (t UpdateConnectorConfig) AsCasesWebhookConfig() (CasesWebhookConfig, error) { - var body CasesWebhookConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Engines *[]SecurityEntityAnalyticsAPIEngineDescriptor `json:"engines,omitempty"` + Succeeded *bool `json:"succeeded,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromCasesWebhookConfig overwrites any union data inside the UpdateConnectorConfig as the provided CasesWebhookConfig -func (t *UpdateConnectorConfig) FromCasesWebhookConfig(v CasesWebhookConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeCasesWebhookConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided CasesWebhookConfig -func (t *UpdateConnectorConfig) MergeCasesWebhookConfig(v CasesWebhookConfig) error { - b, err := json.Marshal(v) +// ParseListEntityEnginesResponse parses an HTTP response from a ListEntityEnginesWithResponse call +func ParseListEntityEnginesResponse(rsp *http.Response) (*ListEntityEnginesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ListEntityEnginesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsXmattersConfig returns the union data inside the UpdateConnectorConfig as a XmattersConfig -func (t UpdateConnectorConfig) AsXmattersConfig() (XmattersConfig, error) { - var body XmattersConfig - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Count *int `json:"count,omitempty"` + Engines *[]SecurityEntityAnalyticsAPIEngineDescriptor `json:"engines,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromXmattersConfig overwrites any union data inside the UpdateConnectorConfig as the provided XmattersConfig -func (t *UpdateConnectorConfig) FromXmattersConfig(v XmattersConfig) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeXmattersConfig performs a merge with any union data inside the UpdateConnectorConfig, using the provided XmattersConfig -func (t *UpdateConnectorConfig) MergeXmattersConfig(v XmattersConfig) error { - b, err := json.Marshal(v) +// ParseApplyEntityEngineDataviewIndicesResponse parses an HTTP response from a ApplyEntityEngineDataviewIndicesWithResponse call +func 
ParseApplyEntityEngineDataviewIndicesResponse(rsp *http.Response) (*ApplyEntityEngineDataviewIndicesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ApplyEntityEngineDataviewIndicesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsBedrockSecrets returns the union data inside the UpdateConnectorSecrets as a BedrockSecrets -func (t UpdateConnectorSecrets) AsBedrockSecrets() (BedrockSecrets, error) { - var body BedrockSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Result *[]SecurityEntityAnalyticsAPIEngineDataviewUpdateResult `json:"result,omitempty"` + Success *bool `json:"success,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromBedrockSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided BedrockSecrets -func (t *UpdateConnectorSecrets) FromBedrockSecrets(v BedrockSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 207: + var dest struct { + Errors *[]string `json:"errors,omitempty"` + Result *[]SecurityEntityAnalyticsAPIEngineDataviewUpdateResult `json:"result,omitempty"` + Success *bool `json:"success,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON207 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest struct { + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeBedrockSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided BedrockSecrets -func (t *UpdateConnectorSecrets) MergeBedrockSecrets(v BedrockSecrets) error { - b, err := json.Marshal(v) +// ParseDeleteEntityEngineResponse parses an HTTP response from a DeleteEntityEngineWithResponse call +func ParseDeleteEntityEngineResponse(rsp *http.Response) (*DeleteEntityEngineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteEntityEngineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsCrowdstrikeSecrets returns the union data inside the UpdateConnectorSecrets as a CrowdstrikeSecrets -func (t UpdateConnectorSecrets) AsCrowdstrikeSecrets() (CrowdstrikeSecrets, error) { - var body CrowdstrikeSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Deleted *bool `json:"deleted,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromCrowdstrikeSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided CrowdstrikeSecrets -func (t *UpdateConnectorSecrets) FromCrowdstrikeSecrets(v 
CrowdstrikeSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeCrowdstrikeSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided CrowdstrikeSecrets -func (t *UpdateConnectorSecrets) MergeCrowdstrikeSecrets(v CrowdstrikeSecrets) error { - b, err := json.Marshal(v) +// ParseGetEntityEngineResponse parses an HTTP response from a GetEntityEngineWithResponse call +func ParseGetEntityEngineResponse(rsp *http.Response) (*GetEntityEngineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetEntityEngineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsD3securitySecrets returns the union data inside the UpdateConnectorSecrets as a D3securitySecrets -func (t UpdateConnectorSecrets) AsD3securitySecrets() (D3securitySecrets, error) { - var body D3securitySecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEntityAnalyticsAPIEngineDescriptor + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromD3securitySecrets overwrites any union data inside the UpdateConnectorSecrets as the provided D3securitySecrets -func (t *UpdateConnectorSecrets) FromD3securitySecrets(v D3securitySecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeD3securitySecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided D3securitySecrets -func (t *UpdateConnectorSecrets) MergeD3securitySecrets(v D3securitySecrets) error { - b, err := json.Marshal(v) +// ParseInitEntityEngineResponse parses an HTTP response from a InitEntityEngineWithResponse call +func ParseInitEntityEngineResponse(rsp *http.Response) (*InitEntityEngineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &InitEntityEngineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsEmailSecrets returns the union data inside the UpdateConnectorSecrets as a EmailSecrets -func (t UpdateConnectorSecrets) AsEmailSecrets() (EmailSecrets, error) { - var body EmailSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEntityAnalyticsAPIEngineDescriptor + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromEmailSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided EmailSecrets -func (t *UpdateConnectorSecrets) FromEmailSecrets(v EmailSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeEmailSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided EmailSecrets -func (t *UpdateConnectorSecrets) MergeEmailSecrets(v EmailSecrets) error { - b, err := json.Marshal(v) +// ParseStartEntityEngineResponse parses an HTTP response from a StartEntityEngineWithResponse call +func 
ParseStartEntityEngineResponse(rsp *http.Response) (*StartEntityEngineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsGeminiSecrets returns the union data inside the UpdateConnectorSecrets as a GeminiSecrets -func (t UpdateConnectorSecrets) AsGeminiSecrets() (GeminiSecrets, error) { - var body GeminiSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + response := &StartEntityEngineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// FromGeminiSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided GeminiSecrets -func (t *UpdateConnectorSecrets) FromGeminiSecrets(v GeminiSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Started *bool `json:"started,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -// MergeGeminiSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided GeminiSecrets -func (t *UpdateConnectorSecrets) MergeGeminiSecrets(v GeminiSecrets) error { - b, err := json.Marshal(v) +// ParseStopEntityEngineResponse parses an HTTP response from a StopEntityEngineWithResponse call +func ParseStopEntityEngineResponse(rsp *http.Response) (*StopEntityEngineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &StopEntityEngineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsResilientSecrets returns the union data inside the UpdateConnectorSecrets as a ResilientSecrets -func (t UpdateConnectorSecrets) AsResilientSecrets() (ResilientSecrets, error) { - var body ResilientSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Stopped *bool `json:"stopped,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromResilientSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided ResilientSecrets -func (t *UpdateConnectorSecrets) FromResilientSecrets(v ResilientSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeResilientSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided ResilientSecrets -func (t *UpdateConnectorSecrets) MergeResilientSecrets(v ResilientSecrets) error { - b, err := json.Marshal(v) +// ParseListEntitiesResponse parses an HTTP response from a ListEntitiesWithResponse call +func ParseListEntitiesResponse(rsp *http.Response) (*ListEntitiesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ListEntitiesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsJiraSecrets returns the union data inside the 
UpdateConnectorSecrets as a JiraSecrets -func (t UpdateConnectorSecrets) AsJiraSecrets() (JiraSecrets, error) { - var body JiraSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Inspect *SecurityEntityAnalyticsAPIInspectQuery `json:"inspect,omitempty"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Records []SecurityEntityAnalyticsAPIEntity `json:"records"` + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromJiraSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided JiraSecrets -func (t *UpdateConnectorSecrets) FromJiraSecrets(v JiraSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeJiraSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided JiraSecrets -func (t *UpdateConnectorSecrets) MergeJiraSecrets(v JiraSecrets) error { - b, err := json.Marshal(v) +// ParseGetEntityStoreStatusResponse parses an HTTP response from a GetEntityStoreStatusWithResponse call +func ParseGetEntityStoreStatusResponse(rsp *http.Response) (*GetEntityStoreStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &GetEntityStoreStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsTeamsSecrets returns the union data inside the UpdateConnectorSecrets as a TeamsSecrets -func (t UpdateConnectorSecrets) AsTeamsSecrets() (TeamsSecrets, error) { - var body TeamsSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Engines []struct { + Components *[]SecurityEntityAnalyticsAPIEngineComponentStatus `json:"components,omitempty"` + Delay *string `json:"delay,omitempty"` + DocsPerSecond *int `json:"docsPerSecond,omitempty"` + Error *struct { + Action GetEntityStoreStatus200EnginesErrorAction `json:"action"` + Message string `json:"message"` + } `json:"error,omitempty"` + FieldHistoryLength int `json:"fieldHistoryLength"` + Filter *string `json:"filter,omitempty"` + Frequency *string `json:"frequency,omitempty"` + IndexPattern SecurityEntityAnalyticsAPIIndexPattern `json:"indexPattern"` + LookbackPeriod *string `json:"lookbackPeriod,omitempty"` + Status SecurityEntityAnalyticsAPIEngineStatus `json:"status"` + Timeout *string `json:"timeout,omitempty"` + TimestampField *string `json:"timestampField,omitempty"` + Type SecurityEntityAnalyticsAPIEntityType `json:"type"` + } `json:"engines"` + Status SecurityEntityAnalyticsAPIStoreStatus `json:"status"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromTeamsSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided TeamsSecrets -func (t *UpdateConnectorSecrets) FromTeamsSecrets(v TeamsSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeTeamsSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided TeamsSecrets -func (t *UpdateConnectorSecrets) MergeTeamsSecrets(v 
TeamsSecrets) error { - b, err := json.Marshal(v) +// ParseDeleteExceptionListResponse parses an HTTP response from a DeleteExceptionListWithResponse call +func ParseDeleteExceptionListResponse(rsp *http.Response) (*DeleteExceptionListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteExceptionListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsGenaiSecrets returns the union data inside the UpdateConnectorSecrets as a GenaiSecrets -func (t UpdateConnectorSecrets) AsGenaiSecrets() (GenaiSecrets, error) { - var body GenaiSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityExceptionsAPIExceptionList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromGenaiSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided GenaiSecrets -func (t *UpdateConnectorSecrets) FromGenaiSecrets(v GenaiSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeGenaiSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided GenaiSecrets -func (t *UpdateConnectorSecrets) MergeGenaiSecrets(v GenaiSecrets) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsOpsgenieSecrets returns the union data inside the UpdateConnectorSecrets as a OpsgenieSecrets -func (t UpdateConnectorSecrets) AsOpsgenieSecrets() (OpsgenieSecrets, error) { - var body OpsgenieSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// FromOpsgenieSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided OpsgenieSecrets -func (t *UpdateConnectorSecrets) FromOpsgenieSecrets(v OpsgenieSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeOpsgenieSecrets performs a merge with any union data inside the UpdateConnectorSecrets, 
using the provided OpsgenieSecrets -func (t *UpdateConnectorSecrets) MergeOpsgenieSecrets(v OpsgenieSecrets) error { - b, err := json.Marshal(v) +// ParseReadExceptionListResponse parses an HTTP response from a ReadExceptionListWithResponse call +func ParseReadExceptionListResponse(rsp *http.Response) (*ReadExceptionListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ReadExceptionListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsPagerdutySecrets returns the union data inside the UpdateConnectorSecrets as a PagerdutySecrets -func (t UpdateConnectorSecrets) AsPagerdutySecrets() (PagerdutySecrets, error) { - var body PagerdutySecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityExceptionsAPIExceptionList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromPagerdutySecrets overwrites any union data inside the UpdateConnectorSecrets as the provided PagerdutySecrets -func (t *UpdateConnectorSecrets) FromPagerdutySecrets(v PagerdutySecrets) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergePagerdutySecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided PagerdutySecrets -func (t *UpdateConnectorSecrets) MergePagerdutySecrets(v PagerdutySecrets) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsSentineloneSecrets returns the union data inside the UpdateConnectorSecrets as a SentineloneSecrets -func (t UpdateConnectorSecrets) AsSentineloneSecrets() (SentineloneSecrets, error) { - var body SentineloneSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// FromSentineloneSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided SentineloneSecrets -func (t *UpdateConnectorSecrets) FromSentineloneSecrets(v SentineloneSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err 
+ } + response.JSON500 = &dest + + } + + return response, nil } -// MergeSentineloneSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided SentineloneSecrets -func (t *UpdateConnectorSecrets) MergeSentineloneSecrets(v SentineloneSecrets) error { - b, err := json.Marshal(v) +// ParseCreateExceptionListResponse parses an HTTP response from a CreateExceptionListWithResponse call +func ParseCreateExceptionListResponse(rsp *http.Response) (*CreateExceptionListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateExceptionListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsServicenowSecrets returns the union data inside the UpdateConnectorSecrets as a ServicenowSecrets -func (t UpdateConnectorSecrets) AsServicenowSecrets() (ServicenowSecrets, error) { - var body ServicenowSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityExceptionsAPIExceptionList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromServicenowSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided ServicenowSecrets -func (t *UpdateConnectorSecrets) FromServicenowSecrets(v ServicenowSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeServicenowSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided ServicenowSecrets -func (t *UpdateConnectorSecrets) MergeServicenowSecrets(v ServicenowSecrets) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsSlackApiSecrets returns the union data inside the UpdateConnectorSecrets as a SlackApiSecrets -func (t UpdateConnectorSecrets) AsSlackApiSecrets() (SlackApiSecrets, error) { - var body SlackApiSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest -// FromSlackApiSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided SlackApiSecrets -func (t *UpdateConnectorSecrets) FromSlackApiSecrets(v SlackApiSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeSlackApiSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided SlackApiSecrets -func (t *UpdateConnectorSecrets) MergeSlackApiSecrets(v SlackApiSecrets) error { - b, err := json.Marshal(v) +// ParseUpdateExceptionListResponse parses an HTTP response from a UpdateExceptionListWithResponse call +func ParseUpdateExceptionListResponse(rsp *http.Response) (*UpdateExceptionListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &UpdateExceptionListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsSwimlaneSecrets returns the union data inside the UpdateConnectorSecrets as a SwimlaneSecrets -func (t UpdateConnectorSecrets) AsSwimlaneSecrets() (SwimlaneSecrets, error) { - var body SwimlaneSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityExceptionsAPIExceptionList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromSwimlaneSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided SwimlaneSecrets -func (t *UpdateConnectorSecrets) FromSwimlaneSecrets(v SwimlaneSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeSwimlaneSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided SwimlaneSecrets -func (t *UpdateConnectorSecrets) MergeSwimlaneSecrets(v SwimlaneSecrets) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsThehiveSecrets returns the union data inside the UpdateConnectorSecrets as a ThehiveSecrets -func (t UpdateConnectorSecrets) AsThehiveSecrets() (ThehiveSecrets, error) { - var body ThehiveSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// FromThehiveSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided ThehiveSecrets -func (t 
*UpdateConnectorSecrets) FromThehiveSecrets(v ThehiveSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + + } + + return response, nil } -// MergeThehiveSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided ThehiveSecrets -func (t *UpdateConnectorSecrets) MergeThehiveSecrets(v ThehiveSecrets) error { - b, err := json.Marshal(v) +// ParseDuplicateExceptionListResponse parses an HTTP response from a DuplicateExceptionListWithResponse call +func ParseDuplicateExceptionListResponse(rsp *http.Response) (*DuplicateExceptionListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DuplicateExceptionListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsTinesSecrets returns the union data inside the UpdateConnectorSecrets as a TinesSecrets -func (t UpdateConnectorSecrets) AsTinesSecrets() (TinesSecrets, error) { - var body TinesSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityExceptionsAPIExceptionList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromTinesSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided TinesSecrets -func (t *UpdateConnectorSecrets) FromTinesSecrets(v TinesSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeTinesSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided TinesSecrets -func (t *UpdateConnectorSecrets) MergeTinesSecrets(v TinesSecrets) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 405: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON405 = &dest + + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// AsTorqSecrets returns the union data inside the UpdateConnectorSecrets as a TorqSecrets -func (t UpdateConnectorSecrets) AsTorqSecrets() (TorqSecrets, error) { - var body TorqSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromTorqSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided TorqSecrets -func (t *UpdateConnectorSecrets) FromTorqSecrets(v TorqSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeTorqSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided TorqSecrets -func (t *UpdateConnectorSecrets) MergeTorqSecrets(v TorqSecrets) error { - b, err := json.Marshal(v) +// ParseExportExceptionListResponse parses an HTTP response from a ExportExceptionListWithResponse call +func ParseExportExceptionListResponse(rsp *http.Response) (*ExportExceptionListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ExportExceptionListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsWebhookSecrets returns the union data inside the UpdateConnectorSecrets as a WebhookSecrets -func (t UpdateConnectorSecrets) AsWebhookSecrets() (WebhookSecrets, error) { - var body WebhookSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// FromWebhookSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided WebhookSecrets -func (t *UpdateConnectorSecrets) FromWebhookSecrets(v WebhookSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// MergeWebhookSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided WebhookSecrets -func (t *UpdateConnectorSecrets) MergeWebhookSecrets(v WebhookSecrets) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// AsCasesWebhookSecrets returns the union data inside the UpdateConnectorSecrets as a CasesWebhookSecrets -func (t UpdateConnectorSecrets) AsCasesWebhookSecrets() (CasesWebhookSecrets, 
error) { - var body CasesWebhookSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// FromCasesWebhookSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided CasesWebhookSecrets -func (t *UpdateConnectorSecrets) FromCasesWebhookSecrets(v CasesWebhookSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeCasesWebhookSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided CasesWebhookSecrets -func (t *UpdateConnectorSecrets) MergeCasesWebhookSecrets(v CasesWebhookSecrets) error { - b, err := json.Marshal(v) +// ParseFindExceptionListsResponse parses an HTTP response from a FindExceptionListsWithResponse call +func ParseFindExceptionListsResponse(rsp *http.Response) (*FindExceptionListsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &FindExceptionListsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsXmattersSecrets returns the union data inside the UpdateConnectorSecrets as a XmattersSecrets -func (t UpdateConnectorSecrets) AsXmattersSecrets() (XmattersSecrets, error) { - var body XmattersSecrets - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Data []SecurityExceptionsAPIExceptionList `json:"data"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromXmattersSecrets overwrites any union data inside the UpdateConnectorSecrets as the provided XmattersSecrets -func (t *UpdateConnectorSecrets) FromXmattersSecrets(v XmattersSecrets) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeXmattersSecrets performs a merge with any union data inside the UpdateConnectorSecrets, using the provided XmattersSecrets -func (t *UpdateConnectorSecrets) MergeXmattersSecrets(v XmattersSecrets) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsUpdateOutputElasticsearchSecretsSslKey0 returns the union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key as a 
UpdateOutputElasticsearchSecretsSslKey0 -func (t UpdateOutputElasticsearch_Secrets_Ssl_Key) AsUpdateOutputElasticsearchSecretsSslKey0() (UpdateOutputElasticsearchSecretsSslKey0, error) { - var body UpdateOutputElasticsearchSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// FromUpdateOutputElasticsearchSecretsSslKey0 overwrites any union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key as the provided UpdateOutputElasticsearchSecretsSslKey0 -func (t *UpdateOutputElasticsearch_Secrets_Ssl_Key) FromUpdateOutputElasticsearchSecretsSslKey0(v UpdateOutputElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err + } + + return response, nil } -// MergeUpdateOutputElasticsearchSecretsSslKey0 performs a merge with any union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key, using the provided UpdateOutputElasticsearchSecretsSslKey0 -func (t *UpdateOutputElasticsearch_Secrets_Ssl_Key) MergeUpdateOutputElasticsearchSecretsSslKey0(v UpdateOutputElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) +// ParseImportExceptionListResponse parses an HTTP response from a ImportExceptionListWithResponse call +func ParseImportExceptionListResponse(rsp *http.Response) (*ImportExceptionListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsUpdateOutputElasticsearchSecretsSslKey1 returns the union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key as a UpdateOutputElasticsearchSecretsSslKey1 -func (t UpdateOutputElasticsearch_Secrets_Ssl_Key) AsUpdateOutputElasticsearchSecretsSslKey1() (UpdateOutputElasticsearchSecretsSslKey1, error) { - var body UpdateOutputElasticsearchSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + response := &ImportExceptionListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// FromUpdateOutputElasticsearchSecretsSslKey1 overwrites any union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key as the provided UpdateOutputElasticsearchSecretsSslKey1 -func (t *UpdateOutputElasticsearch_Secrets_Ssl_Key) FromUpdateOutputElasticsearchSecretsSslKey1(v UpdateOutputElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Errors SecurityExceptionsAPIExceptionListsImportBulkErrorArray `json:"errors"` + Success bool `json:"success"` + SuccessCount int `json:"success_count"` + SuccessCountExceptionListItems int `json:"success_count_exception_list_items"` + SuccessCountExceptionLists int `json:"success_count_exception_lists"` + SuccessExceptionListItems bool `json:"success_exception_list_items"` + SuccessExceptionLists bool `json:"success_exception_lists"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// MergeUpdateOutputElasticsearchSecretsSslKey1 performs a merge with any union data inside the UpdateOutputElasticsearch_Secrets_Ssl_Key, using the provided UpdateOutputElasticsearchSecretsSslKey1 -func (t 
*UpdateOutputElasticsearch_Secrets_Ssl_Key) MergeUpdateOutputElasticsearchSecretsSslKey1(v UpdateOutputElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -func (t UpdateOutputElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -func (t *UpdateOutputElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// AsUpdateOutputKafkaSecretsPassword0 returns the union data inside the UpdateOutputKafka_Secrets_Password as a UpdateOutputKafkaSecretsPassword0 -func (t UpdateOutputKafka_Secrets_Password) AsUpdateOutputKafkaSecretsPassword0() (UpdateOutputKafkaSecretsPassword0, error) { - var body UpdateOutputKafkaSecretsPassword0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromUpdateOutputKafkaSecretsPassword0 overwrites any union data inside the UpdateOutputKafka_Secrets_Password as the provided UpdateOutputKafkaSecretsPassword0 -func (t *UpdateOutputKafka_Secrets_Password) FromUpdateOutputKafkaSecretsPassword0(v UpdateOutputKafkaSecretsPassword0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeUpdateOutputKafkaSecretsPassword0 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Password, using the provided UpdateOutputKafkaSecretsPassword0 -func (t *UpdateOutputKafka_Secrets_Password) MergeUpdateOutputKafkaSecretsPassword0(v UpdateOutputKafkaSecretsPassword0) error { - b, err := json.Marshal(v) +// ParseDeleteExceptionListItemResponse parses an HTTP response from a DeleteExceptionListItemWithResponse call +func ParseDeleteExceptionListItemResponse(rsp *http.Response) (*DeleteExceptionListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &DeleteExceptionListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsUpdateOutputKafkaSecretsPassword1 returns the union data inside the UpdateOutputKafka_Secrets_Password as a UpdateOutputKafkaSecretsPassword1 -func (t UpdateOutputKafka_Secrets_Password) AsUpdateOutputKafkaSecretsPassword1() (UpdateOutputKafkaSecretsPassword1, error) { - var body UpdateOutputKafkaSecretsPassword1 - err := json.Unmarshal(t.union, &body) - return body, 
err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityExceptionsAPIExceptionListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromUpdateOutputKafkaSecretsPassword1 overwrites any union data inside the UpdateOutputKafka_Secrets_Password as the provided UpdateOutputKafkaSecretsPassword1 -func (t *UpdateOutputKafka_Secrets_Password) FromUpdateOutputKafkaSecretsPassword1(v UpdateOutputKafkaSecretsPassword1) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeUpdateOutputKafkaSecretsPassword1 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Password, using the provided UpdateOutputKafkaSecretsPassword1 -func (t *UpdateOutputKafka_Secrets_Password) MergeUpdateOutputKafkaSecretsPassword1(v UpdateOutputKafkaSecretsPassword1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -func (t UpdateOutputKafka_Secrets_Password) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -func (t *UpdateOutputKafka_Secrets_Password) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// AsUpdateOutputKafkaSecretsSslKey0 returns the union data inside the UpdateOutputKafka_Secrets_Ssl_Key as a UpdateOutputKafkaSecretsSslKey0 -func (t UpdateOutputKafka_Secrets_Ssl_Key) AsUpdateOutputKafkaSecretsSslKey0() (UpdateOutputKafkaSecretsSslKey0, error) { - var body UpdateOutputKafkaSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromUpdateOutputKafkaSecretsSslKey0 overwrites any union data inside the UpdateOutputKafka_Secrets_Ssl_Key as the provided UpdateOutputKafkaSecretsSslKey0 -func (t *UpdateOutputKafka_Secrets_Ssl_Key) FromUpdateOutputKafkaSecretsSslKey0(v UpdateOutputKafkaSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeUpdateOutputKafkaSecretsSslKey0 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Ssl_Key, using the provided UpdateOutputKafkaSecretsSslKey0 -func (t *UpdateOutputKafka_Secrets_Ssl_Key) 
MergeUpdateOutputKafkaSecretsSslKey0(v UpdateOutputKafkaSecretsSslKey0) error { - b, err := json.Marshal(v) +// ParseReadExceptionListItemResponse parses an HTTP response from a ReadExceptionListItemWithResponse call +func ParseReadExceptionListItemResponse(rsp *http.Response) (*ReadExceptionListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ReadExceptionListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsUpdateOutputKafkaSecretsSslKey1 returns the union data inside the UpdateOutputKafka_Secrets_Ssl_Key as a UpdateOutputKafkaSecretsSslKey1 -func (t UpdateOutputKafka_Secrets_Ssl_Key) AsUpdateOutputKafkaSecretsSslKey1() (UpdateOutputKafkaSecretsSslKey1, error) { - var body UpdateOutputKafkaSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityExceptionsAPIExceptionListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromUpdateOutputKafkaSecretsSslKey1 overwrites any union data inside the UpdateOutputKafka_Secrets_Ssl_Key as the provided UpdateOutputKafkaSecretsSslKey1 -func (t *UpdateOutputKafka_Secrets_Ssl_Key) FromUpdateOutputKafkaSecretsSslKey1(v UpdateOutputKafkaSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeUpdateOutputKafkaSecretsSslKey1 performs a merge with any union data inside the UpdateOutputKafka_Secrets_Ssl_Key, using the provided UpdateOutputKafkaSecretsSslKey1 -func (t *UpdateOutputKafka_Secrets_Ssl_Key) MergeUpdateOutputKafkaSecretsSslKey1(v UpdateOutputKafkaSecretsSslKey1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -func (t UpdateOutputKafka_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -func (t *UpdateOutputKafka_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// 
AsUpdateOutputLogstashSecretsSslKey0 returns the union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as a UpdateOutputLogstashSecretsSslKey0 -func (t UpdateOutputLogstash_Secrets_Ssl_Key) AsUpdateOutputLogstashSecretsSslKey0() (UpdateOutputLogstashSecretsSslKey0, error) { - var body UpdateOutputLogstashSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromUpdateOutputLogstashSecretsSslKey0 overwrites any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as the provided UpdateOutputLogstashSecretsSslKey0 -func (t *UpdateOutputLogstash_Secrets_Ssl_Key) FromUpdateOutputLogstashSecretsSslKey0(v UpdateOutputLogstashSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeUpdateOutputLogstashSecretsSslKey0 performs a merge with any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key, using the provided UpdateOutputLogstashSecretsSslKey0 -func (t *UpdateOutputLogstash_Secrets_Ssl_Key) MergeUpdateOutputLogstashSecretsSslKey0(v UpdateOutputLogstashSecretsSslKey0) error { - b, err := json.Marshal(v) +// ParseCreateExceptionListItemResponse parses an HTTP response from a CreateExceptionListItemWithResponse call +func ParseCreateExceptionListItemResponse(rsp *http.Response) (*CreateExceptionListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateExceptionListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsUpdateOutputLogstashSecretsSslKey1 returns the union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as a UpdateOutputLogstashSecretsSslKey1 -func (t UpdateOutputLogstash_Secrets_Ssl_Key) AsUpdateOutputLogstashSecretsSslKey1() (UpdateOutputLogstashSecretsSslKey1, error) { - var body UpdateOutputLogstashSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityExceptionsAPIExceptionListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromUpdateOutputLogstashSecretsSslKey1 overwrites any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key as the provided UpdateOutputLogstashSecretsSslKey1 -func (t *UpdateOutputLogstash_Secrets_Ssl_Key) FromUpdateOutputLogstashSecretsSslKey1(v UpdateOutputLogstashSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeUpdateOutputLogstashSecretsSslKey1 performs a merge with any union data inside the UpdateOutputLogstash_Secrets_Ssl_Key, using the provided UpdateOutputLogstashSecretsSslKey1 -func (t *UpdateOutputLogstash_Secrets_Ssl_Key) MergeUpdateOutputLogstashSecretsSslKey1(v UpdateOutputLogstashSecretsSslKey1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := 
runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -func (t UpdateOutputLogstash_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest -func (t *UpdateOutputLogstash_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// AsUpdateOutputRemoteElasticsearchSecretsServiceToken0 returns the union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as a UpdateOutputRemoteElasticsearchSecretsServiceToken0 -func (t UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) AsUpdateOutputRemoteElasticsearchSecretsServiceToken0() (UpdateOutputRemoteElasticsearchSecretsServiceToken0, error) { - var body UpdateOutputRemoteElasticsearchSecretsServiceToken0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromUpdateOutputRemoteElasticsearchSecretsServiceToken0 overwrites any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as the provided UpdateOutputRemoteElasticsearchSecretsServiceToken0 -func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) FromUpdateOutputRemoteElasticsearchSecretsServiceToken0(v UpdateOutputRemoteElasticsearchSecretsServiceToken0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeUpdateOutputRemoteElasticsearchSecretsServiceToken0 performs a merge with any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided UpdateOutputRemoteElasticsearchSecretsServiceToken0 -func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) MergeUpdateOutputRemoteElasticsearchSecretsServiceToken0(v UpdateOutputRemoteElasticsearchSecretsServiceToken0) error { - b, err := json.Marshal(v) +// ParseUpdateExceptionListItemResponse parses an HTTP response from a UpdateExceptionListItemWithResponse call +func ParseUpdateExceptionListItemResponse(rsp *http.Response) (*UpdateExceptionListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &UpdateExceptionListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsUpdateOutputRemoteElasticsearchSecretsServiceToken1 returns the union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as a UpdateOutputRemoteElasticsearchSecretsServiceToken1 -func (t UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) AsUpdateOutputRemoteElasticsearchSecretsServiceToken1() (UpdateOutputRemoteElasticsearchSecretsServiceToken1, error) { - var body UpdateOutputRemoteElasticsearchSecretsServiceToken1 - err := json.Unmarshal(t.union, &body) - return body, err -} + 
switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityExceptionsAPIExceptionListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromUpdateOutputRemoteElasticsearchSecretsServiceToken1 overwrites any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken as the provided UpdateOutputRemoteElasticsearchSecretsServiceToken1 -func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) FromUpdateOutputRemoteElasticsearchSecretsServiceToken1(v UpdateOutputRemoteElasticsearchSecretsServiceToken1) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeUpdateOutputRemoteElasticsearchSecretsServiceToken1 performs a merge with any union data inside the UpdateOutputRemoteElasticsearch_Secrets_ServiceToken, using the provided UpdateOutputRemoteElasticsearchSecretsServiceToken1 -func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) MergeUpdateOutputRemoteElasticsearchSecretsServiceToken1(v UpdateOutputRemoteElasticsearchSecretsServiceToken1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -func (t UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -func (t *UpdateOutputRemoteElasticsearch_Secrets_ServiceToken) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// AsUpdateOutputRemoteElasticsearchSecretsSslKey0 returns the union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key as a UpdateOutputRemoteElasticsearchSecretsSslKey0 -func (t UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) AsUpdateOutputRemoteElasticsearchSecretsSslKey0() (UpdateOutputRemoteElasticsearchSecretsSslKey0, error) { - var body UpdateOutputRemoteElasticsearchSecretsSslKey0 - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromUpdateOutputRemoteElasticsearchSecretsSslKey0 overwrites any union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided UpdateOutputRemoteElasticsearchSecretsSslKey0 -func (t *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) 
FromUpdateOutputRemoteElasticsearchSecretsSslKey0(v UpdateOutputRemoteElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeUpdateOutputRemoteElasticsearchSecretsSslKey0 performs a merge with any union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided UpdateOutputRemoteElasticsearchSecretsSslKey0 -func (t *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeUpdateOutputRemoteElasticsearchSecretsSslKey0(v UpdateOutputRemoteElasticsearchSecretsSslKey0) error { - b, err := json.Marshal(v) +// ParseFindExceptionListItemsResponse parses an HTTP response from a FindExceptionListItemsWithResponse call +func ParseFindExceptionListItemsResponse(rsp *http.Response) (*FindExceptionListItemsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &FindExceptionListItemsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsUpdateOutputRemoteElasticsearchSecretsSslKey1 returns the union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key as a UpdateOutputRemoteElasticsearchSecretsSslKey1 -func (t UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) AsUpdateOutputRemoteElasticsearchSecretsSslKey1() (UpdateOutputRemoteElasticsearchSecretsSslKey1, error) { - var body UpdateOutputRemoteElasticsearchSecretsSslKey1 - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Data []SecurityExceptionsAPIExceptionListItem `json:"data"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Pit *string `json:"pit,omitempty"` + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromUpdateOutputRemoteElasticsearchSecretsSslKey1 overwrites any union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key as the provided UpdateOutputRemoteElasticsearchSecretsSslKey1 -func (t *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) FromUpdateOutputRemoteElasticsearchSecretsSslKey1(v UpdateOutputRemoteElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeUpdateOutputRemoteElasticsearchSecretsSslKey1 performs a merge with any union data inside the UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key, using the provided UpdateOutputRemoteElasticsearchSecretsSslKey1 -func (t *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) MergeUpdateOutputRemoteElasticsearchSecretsSslKey1(v UpdateOutputRemoteElasticsearchSecretsSslKey1) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var 
dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -func (t UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -func (t *UpdateOutputRemoteElasticsearch_Secrets_Ssl_Key) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// AsUpdateOutputElasticsearch returns the union data inside the UpdateOutputUnion as a UpdateOutputElasticsearch -func (t UpdateOutputUnion) AsUpdateOutputElasticsearch() (UpdateOutputElasticsearch, error) { - var body UpdateOutputElasticsearch - err := json.Unmarshal(t.union, &body) - return body, err -} + } -// FromUpdateOutputElasticsearch overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputElasticsearch -func (t *UpdateOutputUnion) FromUpdateOutputElasticsearch(v UpdateOutputElasticsearch) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeUpdateOutputElasticsearch performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputElasticsearch -func (t *UpdateOutputUnion) MergeUpdateOutputElasticsearch(v UpdateOutputElasticsearch) error { - b, err := json.Marshal(v) +// ParseReadExceptionListSummaryResponse parses an HTTP response from a ReadExceptionListSummaryWithResponse call +func ParseReadExceptionListSummaryResponse(rsp *http.Response) (*ReadExceptionListSummaryResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &ReadExceptionListSummaryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// AsUpdateOutputRemoteElasticsearch returns the union data inside the UpdateOutputUnion as a UpdateOutputRemoteElasticsearch -func (t UpdateOutputUnion) AsUpdateOutputRemoteElasticsearch() (UpdateOutputRemoteElasticsearch, error) { - var body UpdateOutputRemoteElasticsearch - err := json.Unmarshal(t.union, &body) - return body, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Linux *int `json:"linux,omitempty"` + Macos *int `json:"macos,omitempty"` + Total *int `json:"total,omitempty"` + Windows *int `json:"windows,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// FromUpdateOutputRemoteElasticsearch overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputRemoteElasticsearch -func (t *UpdateOutputUnion) FromUpdateOutputRemoteElasticsearch(v UpdateOutputRemoteElasticsearch) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// MergeUpdateOutputRemoteElasticsearch performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputRemoteElasticsearch -func (t *UpdateOutputUnion) MergeUpdateOutputRemoteElasticsearch(v UpdateOutputRemoteElasticsearch) error { - b, err := json.Marshal(v) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// AsUpdateOutputLogstash returns the union data inside the UpdateOutputUnion as a UpdateOutputLogstash -func (t UpdateOutputUnion) AsUpdateOutputLogstash() (UpdateOutputLogstash, error) { - var body UpdateOutputLogstash - err := json.Unmarshal(t.union, &body) - return body, err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest -// FromUpdateOutputLogstash overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputLogstash -func (t *UpdateOutputUnion) FromUpdateOutputLogstash(v UpdateOutputLogstash) error { - b, err := json.Marshal(v) - t.union = b - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest -// MergeUpdateOutputLogstash performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputLogstash -func (t *UpdateOutputUnion) MergeUpdateOutputLogstash(v UpdateOutputLogstash) error { - b, err := json.Marshal(v) - if err != nil { - return err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} - -// AsUpdateOutputKafka returns the union data inside the UpdateOutputUnion as a UpdateOutputKafka -func (t UpdateOutputUnion) AsUpdateOutputKafka() (UpdateOutputKafka, error) { - var body UpdateOutputKafka - err := json.Unmarshal(t.union, &body) - return body, err -} - -// FromUpdateOutputKafka overwrites any union data inside the UpdateOutputUnion as the provided UpdateOutputKafka -func (t *UpdateOutputUnion) FromUpdateOutputKafka(v UpdateOutputKafka) error { - b, err := json.Marshal(v) - t.union = b - return err + return response, nil } -// MergeUpdateOutputKafka performs a merge with any union data inside the UpdateOutputUnion, using the provided UpdateOutputKafka -func (t *UpdateOutputUnion) MergeUpdateOutputKafka(v UpdateOutputKafka) error { - b, err := json.Marshal(v) +// ParseCreateSharedExceptionListResponse parses an HTTP response from a CreateSharedExceptionListWithResponse call +func ParseCreateSharedExceptionListResponse(rsp *http.Response) (*CreateSharedExceptionListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + 
return nil, err } - merged, err := runtime.JSONMerge(t.union, b) - t.union = merged - return err -} + response := &CreateSharedExceptionListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -func (t UpdateOutputUnion) MarshalJSON() ([]byte, error) { - b, err := t.union.MarshalJSON() - return b, err -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityExceptionsAPIExceptionList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -func (t *UpdateOutputUnion) UnmarshalJSON(b []byte) error { - err := t.union.UnmarshalJSON(b) - return err -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// Override default JSON handling for CreateConnectorConfig to handle AdditionalProperties and union -func (a *CreateConnectorConfig) UnmarshalJSON(b []byte) error { - err := a.union.UnmarshalJSON(b) - if err != nil { - return err - } - object := make(map[string]json.RawMessage) - err = json.Unmarshal(b, &object) - if err != nil { - return err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityExceptionsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } - } - return nil -} + response.JSON403 = &dest -// Override default JSON handling for CreateConnectorConfig to handle AdditionalProperties and union -func (a CreateConnectorConfig) MarshalJSON() ([]byte, error) { - var err error - b, err := a.union.MarshalJSON() - if err != nil { - return nil, err - } - object := make(map[string]json.RawMessage) - if a.union != nil { - err = json.Unmarshal(b, &object) - if err != nil { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - } + response.JSON409 = &dest - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityExceptionsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON500 = &dest + } - return json.Marshal(object) + + return response, nil } -// Override default JSON handling for CreateConnectorSecrets to handle AdditionalProperties and union -func (a *CreateConnectorSecrets) UnmarshalJSON(b []byte) error { - err := a.union.UnmarshalJSON(b) +// ParseGetFeaturesResponse parses an HTTP response from a GetFeaturesWithResponse call +func 
ParseGetFeaturesResponse(rsp *http.Response) (*GetFeaturesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - object := make(map[string]json.RawMessage) - err = json.Unmarshal(b, &object) - if err != nil { - return err + + response := &GetFeaturesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest + } - return nil + + return response, nil } -// Override default JSON handling for CreateConnectorSecrets to handle AdditionalProperties and union -func (a CreateConnectorSecrets) MarshalJSON() ([]byte, error) { - var err error - b, err := a.union.MarshalJSON() +// ParseGetFleetAgentDownloadSourcesResponse parses an HTTP response from a GetFleetAgentDownloadSourcesWithResponse call +func ParseGetFleetAgentDownloadSourcesResponse(rsp *http.Response) (*GetFleetAgentDownloadSourcesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - object := make(map[string]json.RawMessage) - if a.union != nil { - err = json.Unmarshal(b, &object) - if err != nil { + + response := &GetFleetAgentDownloadSourcesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + Host string `json:"host"` + Id string `json:"id"` + IsDefault *bool `json:"is_default,omitempty"` + Name string `json:"name"` + + // ProxyId The ID of the proxy to use for this download source. See the proxies API for more information. 
+ ProxyId *string `json:"proxy_id"` + Secrets *struct { + Ssl *struct { + Key *GetFleetAgentDownloadSources_200_Items_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - } + response.JSON200 = &dest - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON400 = &dest + } - return json.Marshal(object) + + return response, nil } -// Override default JSON handling for UpdateConnectorConfig to handle AdditionalProperties and union -func (a *UpdateConnectorConfig) UnmarshalJSON(b []byte) error { - err := a.union.UnmarshalJSON(b) +// ParsePostFleetAgentDownloadSourcesResponse parses an HTTP response from a PostFleetAgentDownloadSourcesWithResponse call +func ParsePostFleetAgentDownloadSourcesResponse(rsp *http.Response) (*PostFleetAgentDownloadSourcesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - object := make(map[string]json.RawMessage) - err = json.Unmarshal(b, &object) - if err != nil { - return err + + response := &PostFleetAgentDownloadSourcesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + Host string `json:"host"` + Id string `json:"id"` + IsDefault *bool `json:"is_default,omitempty"` + Name string `json:"name"` + + // ProxyId The ID of the proxy to use for this download source. See the proxies API for more information. 
+ ProxyId *string `json:"proxy_id"` + Secrets *struct { + Ssl *struct { + Key *PostFleetAgentDownloadSources_200_Item_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"item"` } - } - return nil -} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// Override default JSON handling for UpdateConnectorConfig to handle AdditionalProperties and union -func (a UpdateConnectorConfig) MarshalJSON() ([]byte, error) { - var err error - b, err := a.union.MarshalJSON() - if err != nil { - return nil, err - } - object := make(map[string]json.RawMessage) - if a.union != nil { - err = json.Unmarshal(b, &object) - if err != nil { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - } + response.JSON400 = &dest - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) - } } - return json.Marshal(object) + + return response, nil } -// Override default JSON handling for UpdateConnectorSecrets to handle AdditionalProperties and union -func (a *UpdateConnectorSecrets) UnmarshalJSON(b []byte) error { - err := a.union.UnmarshalJSON(b) +// ParseDeleteFleetAgentDownloadSourcesSourceidResponse parses an HTTP response from a DeleteFleetAgentDownloadSourcesSourceidWithResponse call +func ParseDeleteFleetAgentDownloadSourcesSourceidResponse(rsp *http.Response) (*DeleteFleetAgentDownloadSourcesSourceidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { - return err + return nil, err } - object := make(map[string]json.RawMessage) - err = json.Unmarshal(b, &object) - if err != nil { - return err + + response := &DeleteFleetAgentDownloadSourcesSourceidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if len(object) != 0 { - a.AdditionalProperties = make(map[string]interface{}) - for fieldName, fieldBuf := range object { - var fieldVal interface{} - err := json.Unmarshal(fieldBuf, &fieldVal) - if err != nil { - return fmt.Errorf("error unmarshaling field %s: %w", fieldName, err) - } - a.AdditionalProperties[fieldName] = fieldVal + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Id string `json:"id"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON400 = &dest + } - return nil + + return 
response, nil } -// Override default JSON handling for UpdateConnectorSecrets to handle AdditionalProperties and union -func (a UpdateConnectorSecrets) MarshalJSON() ([]byte, error) { - var err error - b, err := a.union.MarshalJSON() +// ParseGetFleetAgentDownloadSourcesSourceidResponse parses an HTTP response from a GetFleetAgentDownloadSourcesSourceidWithResponse call +func ParseGetFleetAgentDownloadSourcesSourceidResponse(rsp *http.Response) (*GetFleetAgentDownloadSourcesSourceidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - object := make(map[string]json.RawMessage) - if a.union != nil { - err = json.Unmarshal(b, &object) - if err != nil { + + response := &GetFleetAgentDownloadSourcesSourceidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + Host string `json:"host"` + Id string `json:"id"` + IsDefault *bool `json:"is_default,omitempty"` + Name string `json:"name"` + + // ProxyId The ID of the proxy to use for this download source. See the proxies API for more information. + ProxyId *string `json:"proxy_id"` + Secrets *struct { + Ssl *struct { + Key *GetFleetAgentDownloadSourcesSourceid_200_Item_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - } + response.JSON200 = &dest - for fieldName, field := range a.AdditionalProperties { - object[fieldName], err = json.Marshal(field) - if err != nil { - return nil, fmt.Errorf("error marshaling '%s': %w", fieldName, err) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON400 = &dest + } - return json.Marshal(object) + + return response, nil } -// RequestEditorFn is the function signature for the RequestEditor callback function -type RequestEditorFn func(ctx context.Context, req *http.Request) error +// ParsePutFleetAgentDownloadSourcesSourceidResponse parses an HTTP response from a PutFleetAgentDownloadSourcesSourceidWithResponse call +func ParsePutFleetAgentDownloadSourcesSourceidResponse(rsp *http.Response) (*PutFleetAgentDownloadSourcesSourceidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } -// Doer performs HTTP requests. -// -// The standard http.Client implements this interface. -type HttpRequestDoer interface { - Do(req *http.Request) (*http.Response, error) -} + response := &PutFleetAgentDownloadSourcesSourceidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// Client which conforms to the OpenAPI3 specification for this service. -type Client struct { - // The endpoint of the server conforming to this interface, with scheme, - // https://api.deepmap.com for example. 
This can contain a path relative - // to the server, such as https://api.deepmap.com/dev-test, and all the - // paths in the swagger spec will be appended to the server. - Server string + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + Host string `json:"host"` + Id string `json:"id"` + IsDefault *bool `json:"is_default,omitempty"` + Name string `json:"name"` + + // ProxyId The ID of the proxy to use for this download source. See the proxies API for more information. + ProxyId *string `json:"proxy_id"` + Secrets *struct { + Ssl *struct { + Key *PutFleetAgentDownloadSourcesSourceid_200_Item_Secrets_Ssl_Key `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"secrets,omitempty"` + Ssl *struct { + Certificate *string `json:"certificate,omitempty"` + CertificateAuthorities *[]string `json:"certificate_authorities,omitempty"` + Key *string `json:"key,omitempty"` + } `json:"ssl,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // Doer for performing requests, typically a *http.Client with any - // customized settings, such as certificate chains. - Client HttpRequestDoer + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // A list of callbacks for modifying requests which are generated before sending over - // the network. - RequestEditors []RequestEditorFn + } + + return response, nil } -// ClientOption allows setting custom parameters during construction -type ClientOption func(*Client) error +// ParseGetFleetAgentPoliciesResponse parses an HTTP response from a GetFleetAgentPoliciesWithResponse call +func ParseGetFleetAgentPoliciesResponse(rsp *http.Response) (*GetFleetAgentPoliciesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } -// Creates a new Client, with reasonable defaults -func NewClient(server string, opts ...ClientOption) (*Client, error) { - // create a client with sane default values - client := Client{ - Server: server, + response := &GetFleetAgentPoliciesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - // mutate client and add all optional params - for _, o := range opts { - if err := o(&client); err != nil { + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []AgentPolicy `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - } - // ensure the server URL always has a trailing slash - if !strings.HasSuffix(client.Server, "/") { - client.Server += "/" - } - // create httpClient, if not already present - if client.Client == nil { - client.Client = &http.Client{} - } - return &client, nil -} + response.JSON200 = &dest -// WithHTTPClient allows overriding the default Doer, which is -// automatically created using http.Client. This is useful for tests. 
-func WithHTTPClient(doer HttpRequestDoer) ClientOption { - return func(c *Client) error { - c.Client = doer - return nil - } -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// WithRequestEditorFn allows setting up a callback function, which will be -// called right before sending the request. This can be used to mutate the request. -func WithRequestEditorFn(fn RequestEditorFn) ClientOption { - return func(c *Client) error { - c.RequestEditors = append(c.RequestEditors, fn) - return nil } + + return response, nil } -// The interface specification for the client above. -type ClientInterface interface { - // DeleteAgentConfigurationWithBody request with any body - DeleteAgentConfigurationWithBody(ctx context.Context, params *DeleteAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParsePostFleetAgentPoliciesResponse parses an HTTP response from a PostFleetAgentPoliciesWithResponse call +func ParsePostFleetAgentPoliciesResponse(rsp *http.Response) (*PostFleetAgentPoliciesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - DeleteAgentConfiguration(ctx context.Context, params *DeleteAgentConfigurationParams, body DeleteAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &PostFleetAgentPoliciesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // GetAgentConfigurations request - GetAgentConfigurations(ctx context.Context, params *GetAgentConfigurationsParams, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item AgentPolicy `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // CreateUpdateAgentConfigurationWithBody request with any body - CreateUpdateAgentConfigurationWithBody(ctx context.Context, params *CreateUpdateAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - CreateUpdateAgentConfiguration(ctx context.Context, params *CreateUpdateAgentConfigurationParams, body CreateUpdateAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + } - // GetFleetAgentPolicies request - GetFleetAgentPolicies(ctx context.Context, params *GetFleetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil +} - // PostFleetAgentPoliciesWithBody request with any body - PostFleetAgentPoliciesWithBody(ctx context.Context, params 
*PostFleetAgentPoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParsePostFleetAgentPoliciesBulkGetResponse parses an HTTP response from a PostFleetAgentPoliciesBulkGetWithResponse call +func ParsePostFleetAgentPoliciesBulkGetResponse(rsp *http.Response) (*PostFleetAgentPoliciesBulkGetResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - PostFleetAgentPolicies(ctx context.Context, params *PostFleetAgentPoliciesParams, body PostFleetAgentPoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &PostFleetAgentPoliciesBulkGetResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // PostFleetAgentPoliciesDeleteWithBody request with any body - PostFleetAgentPoliciesDeleteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + AdvancedSettings *struct { + AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory"` + AgentDownloadTimeout interface{} `json:"agent_download_timeout"` + AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs"` + AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval"` + AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles"` + AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes"` + AgentLoggingLevel interface{} `json:"agent_logging_level"` + AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period"` + AgentLoggingToFiles interface{} `json:"agent_logging_to_files"` + AgentMonitoringRuntimeExperimental interface{} `json:"agent_monitoring_runtime_experimental"` + } `json:"advanced_settings,omitempty"` + AgentFeatures *[]struct { + Enabled bool `json:"enabled"` + Name string `json:"name"` + } `json:"agent_features,omitempty"` + Agentless *struct { + CloudConnectors *struct { + Enabled bool `json:"enabled"` + TargetCsp *string `json:"target_csp,omitempty"` + } `json:"cloud_connectors,omitempty"` + Resources *struct { + Requests *struct { + Cpu *string `json:"cpu,omitempty"` + Memory *string `json:"memory,omitempty"` + } `json:"requests,omitempty"` + } `json:"resources,omitempty"` + } `json:"agentless,omitempty"` + Agents *float32 `json:"agents,omitempty"` + DataOutputId *string `json:"data_output_id"` + Description *string `json:"description,omitempty"` + DownloadSourceId *string `json:"download_source_id"` + FleetServerHostId *string `json:"fleet_server_host_id"` + + // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. + GlobalDataTags *[]struct { + Name string `json:"name"` + Value PostFleetAgentPoliciesBulkGet_200_Items_GlobalDataTags_Value `json:"value"` + } `json:"global_data_tags,omitempty"` + HasFleetServer *bool `json:"has_fleet_server,omitempty"` + Id string `json:"id"` + InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` + IsManaged bool `json:"is_managed"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + + // IsProtected Indicates whether the agent policy has tamper protection enabled. Default false. 
+ IsProtected bool `json:"is_protected"` + + // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled + KeepMonitoringAlive *bool `json:"keep_monitoring_alive"` + MonitoringDiagnostics *struct { + Limit *struct { + Burst *float32 `json:"burst,omitempty"` + Interval *string `json:"interval,omitempty"` + } `json:"limit,omitempty"` + Uploader *struct { + InitDur *string `json:"init_dur,omitempty"` + MaxDur *string `json:"max_dur,omitempty"` + MaxRetries *float32 `json:"max_retries,omitempty"` + } `json:"uploader,omitempty"` + } `json:"monitoring_diagnostics,omitempty"` + MonitoringEnabled *[]PostFleetAgentPoliciesBulkGet200ItemsMonitoringEnabled `json:"monitoring_enabled,omitempty"` + MonitoringHttp *struct { + Buffer *struct { + Enabled *bool `json:"enabled,omitempty"` + } `json:"buffer,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + Host *string `json:"host,omitempty"` + Port *float32 `json:"port,omitempty"` + } `json:"monitoring_http,omitempty"` + MonitoringOutputId *string `json:"monitoring_output_id"` + MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` + Name string `json:"name"` + Namespace string `json:"namespace"` + + // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *map[string]interface{} `json:"overrides"` + PackagePolicies *PostFleetAgentPoliciesBulkGet_200_Items_PackagePolicies `json:"package_policies,omitempty"` + RequiredVersions *[]struct { + // Percentage Target percentage of agents to auto upgrade + Percentage float32 `json:"percentage"` + + // Version Target version for automatic agent upgrade + Version string `json:"version"` + } `json:"required_versions"` + Revision float32 `json:"revision"` + SchemaVersion *string `json:"schema_version,omitempty"` + SpaceIds *[]string `json:"space_ids,omitempty"` + Status PostFleetAgentPoliciesBulkGet200ItemsStatus `json:"status"` + + // SupportsAgentless Indicates whether the agent policy supports agentless integrations. 
+ SupportsAgentless *bool `json:"supports_agentless"` + UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` + UnprivilegedAgents *float32 `json:"unprivileged_agents,omitempty"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Version *string `json:"version,omitempty"` + } `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - PostFleetAgentPoliciesDelete(ctx context.Context, body PostFleetAgentPoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // GetFleetAgentPoliciesAgentpolicyid request - GetFleetAgentPoliciesAgentpolicyid(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) + } - // PutFleetAgentPoliciesAgentpolicyidWithBody request with any body - PutFleetAgentPoliciesAgentpolicyidWithBody(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil +} - PutFleetAgentPoliciesAgentpolicyid(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, body PutFleetAgentPoliciesAgentpolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParsePostFleetAgentPoliciesDeleteResponse parses an HTTP response from a PostFleetAgentPoliciesDeleteWithResponse call +func ParsePostFleetAgentPoliciesDeleteResponse(rsp *http.Response) (*PostFleetAgentPoliciesDeleteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // GetFleetEnrollmentApiKeys request - GetFleetEnrollmentApiKeys(ctx context.Context, params *GetFleetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &PostFleetAgentPoliciesDeleteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // GetFleetEpmPackages request - GetFleetEpmPackages(ctx context.Context, params *GetFleetEpmPackagesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Id string `json:"id"` + Name string `json:"name"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // PostFleetEpmPackagesWithBody request with any body - PostFleetEpmPackagesWithBody(ctx context.Context, params *PostFleetEpmPackagesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); 
err != nil { + return nil, err + } + response.JSON400 = &dest - // DeleteFleetEpmPackagesPkgnamePkgversion request - DeleteFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*http.Response, error) + } - // GetFleetEpmPackagesPkgnamePkgversion request - GetFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil +} - // PostFleetEpmPackagesPkgnamePkgversionWithBody request with any body - PostFleetEpmPackagesPkgnamePkgversionWithBody(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParsePostFleetAgentPoliciesOutputsResponse parses an HTTP response from a PostFleetAgentPoliciesOutputsWithResponse call +func ParsePostFleetAgentPoliciesOutputsResponse(rsp *http.Response) (*PostFleetAgentPoliciesOutputsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - PostFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, body PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &PostFleetAgentPoliciesOutputsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // GetFleetFleetServerHosts request - GetFleetFleetServerHosts(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + AgentPolicyId *string `json:"agentPolicyId,omitempty"` + Data struct { + Integrations *[]struct { + Id *string `json:"id,omitempty"` + IntegrationPolicyName *string `json:"integrationPolicyName,omitempty"` + Name *string `json:"name,omitempty"` + PkgName *string `json:"pkgName,omitempty"` + } `json:"integrations,omitempty"` + Output struct { + Id string `json:"id"` + Name string `json:"name"` + } `json:"output"` + } `json:"data"` + Monitoring struct { + Output struct { + Id string `json:"id"` + Name string `json:"name"` + } `json:"output"` + } `json:"monitoring"` + } `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // PostFleetFleetServerHostsWithBody request with any body - PostFleetFleetServerHostsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - PostFleetFleetServerHosts(ctx context.Context, body PostFleetFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + } - // DeleteFleetFleetServerHostsItemid request - DeleteFleetFleetServerHostsItemid(ctx context.Context, 
itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil +} - // GetFleetFleetServerHostsItemid request - GetFleetFleetServerHostsItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParseGetFleetAgentPoliciesAgentpolicyidResponse parses an HTTP response from a GetFleetAgentPoliciesAgentpolicyidWithResponse call +func ParseGetFleetAgentPoliciesAgentpolicyidResponse(rsp *http.Response) (*GetFleetAgentPoliciesAgentpolicyidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // PutFleetFleetServerHostsItemidWithBody request with any body - PutFleetFleetServerHostsItemidWithBody(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &GetFleetAgentPoliciesAgentpolicyidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - PutFleetFleetServerHostsItemid(ctx context.Context, itemId string, body PutFleetFleetServerHostsItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item AgentPolicy `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // GetFleetOutputs request - GetFleetOutputs(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // PostFleetOutputsWithBody request with any body - PostFleetOutputsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + } - PostFleetOutputs(ctx context.Context, body PostFleetOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil +} - // DeleteFleetOutputsOutputid request - DeleteFleetOutputsOutputid(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParsePutFleetAgentPoliciesAgentpolicyidResponse parses an HTTP response from a PutFleetAgentPoliciesAgentpolicyidWithResponse call +func ParsePutFleetAgentPoliciesAgentpolicyidResponse(rsp *http.Response) (*PutFleetAgentPoliciesAgentpolicyidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // GetFleetOutputsOutputid request - GetFleetOutputsOutputid(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &PutFleetAgentPoliciesAgentpolicyidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // PutFleetOutputsOutputidWithBody request with any body - PutFleetOutputsOutputidWithBody(ctx context.Context, outputId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item AgentPolicy `json:"item"` + } + if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - PutFleetOutputsOutputid(ctx context.Context, outputId string, body PutFleetOutputsOutputidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // GetFleetPackagePolicies request - GetFleetPackagePolicies(ctx context.Context, params *GetFleetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) + } - // PostFleetPackagePoliciesWithBody request with any body - PostFleetPackagePoliciesWithBody(ctx context.Context, params *PostFleetPackagePoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil +} - PostFleetPackagePolicies(ctx context.Context, params *PostFleetPackagePoliciesParams, body PostFleetPackagePoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParseGetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse parses an HTTP response from a GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusWithResponse call +func ParseGetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse(rsp *http.Response) (*GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // DeleteFleetPackagePoliciesPackagepolicyid request - DeleteFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *DeleteFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &GetFleetAgentPoliciesAgentpolicyidAutoUpgradeAgentsStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // GetFleetPackagePoliciesPackagepolicyid request - GetFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *GetFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + CurrentVersions []struct { + Agents float32 `json:"agents"` + FailedUpgradeAgents float32 `json:"failedUpgradeAgents"` + Version string `json:"version"` + } `json:"currentVersions"` + TotalAgents float32 `json:"totalAgents"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // PutFleetPackagePoliciesPackagepolicyidWithBody request with any body - PutFleetPackagePoliciesPackagepolicyidWithBody(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 
`json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - PutFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, body PutFleetPackagePoliciesPackagepolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + } - // PostParametersWithBody request with any body - PostParametersWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil +} - PostParameters(ctx context.Context, body PostParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParsePostFleetAgentPoliciesAgentpolicyidCopyResponse parses an HTTP response from a PostFleetAgentPoliciesAgentpolicyidCopyWithResponse call +func ParsePostFleetAgentPoliciesAgentpolicyidCopyResponse(rsp *http.Response) (*PostFleetAgentPoliciesAgentpolicyidCopyResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // DeleteParameter request - DeleteParameter(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &PostFleetAgentPoliciesAgentpolicyidCopyResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // GetParameter request - GetParameter(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + AdvancedSettings *struct { + AgentDownloadTargetDirectory interface{} `json:"agent_download_target_directory"` + AgentDownloadTimeout interface{} `json:"agent_download_timeout"` + AgentLimitsGoMaxProcs interface{} `json:"agent_limits_go_max_procs"` + AgentLoggingFilesInterval interface{} `json:"agent_logging_files_interval"` + AgentLoggingFilesKeepfiles interface{} `json:"agent_logging_files_keepfiles"` + AgentLoggingFilesRotateeverybytes interface{} `json:"agent_logging_files_rotateeverybytes"` + AgentLoggingLevel interface{} `json:"agent_logging_level"` + AgentLoggingMetricsPeriod interface{} `json:"agent_logging_metrics_period"` + AgentLoggingToFiles interface{} `json:"agent_logging_to_files"` + AgentMonitoringRuntimeExperimental interface{} `json:"agent_monitoring_runtime_experimental"` + } `json:"advanced_settings,omitempty"` + AgentFeatures *[]struct { + Enabled bool `json:"enabled"` + Name string `json:"name"` + } `json:"agent_features,omitempty"` + Agentless *struct { + CloudConnectors *struct { + Enabled bool `json:"enabled"` + TargetCsp *string `json:"target_csp,omitempty"` + } `json:"cloud_connectors,omitempty"` + Resources *struct { + Requests *struct { + Cpu *string `json:"cpu,omitempty"` + Memory *string `json:"memory,omitempty"` + } `json:"requests,omitempty"` + } `json:"resources,omitempty"` + } `json:"agentless,omitempty"` + Agents *float32 `json:"agents,omitempty"` + DataOutputId *string `json:"data_output_id"` + Description *string `json:"description,omitempty"` + DownloadSourceId *string `json:"download_source_id"` + FleetServerHostId *string `json:"fleet_server_host_id"` + + // GlobalDataTags User defined data tags that are added to all of the inputs. The values can be strings or numbers. 
+ GlobalDataTags *[]struct { + Name string `json:"name"` + Value PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_GlobalDataTags_Value `json:"value"` + } `json:"global_data_tags,omitempty"` + HasFleetServer *bool `json:"has_fleet_server,omitempty"` + Id string `json:"id"` + InactivityTimeout *float32 `json:"inactivity_timeout,omitempty"` + IsDefault *bool `json:"is_default,omitempty"` + IsDefaultFleetServer *bool `json:"is_default_fleet_server,omitempty"` + IsManaged bool `json:"is_managed"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + + // IsProtected Indicates whether the agent policy has tamper protection enabled. Default false. + IsProtected bool `json:"is_protected"` + + // KeepMonitoringAlive When set to true, monitoring will be enabled but logs/metrics collection will be disabled + KeepMonitoringAlive *bool `json:"keep_monitoring_alive"` + MonitoringDiagnostics *struct { + Limit *struct { + Burst *float32 `json:"burst,omitempty"` + Interval *string `json:"interval,omitempty"` + } `json:"limit,omitempty"` + Uploader *struct { + InitDur *string `json:"init_dur,omitempty"` + MaxDur *string `json:"max_dur,omitempty"` + MaxRetries *float32 `json:"max_retries,omitempty"` + } `json:"uploader,omitempty"` + } `json:"monitoring_diagnostics,omitempty"` + MonitoringEnabled *[]PostFleetAgentPoliciesAgentpolicyidCopy200ItemMonitoringEnabled `json:"monitoring_enabled,omitempty"` + MonitoringHttp *struct { + Buffer *struct { + Enabled *bool `json:"enabled,omitempty"` + } `json:"buffer,omitempty"` + Enabled *bool `json:"enabled,omitempty"` + Host *string `json:"host,omitempty"` + Port *float32 `json:"port,omitempty"` + } `json:"monitoring_http,omitempty"` + MonitoringOutputId *string `json:"monitoring_output_id"` + MonitoringPprofEnabled *bool `json:"monitoring_pprof_enabled,omitempty"` + Name string `json:"name"` + Namespace string `json:"namespace"` + + // Overrides Override settings that are defined in the agent policy. Input settings cannot be overridden. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *map[string]interface{} `json:"overrides"` + PackagePolicies *PostFleetAgentPoliciesAgentpolicyidCopy_200_Item_PackagePolicies `json:"package_policies,omitempty"` + RequiredVersions *[]struct { + // Percentage Target percentage of agents to auto upgrade + Percentage float32 `json:"percentage"` + + // Version Target version for automatic agent upgrade + Version string `json:"version"` + } `json:"required_versions"` + Revision float32 `json:"revision"` + SchemaVersion *string `json:"schema_version,omitempty"` + SpaceIds *[]string `json:"space_ids,omitempty"` + Status PostFleetAgentPoliciesAgentpolicyidCopy200ItemStatus `json:"status"` + + // SupportsAgentless Indicates whether the agent policy supports agentless integrations. 
+ SupportsAgentless *bool `json:"supports_agentless"` + UnenrollTimeout *float32 `json:"unenroll_timeout,omitempty"` + UnprivilegedAgents *float32 `json:"unprivileged_agents,omitempty"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Version *string `json:"version,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // PutParameterWithBody request with any body - PutParameterWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - PutParameter(ctx context.Context, id string, body PutParameterJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + } - // DeleteActionsConnectorId request - DeleteActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil +} - // GetActionsConnectorId request - GetActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParseGetFleetAgentPoliciesAgentpolicyidDownloadResponse parses an HTTP response from a GetFleetAgentPoliciesAgentpolicyidDownloadWithResponse call +func ParseGetFleetAgentPoliciesAgentpolicyidDownloadResponse(rsp *http.Response) (*GetFleetAgentPoliciesAgentpolicyidDownloadResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // PostActionsConnectorIdWithBody request with any body - PostActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &GetFleetAgentPoliciesAgentpolicyidDownloadResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - PostActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest string + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // PutActionsConnectorIdWithBody request with any body - PutActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - PutActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) 
(*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - // GetActionsConnectors request - GetActionsConnectors(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) + } - // GetAllDataViewsDefault request - GetAllDataViewsDefault(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil +} - // CreateDataViewDefaultwWithBody request with any body - CreateDataViewDefaultwWithBody(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParseGetFleetAgentPoliciesAgentpolicyidFullResponse parses an HTTP response from a GetFleetAgentPoliciesAgentpolicyidFullWithResponse call +func ParseGetFleetAgentPoliciesAgentpolicyidFullResponse(rsp *http.Response) (*GetFleetAgentPoliciesAgentpolicyidFullResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - CreateDataViewDefaultw(ctx context.Context, spaceId SpaceId, body CreateDataViewDefaultwJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &GetFleetAgentPoliciesAgentpolicyidFullResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // DeleteDataViewDefault request - DeleteDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item GetFleetAgentPoliciesAgentpolicyidFull_200_Item `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // GetDataViewDefault request - GetDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // UpdateDataViewDefaultWithBody request with any body - UpdateDataViewDefaultWithBody(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + } - UpdateDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil +} - // PostMaintenanceWindowWithBody request with any body - PostMaintenanceWindowWithBody(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) +// ParseGetFleetAgentPoliciesAgentpolicyidOutputsResponse parses an HTTP response from a 
GetFleetAgentPoliciesAgentpolicyidOutputsWithResponse call +func ParseGetFleetAgentPoliciesAgentpolicyidOutputsResponse(rsp *http.Response) (*GetFleetAgentPoliciesAgentpolicyidOutputsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - PostMaintenanceWindow(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + response := &GetFleetAgentPoliciesAgentpolicyidOutputsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // DeleteMaintenanceWindowId request - DeleteMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + AgentPolicyId *string `json:"agentPolicyId,omitempty"` + Data struct { + Integrations *[]struct { + Id *string `json:"id,omitempty"` + IntegrationPolicyName *string `json:"integrationPolicyName,omitempty"` + Name *string `json:"name,omitempty"` + PkgName *string `json:"pkgName,omitempty"` + } `json:"integrations,omitempty"` + Output struct { + Id string `json:"id"` + Name string `json:"name"` + } `json:"output"` + } `json:"data"` + Monitoring struct { + Output struct { + Id string `json:"id"` + Name string `json:"name"` + } `json:"output"` + } `json:"monitoring"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // GetMaintenanceWindowId request - GetMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // PatchMaintenanceWindowIdWithBody request with any body - PatchMaintenanceWindowIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) + } - PatchMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) + return response, nil } -func (c *Client) DeleteAgentConfigurationWithBody(ctx context.Context, params *DeleteAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteAgentConfigurationRequestWithBody(c.Server, params, contentType, body) +// ParseGetFleetAgentStatusResponse parses an HTTP response from a GetFleetAgentStatusWithResponse call +func ParseGetFleetAgentStatusResponse(rsp *http.Response) (*GetFleetAgentStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) DeleteAgentConfiguration(ctx context.Context, params *DeleteAgentConfigurationParams, body 
DeleteAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteAgentConfigurationRequest(c.Server, params, body) - if err != nil { - return nil, err + response := &GetFleetAgentStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Results struct { + Active float32 `json:"active"` + All float32 `json:"all"` + Error float32 `json:"error"` + Events float32 `json:"events"` + Inactive float32 `json:"inactive"` + Offline float32 `json:"offline"` + Online float32 `json:"online"` + Orphaned *float32 `json:"orphaned,omitempty"` + Other float32 `json:"other"` + Unenrolled float32 `json:"unenrolled"` + Uninstalled *float32 `json:"uninstalled,omitempty"` + Updating float32 `json:"updating"` + } `json:"results"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) GetAgentConfigurations(ctx context.Context, params *GetAgentConfigurationsParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetAgentConfigurationsRequest(c.Server, params) +// ParseGetFleetAgentStatusDataResponse parses an HTTP response from a GetFleetAgentStatusDataWithResponse call +func ParseGetFleetAgentStatusDataResponse(rsp *http.Response) (*GetFleetAgentStatusDataResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) CreateUpdateAgentConfigurationWithBody(ctx context.Context, params *CreateUpdateAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateUpdateAgentConfigurationRequestWithBody(c.Server, params, contentType, body) - if err != nil { - return nil, err + response := &GetFleetAgentStatusDataResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + DataPreview []interface{} `json:"dataPreview"` + Items []map[string]struct { + Data bool `json:"data"` + } `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 
`json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) CreateUpdateAgentConfiguration(ctx context.Context, params *CreateUpdateAgentConfigurationParams, body CreateUpdateAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateUpdateAgentConfigurationRequest(c.Server, params, body) +// ParseGetFleetAgentsResponse parses an HTTP response from a GetFleetAgentsWithResponse call +func ParseGetFleetAgentsResponse(rsp *http.Response) (*GetFleetAgentsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) GetFleetAgentPolicies(ctx context.Context, params *GetFleetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetAgentPoliciesRequest(c.Server, params) - if err != nil { - return nil, err + response := &GetFleetAgentsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + AccessApiKey *string `json:"access_api_key,omitempty"` + AccessApiKeyId *string `json:"access_api_key_id,omitempty"` + Active bool `json:"active"` + Agent *GetFleetAgents_200_Items_Agent `json:"agent,omitempty"` + AuditUnenrolledReason *string `json:"audit_unenrolled_reason,omitempty"` + Components *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Status GetFleetAgents200ItemsComponentsStatus `json:"status"` + Type string `json:"type"` + Units *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Payload *map[string]interface{} `json:"payload,omitempty"` + Status GetFleetAgents200ItemsComponentsUnitsStatus `json:"status"` + Type GetFleetAgents200ItemsComponentsUnitsType `json:"type"` + } `json:"units,omitempty"` + } `json:"components,omitempty"` + DefaultApiKey *string `json:"default_api_key,omitempty"` + DefaultApiKeyHistory *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"default_api_key_history,omitempty"` + DefaultApiKeyId *string `json:"default_api_key_id,omitempty"` + EnrolledAt string `json:"enrolled_at"` + Id string `json:"id"` + LastCheckin *string `json:"last_checkin,omitempty"` + LastCheckinMessage *string `json:"last_checkin_message,omitempty"` + LastCheckinStatus *GetFleetAgents200ItemsLastCheckinStatus `json:"last_checkin_status,omitempty"` + LastKnownStatus *GetFleetAgents200ItemsLastKnownStatus `json:"last_known_status,omitempty"` + LocalMetadata map[string]interface{} `json:"local_metadata"` + Metrics *struct { + CpuAvg *float32 `json:"cpu_avg,omitempty"` + MemorySizeByteAvg *float32 `json:"memory_size_byte_avg,omitempty"` + } `json:"metrics,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + Outputs *map[string]struct { + ApiKeyId *string `json:"api_key_id,omitempty"` + ToRetireApiKeyIds *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"to_retire_api_key_ids,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"outputs,omitempty"` 
+ Packages []string `json:"packages"` + PolicyId *string `json:"policy_id,omitempty"` + PolicyRevision *float32 `json:"policy_revision"` + Sort *[]interface{} `json:"sort,omitempty"` + Status *GetFleetAgents200ItemsStatus `json:"status,omitempty"` + Tags *[]string `json:"tags,omitempty"` + Type GetFleetAgents200ItemsType `json:"type"` + UnenrolledAt *string `json:"unenrolled_at,omitempty"` + UnenrollmentStartedAt *string `json:"unenrollment_started_at,omitempty"` + UnhealthyReason *[]GetFleetAgents200ItemsUnhealthyReason `json:"unhealthy_reason"` + UpgradeAttempts *[]string `json:"upgrade_attempts"` + UpgradeDetails *struct { + ActionId string `json:"action_id"` + Metadata *struct { + DownloadPercent *float32 `json:"download_percent,omitempty"` + DownloadRate *float32 `json:"download_rate,omitempty"` + ErrorMsg *string `json:"error_msg,omitempty"` + FailedState *GetFleetAgents200ItemsUpgradeDetailsMetadataFailedState `json:"failed_state,omitempty"` + RetryErrorMsg *string `json:"retry_error_msg,omitempty"` + RetryUntil *string `json:"retry_until,omitempty"` + ScheduledAt *string `json:"scheduled_at,omitempty"` + } `json:"metadata,omitempty"` + State GetFleetAgents200ItemsUpgradeDetailsState `json:"state"` + TargetVersion string `json:"target_version"` + } `json:"upgrade_details"` + UpgradeStartedAt *string `json:"upgrade_started_at"` + UpgradedAt *string `json:"upgraded_at"` + UserProvidedMetadata *map[string]interface{} `json:"user_provided_metadata,omitempty"` + } `json:"items"` + NextSearchAfter *string `json:"nextSearchAfter,omitempty"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Pit *string `json:"pit,omitempty"` + StatusSummary *map[string]float32 `json:"statusSummary,omitempty"` + Total float32 `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PostFleetAgentPoliciesWithBody(ctx context.Context, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetAgentPoliciesRequestWithBody(c.Server, params, contentType, body) +// ParsePostFleetAgentsResponse parses an HTTP response from a PostFleetAgentsWithResponse call +func ParsePostFleetAgentsResponse(rsp *http.Response) (*PostFleetAgentsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PostFleetAgentPolicies(ctx context.Context, params *PostFleetAgentPoliciesParams, body PostFleetAgentPoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetAgentPoliciesRequest(c.Server, params, body) - if err != nil { - return nil, err + response := &PostFleetAgentsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := 
c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []string `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PostFleetAgentPoliciesDeleteWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetAgentPoliciesDeleteRequestWithBody(c.Server, contentType, body) +// ParseGetFleetAgentsActionStatusResponse parses an HTTP response from a GetFleetAgentsActionStatusWithResponse call +func ParseGetFleetAgentsActionStatusResponse(rsp *http.Response) (*GetFleetAgentsActionStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PostFleetAgentPoliciesDelete(ctx context.Context, body PostFleetAgentPoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetAgentPoliciesDeleteRequest(c.Server, body) - if err != nil { - return nil, err + response := &GetFleetAgentsActionStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + ActionId string `json:"actionId"` + CancellationTime *string `json:"cancellationTime,omitempty"` + CompletionTime *string `json:"completionTime,omitempty"` + + // CreationTime creation time of action + CreationTime string `json:"creationTime"` + Expiration *string `json:"expiration,omitempty"` + HasRolloutPeriod *bool `json:"hasRolloutPeriod,omitempty"` + IsAutomatic *bool `json:"is_automatic,omitempty"` + LatestErrors *[]struct { + AgentId string `json:"agentId"` + Error string `json:"error"` + Hostname *string `json:"hostname,omitempty"` + Timestamp string `json:"timestamp"` + } `json:"latestErrors,omitempty"` + + // NbAgentsAck number of agents that acknowledged the action + NbAgentsAck float32 `json:"nbAgentsAck"` + + // NbAgentsActionCreated number of agents included in action from kibana + NbAgentsActionCreated float32 `json:"nbAgentsActionCreated"` + + // NbAgentsActioned number of agents actioned + NbAgentsActioned float32 `json:"nbAgentsActioned"` + + // NbAgentsFailed number of agents that failed to execute the action + NbAgentsFailed float32 `json:"nbAgentsFailed"` + + // NewPolicyId new policy id (POLICY_REASSIGN action) + NewPolicyId *string `json:"newPolicyId,omitempty"` + + // PolicyId policy id (POLICY_CHANGE action) + PolicyId *string `json:"policyId,omitempty"` + + // Revision new policy revision 
(POLICY_CHANGE action) + Revision *float32 `json:"revision,omitempty"` + + // StartTime start time of action (scheduled actions) + StartTime *string `json:"startTime,omitempty"` + Status GetFleetAgentsActionStatus200ItemsStatus `json:"status"` + Type GetFleetAgentsActionStatus200ItemsType `json:"type"` + + // Version agent version number (UPGRADE action) + Version *string `json:"version,omitempty"` + } `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) GetFleetAgentPoliciesAgentpolicyid(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetAgentPoliciesAgentpolicyidRequest(c.Server, agentPolicyId, params) +// ParsePostFleetAgentsActionsActionidCancelResponse parses an HTTP response from a PostFleetAgentsActionsActionidCancelWithResponse call +func ParsePostFleetAgentsActionsActionidCancelResponse(rsp *http.Response) (*PostFleetAgentsActionsActionidCancelResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PutFleetAgentPoliciesAgentpolicyidWithBody(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutFleetAgentPoliciesAgentpolicyidRequestWithBody(c.Server, agentPolicyId, params, contentType, body) - if err != nil { - return nil, err + response := &PostFleetAgentsActionsActionidCancelResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + AckData interface{} `json:"ack_data"` + Agents *[]string `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + Data interface{} `json:"data"` + Expiration *string `json:"expiration,omitempty"` + Id string `json:"id"` + MinimumExecutionDuration *float32 `json:"minimum_execution_duration,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + RolloutDurationSeconds *float32 `json:"rollout_duration_seconds,omitempty"` + SentAt *string `json:"sent_at,omitempty"` + SourceUri *string `json:"source_uri,omitempty"` + StartTime *string `json:"start_time,omitempty"` + Total *float32 `json:"total,omitempty"` + Type string `json:"type"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} 
`json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PutFleetAgentPoliciesAgentpolicyid(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, body PutFleetAgentPoliciesAgentpolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutFleetAgentPoliciesAgentpolicyidRequest(c.Server, agentPolicyId, params, body) +// ParseGetFleetAgentsAvailableVersionsResponse parses an HTTP response from a GetFleetAgentsAvailableVersionsWithResponse call +func ParseGetFleetAgentsAvailableVersionsResponse(rsp *http.Response) (*GetFleetAgentsAvailableVersionsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + response := &GetFleetAgentsAvailableVersionsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return c.Client.Do(req) + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []string `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil } -func (c *Client) GetFleetEnrollmentApiKeys(ctx context.Context, params *GetFleetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetEnrollmentApiKeysRequest(c.Server, params) +// ParsePostFleetAgentsBulkReassignResponse parses an HTTP response from a PostFleetAgentsBulkReassignWithResponse call +func ParsePostFleetAgentsBulkReassignResponse(rsp *http.Response) (*PostFleetAgentsBulkReassignResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) GetFleetEpmPackages(ctx context.Context, params *GetFleetEpmPackagesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetEpmPackagesRequest(c.Server, params) - if err != nil { - return nil, err + response := &PostFleetAgentsBulkReassignResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + ActionId string `json:"actionId"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PostFleetEpmPackagesWithBody(ctx context.Context, params *PostFleetEpmPackagesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetEpmPackagesRequestWithBody(c.Server, params, contentType, body) +// ParsePostFleetAgentsBulkRequestDiagnosticsResponse parses an HTTP response from a PostFleetAgentsBulkRequestDiagnosticsWithResponse call +func ParsePostFleetAgentsBulkRequestDiagnosticsResponse(rsp *http.Response) (*PostFleetAgentsBulkRequestDiagnosticsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) DeleteFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteFleetEpmPackagesPkgnamePkgversionRequest(c.Server, pkgName, pkgVersion, params) - if err != nil { - return nil, err + response := &PostFleetAgentsBulkRequestDiagnosticsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + ActionId string `json:"actionId"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) GetFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetEpmPackagesPkgnamePkgversionRequest(c.Server, pkgName, pkgVersion, params) +// ParsePostFleetAgentsBulkUnenrollResponse parses an HTTP response from a PostFleetAgentsBulkUnenrollWithResponse call +func ParsePostFleetAgentsBulkUnenrollResponse(rsp *http.Response) (*PostFleetAgentsBulkUnenrollResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PostFleetEpmPackagesPkgnamePkgversionWithBody(ctx 
context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetEpmPackagesPkgnamePkgversionRequestWithBody(c.Server, pkgName, pkgVersion, params, contentType, body) - if err != nil { - return nil, err + response := &PostFleetAgentsBulkUnenrollResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + ActionId string `json:"actionId"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PostFleetEpmPackagesPkgnamePkgversion(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, body PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetEpmPackagesPkgnamePkgversionRequest(c.Server, pkgName, pkgVersion, params, body) +// ParsePostFleetAgentsBulkUpdateAgentTagsResponse parses an HTTP response from a PostFleetAgentsBulkUpdateAgentTagsWithResponse call +func ParsePostFleetAgentsBulkUpdateAgentTagsResponse(rsp *http.Response) (*PostFleetAgentsBulkUpdateAgentTagsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) GetFleetFleetServerHosts(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetFleetServerHostsRequest(c.Server) - if err != nil { - return nil, err + response := &PostFleetAgentsBulkUpdateAgentTagsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + ActionId string `json:"actionId"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PostFleetFleetServerHostsWithBody(ctx context.Context, contentType string, body 
io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetFleetServerHostsRequestWithBody(c.Server, contentType, body) +// ParsePostFleetAgentsBulkUpgradeResponse parses an HTTP response from a PostFleetAgentsBulkUpgradeWithResponse call +func ParsePostFleetAgentsBulkUpgradeResponse(rsp *http.Response) (*PostFleetAgentsBulkUpgradeResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PostFleetFleetServerHosts(ctx context.Context, body PostFleetFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetFleetServerHostsRequest(c.Server, body) - if err != nil { - return nil, err + response := &PostFleetAgentsBulkUpgradeResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + ActionId string `json:"actionId"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) DeleteFleetFleetServerHostsItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteFleetFleetServerHostsItemidRequest(c.Server, itemId) +// ParseDeleteFleetAgentsFilesFileidResponse parses an HTTP response from a DeleteFleetAgentsFilesFileidWithResponse call +func ParseDeleteFleetAgentsFilesFileidResponse(rsp *http.Response) (*DeleteFleetAgentsFilesFileidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) GetFleetFleetServerHostsItemid(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetFleetServerHostsItemidRequest(c.Server, itemId) - if err != nil { - return nil, err + response := &DeleteFleetAgentsFilesFileidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Deleted bool `json:"deleted"` + Id string `json:"id"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string 
`json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PutFleetFleetServerHostsItemidWithBody(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutFleetFleetServerHostsItemidRequestWithBody(c.Server, itemId, contentType, body) +// ParseGetFleetAgentsFilesFileidFilenameResponse parses an HTTP response from a GetFleetAgentsFilesFileidFilenameWithResponse call +func ParseGetFleetAgentsFilesFileidFilenameResponse(rsp *http.Response) (*GetFleetAgentsFilesFileidFilenameResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PutFleetFleetServerHostsItemid(ctx context.Context, itemId string, body PutFleetFleetServerHostsItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutFleetFleetServerHostsItemidRequest(c.Server, itemId, body) - if err != nil { - return nil, err + response := &GetFleetAgentsFilesFileidFilenameResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) GetFleetOutputs(ctx context.Context, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetOutputsRequest(c.Server) +// ParseGetFleetAgentsSetupResponse parses an HTTP response from a GetFleetAgentsSetupWithResponse call +func ParseGetFleetAgentsSetupResponse(rsp *http.Response) (*GetFleetAgentsSetupResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PostFleetOutputsWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetOutputsRequestWithBody(c.Server, contentType, body) - if err != nil { - return nil, err + response := &GetFleetAgentsSetupResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + IsReady bool `json:"isReady"` + IsSecretsStorageEnabled *bool `json:"is_secrets_storage_enabled,omitempty"` + IsSpaceAwarenessEnabled *bool `json:"is_space_awareness_enabled,omitempty"` + MissingOptionalFeatures []GetFleetAgentsSetup200MissingOptionalFeatures `json:"missing_optional_features"` + MissingRequirements []GetFleetAgentsSetup200MissingRequirements `json:"missing_requirements"` + PackageVerificationKeyId *string `json:"package_verification_key_id,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PostFleetOutputs(ctx context.Context, body PostFleetOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetOutputsRequest(c.Server, body) +// ParsePostFleetAgentsSetupResponse parses an HTTP response from a PostFleetAgentsSetupWithResponse call +func ParsePostFleetAgentsSetupResponse(rsp *http.Response) (*PostFleetAgentsSetupResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) DeleteFleetOutputsOutputid(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteFleetOutputsOutputidRequest(c.Server, outputId) - if err != nil { - return nil, err + response := &PostFleetAgentsSetupResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + IsInitialized bool `json:"isInitialized"` + NonFatalErrors []struct { + Message string `json:"message"` + Name string `json:"name"` + } `json:"nonFatalErrors"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) GetFleetOutputsOutputid(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetOutputsOutputidRequest(c.Server, outputId) +// ParseGetFleetAgentsTagsResponse parses an HTTP response from a GetFleetAgentsTagsWithResponse call +func 
ParseGetFleetAgentsTagsResponse(rsp *http.Response) (*GetFleetAgentsTagsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + response := &GetFleetAgentsTagsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return c.Client.Do(req) + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []string `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil } -func (c *Client) PutFleetOutputsOutputidWithBody(ctx context.Context, outputId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutFleetOutputsOutputidRequestWithBody(c.Server, outputId, contentType, body) +// ParseDeleteFleetAgentsAgentidResponse parses an HTTP response from a DeleteFleetAgentsAgentidWithResponse call +func ParseDeleteFleetAgentsAgentidResponse(rsp *http.Response) (*DeleteFleetAgentsAgentidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + response := &DeleteFleetAgentsAgentidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return c.Client.Do(req) + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Action DeleteFleetAgentsAgentid200Action `json:"action"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil } -func (c *Client) PutFleetOutputsOutputid(ctx context.Context, outputId string, body PutFleetOutputsOutputidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutFleetOutputsOutputidRequest(c.Server, outputId, body) +// ParseGetFleetAgentsAgentidResponse parses an HTTP response from a GetFleetAgentsAgentidWithResponse call +func ParseGetFleetAgentsAgentidResponse(rsp *http.Response) (*GetFleetAgentsAgentidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + response := &GetFleetAgentsAgentidResponse{ + Body: 
bodyBytes, + HTTPResponse: rsp, } - return c.Client.Do(req) -} -func (c *Client) GetFleetPackagePolicies(ctx context.Context, params *GetFleetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetPackagePoliciesRequest(c.Server, params) - if err != nil { - return nil, err - } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + AccessApiKey *string `json:"access_api_key,omitempty"` + AccessApiKeyId *string `json:"access_api_key_id,omitempty"` + Active bool `json:"active"` + Agent *GetFleetAgentsAgentid_200_Item_Agent `json:"agent,omitempty"` + AuditUnenrolledReason *string `json:"audit_unenrolled_reason,omitempty"` + Components *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Status GetFleetAgentsAgentid200ItemComponentsStatus `json:"status"` + Type string `json:"type"` + Units *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Payload *map[string]interface{} `json:"payload,omitempty"` + Status GetFleetAgentsAgentid200ItemComponentsUnitsStatus `json:"status"` + Type GetFleetAgentsAgentid200ItemComponentsUnitsType `json:"type"` + } `json:"units,omitempty"` + } `json:"components,omitempty"` + DefaultApiKey *string `json:"default_api_key,omitempty"` + DefaultApiKeyHistory *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"default_api_key_history,omitempty"` + DefaultApiKeyId *string `json:"default_api_key_id,omitempty"` + EnrolledAt string `json:"enrolled_at"` + Id string `json:"id"` + LastCheckin *string `json:"last_checkin,omitempty"` + LastCheckinMessage *string `json:"last_checkin_message,omitempty"` + LastCheckinStatus *GetFleetAgentsAgentid200ItemLastCheckinStatus `json:"last_checkin_status,omitempty"` + LastKnownStatus *GetFleetAgentsAgentid200ItemLastKnownStatus `json:"last_known_status,omitempty"` + LocalMetadata map[string]interface{} `json:"local_metadata"` + Metrics *struct { + CpuAvg *float32 `json:"cpu_avg,omitempty"` + MemorySizeByteAvg *float32 `json:"memory_size_byte_avg,omitempty"` + } `json:"metrics,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + Outputs *map[string]struct { + ApiKeyId *string `json:"api_key_id,omitempty"` + ToRetireApiKeyIds *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"to_retire_api_key_ids,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"outputs,omitempty"` + Packages []string `json:"packages"` + PolicyId *string `json:"policy_id,omitempty"` + PolicyRevision *float32 `json:"policy_revision"` + Sort *[]interface{} `json:"sort,omitempty"` + Status *GetFleetAgentsAgentid200ItemStatus `json:"status,omitempty"` + Tags *[]string `json:"tags,omitempty"` + Type GetFleetAgentsAgentid200ItemType `json:"type"` + UnenrolledAt *string `json:"unenrolled_at,omitempty"` + UnenrollmentStartedAt *string `json:"unenrollment_started_at,omitempty"` + UnhealthyReason *[]GetFleetAgentsAgentid200ItemUnhealthyReason `json:"unhealthy_reason"` + UpgradeAttempts *[]string `json:"upgrade_attempts"` + UpgradeDetails *struct { + ActionId string `json:"action_id"` + Metadata *struct { + DownloadPercent *float32 `json:"download_percent,omitempty"` + DownloadRate *float32 `json:"download_rate,omitempty"` + ErrorMsg *string `json:"error_msg,omitempty"` + FailedState 
*GetFleetAgentsAgentid200ItemUpgradeDetailsMetadataFailedState `json:"failed_state,omitempty"` + RetryErrorMsg *string `json:"retry_error_msg,omitempty"` + RetryUntil *string `json:"retry_until,omitempty"` + ScheduledAt *string `json:"scheduled_at,omitempty"` + } `json:"metadata,omitempty"` + State GetFleetAgentsAgentid200ItemUpgradeDetailsState `json:"state"` + TargetVersion string `json:"target_version"` + } `json:"upgrade_details"` + UpgradeStartedAt *string `json:"upgrade_started_at"` + UpgradedAt *string `json:"upgraded_at"` + UserProvidedMetadata *map[string]interface{} `json:"user_provided_metadata,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PostFleetPackagePoliciesWithBody(ctx context.Context, params *PostFleetPackagePoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetPackagePoliciesRequestWithBody(c.Server, params, contentType, body) +// ParsePutFleetAgentsAgentidResponse parses an HTTP response from a PutFleetAgentsAgentidWithResponse call +func ParsePutFleetAgentsAgentidResponse(rsp *http.Response) (*PutFleetAgentsAgentidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PostFleetPackagePolicies(ctx context.Context, params *PostFleetPackagePoliciesParams, body PostFleetPackagePoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostFleetPackagePoliciesRequest(c.Server, params, body) - if err != nil { - return nil, err + response := &PutFleetAgentsAgentidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + AccessApiKey *string `json:"access_api_key,omitempty"` + AccessApiKeyId *string `json:"access_api_key_id,omitempty"` + Active bool `json:"active"` + Agent *PutFleetAgentsAgentid_200_Item_Agent `json:"agent,omitempty"` + AuditUnenrolledReason *string `json:"audit_unenrolled_reason,omitempty"` + Components *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Status PutFleetAgentsAgentid200ItemComponentsStatus `json:"status"` + Type string `json:"type"` + Units *[]struct { + Id string `json:"id"` + Message string `json:"message"` + Payload *map[string]interface{} `json:"payload,omitempty"` + Status PutFleetAgentsAgentid200ItemComponentsUnitsStatus `json:"status"` + Type PutFleetAgentsAgentid200ItemComponentsUnitsType `json:"type"` + } `json:"units,omitempty"` + } `json:"components,omitempty"` + DefaultApiKey *string 
`json:"default_api_key,omitempty"` + DefaultApiKeyHistory *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"default_api_key_history,omitempty"` + DefaultApiKeyId *string `json:"default_api_key_id,omitempty"` + EnrolledAt string `json:"enrolled_at"` + Id string `json:"id"` + LastCheckin *string `json:"last_checkin,omitempty"` + LastCheckinMessage *string `json:"last_checkin_message,omitempty"` + LastCheckinStatus *PutFleetAgentsAgentid200ItemLastCheckinStatus `json:"last_checkin_status,omitempty"` + LastKnownStatus *PutFleetAgentsAgentid200ItemLastKnownStatus `json:"last_known_status,omitempty"` + LocalMetadata map[string]interface{} `json:"local_metadata"` + Metrics *struct { + CpuAvg *float32 `json:"cpu_avg,omitempty"` + MemorySizeByteAvg *float32 `json:"memory_size_byte_avg,omitempty"` + } `json:"metrics,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + Outputs *map[string]struct { + ApiKeyId *string `json:"api_key_id,omitempty"` + ToRetireApiKeyIds *[]struct { + Id string `json:"id"` + RetiredAt string `json:"retired_at"` + } `json:"to_retire_api_key_ids,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"outputs,omitempty"` + Packages []string `json:"packages"` + PolicyId *string `json:"policy_id,omitempty"` + PolicyRevision *float32 `json:"policy_revision"` + Sort *[]interface{} `json:"sort,omitempty"` + Status *PutFleetAgentsAgentid200ItemStatus `json:"status,omitempty"` + Tags *[]string `json:"tags,omitempty"` + Type PutFleetAgentsAgentid200ItemType `json:"type"` + UnenrolledAt *string `json:"unenrolled_at,omitempty"` + UnenrollmentStartedAt *string `json:"unenrollment_started_at,omitempty"` + UnhealthyReason *[]PutFleetAgentsAgentid200ItemUnhealthyReason `json:"unhealthy_reason"` + UpgradeAttempts *[]string `json:"upgrade_attempts"` + UpgradeDetails *struct { + ActionId string `json:"action_id"` + Metadata *struct { + DownloadPercent *float32 `json:"download_percent,omitempty"` + DownloadRate *float32 `json:"download_rate,omitempty"` + ErrorMsg *string `json:"error_msg,omitempty"` + FailedState *PutFleetAgentsAgentid200ItemUpgradeDetailsMetadataFailedState `json:"failed_state,omitempty"` + RetryErrorMsg *string `json:"retry_error_msg,omitempty"` + RetryUntil *string `json:"retry_until,omitempty"` + ScheduledAt *string `json:"scheduled_at,omitempty"` + } `json:"metadata,omitempty"` + State PutFleetAgentsAgentid200ItemUpgradeDetailsState `json:"state"` + TargetVersion string `json:"target_version"` + } `json:"upgrade_details"` + UpgradeStartedAt *string `json:"upgrade_started_at"` + UpgradedAt *string `json:"upgraded_at"` + UserProvidedMetadata *map[string]interface{} `json:"user_provided_metadata,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) DeleteFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *DeleteFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) 
(*http.Response, error) { - req, err := NewDeleteFleetPackagePoliciesPackagepolicyidRequest(c.Server, packagePolicyId, params) +// ParsePostFleetAgentsAgentidActionsResponse parses an HTTP response from a PostFleetAgentsAgentidActionsWithResponse call +func ParsePostFleetAgentsAgentidActionsResponse(rsp *http.Response) (*PostFleetAgentsAgentidActionsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) GetFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *GetFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetFleetPackagePoliciesPackagepolicyidRequest(c.Server, packagePolicyId, params) - if err != nil { - return nil, err + response := &PostFleetAgentsAgentidActionsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + AckData interface{} `json:"ack_data"` + Agents *[]string `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + Data interface{} `json:"data"` + Expiration *string `json:"expiration,omitempty"` + Id string `json:"id"` + MinimumExecutionDuration *float32 `json:"minimum_execution_duration,omitempty"` + Namespaces *[]string `json:"namespaces,omitempty"` + RolloutDurationSeconds *float32 `json:"rollout_duration_seconds,omitempty"` + SentAt *string `json:"sent_at,omitempty"` + SourceUri *string `json:"source_uri,omitempty"` + StartTime *string `json:"start_time,omitempty"` + Total *float32 `json:"total,omitempty"` + Type string `json:"type"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PutFleetPackagePoliciesPackagepolicyidWithBody(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutFleetPackagePoliciesPackagepolicyidRequestWithBody(c.Server, packagePolicyId, params, contentType, body) +// ParsePostFleetAgentsAgentidReassignResponse parses an HTTP response from a PostFleetAgentsAgentidReassignWithResponse call +func ParsePostFleetAgentsAgentidReassignResponse(rsp *http.Response) (*PostFleetAgentsAgentidReassignResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) 
PutFleetPackagePoliciesPackagepolicyid(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, body PutFleetPackagePoliciesPackagepolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutFleetPackagePoliciesPackagepolicyidRequest(c.Server, packagePolicyId, params, body) - if err != nil { - return nil, err + response := &PostFleetAgentsAgentidReassignResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PostParametersWithBody(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostParametersRequestWithBody(c.Server, contentType, body) +// ParsePostFleetAgentsAgentidRequestDiagnosticsResponse parses an HTTP response from a PostFleetAgentsAgentidRequestDiagnosticsWithResponse call +func ParsePostFleetAgentsAgentidRequestDiagnosticsResponse(rsp *http.Response) (*PostFleetAgentsAgentidRequestDiagnosticsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PostParameters(ctx context.Context, body PostParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostParametersRequest(c.Server, body) - if err != nil { - return nil, err + response := &PostFleetAgentsAgentidRequestDiagnosticsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + ActionId string `json:"actionId"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) DeleteParameter(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteParameterRequest(c.Server, id) +// 
ParsePostFleetAgentsAgentidUnenrollResponse parses an HTTP response from a PostFleetAgentsAgentidUnenrollWithResponse call +func ParsePostFleetAgentsAgentidUnenrollResponse(rsp *http.Response) (*PostFleetAgentsAgentidUnenrollResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + response := &PostFleetAgentsAgentidUnenrollResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return c.Client.Do(req) + + return response, nil } -func (c *Client) GetParameter(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetParameterRequest(c.Server, id) +// ParsePostFleetAgentsAgentidUpgradeResponse parses an HTTP response from a PostFleetAgentsAgentidUpgradeWithResponse call +func ParsePostFleetAgentsAgentidUpgradeResponse(rsp *http.Response) (*PostFleetAgentsAgentidUpgradeResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PutParameterWithBody(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutParameterRequestWithBody(c.Server, id, contentType, body) - if err != nil { - return nil, err + response := &PostFleetAgentsAgentidUpgradeResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PutParameter(ctx context.Context, id string, body PutParameterJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutParameterRequest(c.Server, id, body) +// ParseGetFleetAgentsAgentidUploadsResponse parses an HTTP response from a GetFleetAgentsAgentidUploadsWithResponse call +func ParseGetFleetAgentsAgentidUploadsResponse(rsp *http.Response) (*GetFleetAgentsAgentidUploadsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) DeleteActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteActionsConnectorIdRequest(c.Server, spaceId, id) - if err != nil { - return nil, err + response := &GetFleetAgentsAgentidUploadsResponse{ + 
Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + ActionId string `json:"actionId"` + CreateTime string `json:"createTime"` + Error *string `json:"error,omitempty"` + FilePath string `json:"filePath"` + Id string `json:"id"` + Name string `json:"name"` + Status GetFleetAgentsAgentidUploads200ItemsStatus `json:"status"` + } `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) GetActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetActionsConnectorIdRequest(c.Server, spaceId, id) +// ParseGetFleetCheckPermissionsResponse parses an HTTP response from a GetFleetCheckPermissionsWithResponse call +func ParseGetFleetCheckPermissionsResponse(rsp *http.Response) (*GetFleetCheckPermissionsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PostActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostActionsConnectorIdRequestWithBody(c.Server, spaceId, id, contentType, body) - if err != nil { - return nil, err + response := &GetFleetCheckPermissionsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Error *GetFleetCheckPermissions200Error `json:"error,omitempty"` + Success bool `json:"success"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PostActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostActionsConnectorIdRequest(c.Server, spaceId, id, body) +// 
ParseGetFleetDataStreamsResponse parses an HTTP response from a GetFleetDataStreamsWithResponse call +func ParseGetFleetDataStreamsResponse(rsp *http.Response) (*GetFleetDataStreamsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PutActionsConnectorIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutActionsConnectorIdRequestWithBody(c.Server, spaceId, id, contentType, body) - if err != nil { - return nil, err + response := &GetFleetDataStreamsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + DataStreams []struct { + Dashboards []struct { + Id string `json:"id"` + Title string `json:"title"` + } `json:"dashboards"` + Dataset string `json:"dataset"` + Index string `json:"index"` + LastActivityMs float32 `json:"last_activity_ms"` + Namespace string `json:"namespace"` + Package string `json:"package"` + PackageVersion string `json:"package_version"` + ServiceDetails *struct { + Environment string `json:"environment"` + ServiceName string `json:"serviceName"` + } `json:"serviceDetails"` + SizeInBytes float32 `json:"size_in_bytes"` + SizeInBytesFormatted GetFleetDataStreams_200_DataStreams_SizeInBytesFormatted `json:"size_in_bytes_formatted"` + Type string `json:"type"` + } `json:"data_streams"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PutActionsConnectorId(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPutActionsConnectorIdRequest(c.Server, spaceId, id, body) +// ParseGetFleetEnrollmentApiKeysResponse parses an HTTP response from a GetFleetEnrollmentApiKeysWithResponse call +func ParseGetFleetEnrollmentApiKeysResponse(rsp *http.Response) (*GetFleetEnrollmentApiKeysResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) GetActionsConnectors(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetActionsConnectorsRequest(c.Server, spaceId) - if err != nil { - return nil, err + response := &GetFleetEnrollmentApiKeysResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := 
c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []EnrollmentApiKey `json:"items"` + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + List []struct { + // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. + Active bool `json:"active"` + + // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. + ApiKey string `json:"api_key"` + + // ApiKeyId The ID of the API key in the Security API. + ApiKeyId string `json:"api_key_id"` + CreatedAt string `json:"created_at"` + Hidden *bool `json:"hidden,omitempty"` + Id string `json:"id"` + + // Name The name of the enrollment API key. + Name *string `json:"name,omitempty"` + + // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. + PolicyId *string `json:"policy_id,omitempty"` + } `json:"list"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) GetAllDataViewsDefault(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetAllDataViewsDefaultRequest(c.Server, spaceId) +// ParsePostFleetEnrollmentApiKeysResponse parses an HTTP response from a PostFleetEnrollmentApiKeysWithResponse call +func ParsePostFleetEnrollmentApiKeysResponse(rsp *http.Response) (*PostFleetEnrollmentApiKeysResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) CreateDataViewDefaultwWithBody(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateDataViewDefaultwRequestWithBody(c.Server, spaceId, contentType, body) - if err != nil { - return nil, err + response := &PostFleetEnrollmentApiKeysResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Action PostFleetEnrollmentApiKeys200Action `json:"action"` + Item struct { + // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. + Active bool `json:"active"` + + // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. + ApiKey string `json:"api_key"` + + // ApiKeyId The ID of the API key in the Security API. 
+ ApiKeyId string `json:"api_key_id"` + CreatedAt string `json:"created_at"` + Hidden *bool `json:"hidden,omitempty"` + Id string `json:"id"` + + // Name The name of the enrollment API key. + Name *string `json:"name,omitempty"` + + // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. + PolicyId *string `json:"policy_id,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) CreateDataViewDefaultw(ctx context.Context, spaceId SpaceId, body CreateDataViewDefaultwJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewCreateDataViewDefaultwRequest(c.Server, spaceId, body) +// ParseDeleteFleetEnrollmentApiKeysKeyidResponse parses an HTTP response from a DeleteFleetEnrollmentApiKeysKeyidWithResponse call +func ParseDeleteFleetEnrollmentApiKeysKeyidResponse(rsp *http.Response) (*DeleteFleetEnrollmentApiKeysKeyidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) DeleteDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteDataViewDefaultRequest(c.Server, spaceId, viewId) - if err != nil { - return nil, err + response := &DeleteFleetEnrollmentApiKeysKeyidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Action DeleteFleetEnrollmentApiKeysKeyid200Action `json:"action"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) GetDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetDataViewDefaultRequest(c.Server, spaceId, viewId) +// ParseGetFleetEnrollmentApiKeysKeyidResponse parses an HTTP response from a GetFleetEnrollmentApiKeysKeyidWithResponse call +func ParseGetFleetEnrollmentApiKeysKeyidResponse(rsp *http.Response) (*GetFleetEnrollmentApiKeysKeyidResponse, error) { + bodyBytes, err := 
io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) UpdateDataViewDefaultWithBody(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateDataViewDefaultRequestWithBody(c.Server, spaceId, viewId, contentType, body) - if err != nil { - return nil, err + response := &GetFleetEnrollmentApiKeysKeyidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. + Active bool `json:"active"` + + // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. + ApiKey string `json:"api_key"` + + // ApiKeyId The ID of the API key in the Security API. + ApiKeyId string `json:"api_key_id"` + CreatedAt string `json:"created_at"` + Hidden *bool `json:"hidden,omitempty"` + Id string `json:"id"` + + // Name The name of the enrollment API key. + Name *string `json:"name,omitempty"` + + // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. + PolicyId *string `json:"policy_id,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) UpdateDataViewDefault(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewUpdateDataViewDefaultRequest(c.Server, spaceId, viewId, body) +// ParsePostFleetEpmBulkAssetsResponse parses an HTTP response from a PostFleetEpmBulkAssetsWithResponse call +func ParsePostFleetEpmBulkAssetsResponse(rsp *http.Response) (*PostFleetEpmBulkAssetsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + response := &PostFleetEpmBulkAssetsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return c.Client.Do(req) -} -// NewDeleteAgentConfigurationRequest calls the generic DeleteAgentConfiguration builder with application/json body -func NewDeleteAgentConfigurationRequest(server string, params *DeleteAgentConfigurationParams, body DeleteAgentConfigurationJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + 
var dest struct { + Items []struct { + AppLink *string `json:"appLink,omitempty"` + Attributes struct { + Description *string `json:"description,omitempty"` + Service *string `json:"service,omitempty"` + Title *string `json:"title,omitempty"` + } `json:"attributes"` + Id string `json:"id"` + Type string `json:"type"` + UpdatedAt *string `json:"updatedAt,omitempty"` + } `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - bodyReader = bytes.NewReader(buf) - return NewDeleteAgentConfigurationRequestWithBody(server, params, "application/json", bodyReader) -} -// NewDeleteAgentConfigurationRequestWithBody generates requests for DeleteAgentConfiguration with any type of body -func NewDeleteAgentConfigurationRequestWithBody(server string, params *DeleteAgentConfigurationParams, contentType string, body io.Reader) (*http.Request, error) { - var err error + return response, nil +} - serverURL, err := url.Parse(server) +// ParseGetFleetEpmCategoriesResponse parses an HTTP response from a GetFleetEpmCategoriesWithResponse call +func ParseGetFleetEpmCategoriesResponse(rsp *http.Response) (*GetFleetEpmCategoriesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/apm/settings/agent-configuration") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + response := &GetFleetEpmCategoriesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + Count float32 `json:"count"` + Id string `json:"id"` + ParentId *string `json:"parent_id,omitempty"` + ParentTitle *string `json:"parent_title,omitempty"` + Title string `json:"title"` + } `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - req, err := http.NewRequest("DELETE", queryURL.String(), body) + return response, nil +} + +// ParsePostFleetEpmCustomIntegrationsResponse parses an HTTP response from a PostFleetEpmCustomIntegrationsWithResponse call +func ParsePostFleetEpmCustomIntegrationsResponse(rsp *http.Response) (*PostFleetEpmCustomIntegrationsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) - - if params != nil { - - var headerParam0 string + response := &PostFleetEpmCustomIntegrationsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) - if err != nil { + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + UnderscoreMeta struct { + InstallSource string `json:"install_source"` + Name string `json:"name"` + } `json:"_meta"` + Items []PostFleetEpmCustomIntegrations_200_Items_Item `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } + response.JSON200 = &dest - req.Header.Set("elastic-api-version", headerParam0) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest } - return req, nil + return response, nil } -// NewGetAgentConfigurationsRequest generates requests for GetAgentConfigurations -func NewGetAgentConfigurationsRequest(server string, params *GetAgentConfigurationsParams) (*http.Request, error) { - var err error - - serverURL, err := url.Parse(server) +// ParsePutFleetEpmCustomIntegrationsPkgnameResponse parses an HTTP response from a PutFleetEpmCustomIntegrationsPkgnameWithResponse call +func ParsePutFleetEpmCustomIntegrationsPkgnameResponse(rsp *http.Response) (*PutFleetEpmCustomIntegrationsPkgnameResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := 
fmt.Sprintf("/api/apm/settings/agent-configuration") - if operationPath[0] == '/' { - operationPath = "." + operationPath + response := &PutFleetEpmCustomIntegrationsPkgnameResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - req, err := http.NewRequest("GET", queryURL.String(), nil) + return response, nil +} + +// ParseGetFleetEpmDataStreamsResponse parses an HTTP response from a GetFleetEpmDataStreamsWithResponse call +func ParseGetFleetEpmDataStreamsResponse(rsp *http.Response) (*GetFleetEpmDataStreamsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - - var headerParam0 string + response := &GetFleetEpmDataStreamsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) - if err != nil { + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + Name string `json:"name"` + } `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } + response.JSON200 = &dest - req.Header.Set("elastic-api-version", headerParam0) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest } - return req, nil + return response, nil } -// NewCreateUpdateAgentConfigurationRequest calls the generic CreateUpdateAgentConfiguration builder with application/json body -func NewCreateUpdateAgentConfigurationRequest(server string, params *CreateUpdateAgentConfigurationParams, body CreateUpdateAgentConfigurationJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) +// ParseGetFleetEpmPackagesResponse parses an HTTP response from a GetFleetEpmPackagesWithResponse call +func ParseGetFleetEpmPackagesResponse(rsp *http.Response) (*GetFleetEpmPackagesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - bodyReader = bytes.NewReader(buf) - return NewCreateUpdateAgentConfigurationRequestWithBody(server, params, "application/json", bodyReader) -} -// NewCreateUpdateAgentConfigurationRequestWithBody generates requests for CreateUpdateAgentConfiguration with any type of body -func NewCreateUpdateAgentConfigurationRequestWithBody(server string, params *CreateUpdateAgentConfigurationParams, contentType string, body io.Reader) (*http.Request, error) { - var err error + response := &GetFleetEpmPackagesResponse{ + Body: 
bodyBytes, + HTTPResponse: rsp, + } - serverURL, err := url.Parse(server) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []PackageListItem `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil +} + +// ParsePostFleetEpmPackagesResponse parses an HTTP response from a PostFleetEpmPackagesWithResponse call +func ParsePostFleetEpmPackagesResponse(rsp *http.Response) (*PostFleetEpmPackagesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/apm/settings/agent-configuration") - if operationPath[0] == '/' { - operationPath = "." + operationPath + response := &PostFleetEpmPackagesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParsePostFleetEpmPackagesBulkResponse parses an HTTP response from a PostFleetEpmPackagesBulkWithResponse call +func ParsePostFleetEpmPackagesBulkResponse(rsp *http.Response) (*PostFleetEpmPackagesBulkResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.Overwrite != nil { + response := &PostFleetEpmPackagesBulkResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "overwrite", runtime.ParamLocationQuery, *params.Overwrite); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []PostFleetEpmPackagesBulk_200_Items_Item `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("PUT", queryURL.String(), body) + return response, nil +} + +// ParsePostFleetEpmPackagesBulkUninstallResponse parses an HTTP response from a PostFleetEpmPackagesBulkUninstallWithResponse call +func ParsePostFleetEpmPackagesBulkUninstallResponse(rsp *http.Response) (*PostFleetEpmPackagesBulkUninstallResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err 
!= nil { return nil, err } - req.Header.Add("Content-Type", contentType) - - if params != nil { - - var headerParam0 string + response := &PostFleetEpmPackagesBulkUninstallResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - headerParam0, err = runtime.StyleParamWithLocation("simple", false, "elastic-api-version", runtime.ParamLocationHeader, params.ElasticApiVersion) - if err != nil { + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + TaskId string `json:"taskId"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } + response.JSON200 = &dest - req.Header.Set("elastic-api-version", headerParam0) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest } - return req, nil + return response, nil } -func (c *Client) PostMaintenanceWindowWithBody(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostMaintenanceWindowRequestWithBody(c.Server, spaceId, contentType, body) +// ParseGetFleetEpmPackagesBulkUninstallTaskidResponse parses an HTTP response from a GetFleetEpmPackagesBulkUninstallTaskidWithResponse call +func ParseGetFleetEpmPackagesBulkUninstallTaskidResponse(rsp *http.Response) (*GetFleetEpmPackagesBulkUninstallTaskidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PostMaintenanceWindow(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPostMaintenanceWindowRequest(c.Server, spaceId, body) - if err != nil { - return nil, err + response := &GetFleetEpmPackagesBulkUninstallTaskidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Error *struct { + Message string `json:"message"` + } `json:"error,omitempty"` + Results *[]struct { + Error *struct { + Message string `json:"message"` + } `json:"error,omitempty"` + Name string `json:"name"` + Success bool `json:"success"` + } `json:"results,omitempty"` + Status string `json:"status"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } 
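For orientation, the new Parse*Response helpers above all follow the same shape: read and close the response body, wrap it in a typed response struct, and populate JSON200/JSON400 only when the HTTP status code and a JSON Content-Type line up. Below is a minimal caller-side sketch, not part of the generated file; it assumes only the ParseGetFleetAgentsTagsResponse signature and the GetFleetAgentsTagsResponse fields visible in this patch, uses "fleetclient" as a placeholder for whatever package actually holds the generated code, and uses doRequest as a hypothetical stand-in for the code that issues the underlying HTTP request.

// Hypothetical example; "fleetclient" is a placeholder package name and
// doRequest stands in for the raw generated client call.
package fleetclient

import (
	"context"
	"fmt"
	"net/http"
)

func listAgentTags(ctx context.Context, doRequest func(context.Context) (*http.Response, error)) ([]string, error) {
	rsp, err := doRequest(ctx)
	if err != nil {
		return nil, err
	}
	// The generated parser reads and closes rsp.Body, then fills JSON200/JSON400
	// only when both the status code and the JSON Content-Type match.
	parsed, err := ParseGetFleetAgentsTagsResponse(rsp)
	if err != nil {
		return nil, err
	}
	switch {
	case parsed.JSON200 != nil:
		return parsed.JSON200.Items, nil
	case parsed.JSON400 != nil:
		return nil, fmt.Errorf("fleet API error: %s", parsed.JSON400.Message)
	default:
		// Fall back to the raw response for any status the spec does not model.
		return nil, fmt.Errorf("unexpected status %d: %s", parsed.HTTPResponse.StatusCode, string(parsed.Body))
	}
}

As the doc comments on these parsers suggest, the corresponding *WithResponse wrapper methods presumably perform the raw request and then delegate to the matching Parse* helper, so most callers would use those wrappers rather than invoking the parsers directly.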
-func (c *Client) DeleteMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewDeleteMaintenanceWindowIdRequest(c.Server, spaceId, id) +// ParsePostFleetEpmPackagesBulkUpgradeResponse parses an HTTP response from a PostFleetEpmPackagesBulkUpgradeWithResponse call +func ParsePostFleetEpmPackagesBulkUpgradeResponse(rsp *http.Response) (*PostFleetEpmPackagesBulkUpgradeResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + response := &PostFleetEpmPackagesBulkUpgradeResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + TaskId string `json:"taskId"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) GetMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewGetMaintenanceWindowIdRequest(c.Server, spaceId, id) +// ParseGetFleetEpmPackagesBulkUpgradeTaskidResponse parses an HTTP response from a GetFleetEpmPackagesBulkUpgradeTaskidWithResponse call +func ParseGetFleetEpmPackagesBulkUpgradeTaskidResponse(rsp *http.Response) (*GetFleetEpmPackagesBulkUpgradeTaskidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err - } - return c.Client.Do(req) -} -func (c *Client) PatchMaintenanceWindowIdWithBody(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPatchMaintenanceWindowIdRequestWithBody(c.Server, spaceId, id, contentType, body) - if err != nil { - return nil, err + response := &GetFleetEpmPackagesBulkUpgradeTaskidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Error *struct { + Message string `json:"message"` + } `json:"error,omitempty"` + Results *[]struct { + Error *struct { + Message string `json:"message"` + } `json:"error,omitempty"` + Name string `json:"name"` + Success bool `json:"success"` + } `json:"results,omitempty"` + Status string `json:"status"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} 
`json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return c.Client.Do(req) + + return response, nil } -func (c *Client) PatchMaintenanceWindowId(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*http.Response, error) { - req, err := NewPatchMaintenanceWindowIdRequest(c.Server, spaceId, id, body) +// ParseGetFleetEpmPackagesInstalledResponse parses an HTTP response from a GetFleetEpmPackagesInstalledWithResponse call +func ParseGetFleetEpmPackagesInstalledResponse(rsp *http.Response) (*GetFleetEpmPackagesInstalledResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req = req.WithContext(ctx) - if err := c.applyEditors(ctx, req, reqEditors); err != nil { - return nil, err + + response := &GetFleetEpmPackagesInstalledResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return c.Client.Do(req) -} -// NewGetFleetAgentPoliciesRequest generates requests for GetFleetAgentPolicies -func NewGetFleetAgentPoliciesRequest(server string, params *GetFleetAgentPoliciesParams) (*http.Request, error) { - var err error + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + DataStreams []struct { + Name string `json:"name"` + Title string `json:"title"` + } `json:"dataStreams"` + Description *string `json:"description,omitempty"` + Icons *[]struct { + DarkMode *bool `json:"dark_mode,omitempty"` + Path *string `json:"path,omitempty"` + Size *string `json:"size,omitempty"` + Src string `json:"src"` + Title *string `json:"title,omitempty"` + Type *string `json:"type,omitempty"` + } `json:"icons,omitempty"` + Name string `json:"name"` + Status string `json:"status"` + Title *string `json:"title,omitempty"` + Version string `json:"version"` + } `json:"items"` + SearchAfter *[]GetFleetEpmPackagesInstalled_200_SearchAfter_Item `json:"searchAfter,omitempty"` + Total float32 `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - operationPath := fmt.Sprintf("/api/fleet/agent_policies") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParseGetFleetEpmPackagesLimitedResponse parses an HTTP response from a GetFleetEpmPackagesLimitedWithResponse call +func ParseGetFleetEpmPackagesLimitedResponse(rsp *http.Response) (*GetFleetEpmPackagesLimitedResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.Page != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + response := &GetFleetEpmPackagesLimitedResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []string `json:"items"` } - - if params.PerPage != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest - if params.SortField != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", runtime.ParamLocationQuery, *params.SortField); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - - if params.SortOrder != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON400 = &dest - if params.ShowUpgradeable != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showUpgradeable", runtime.ParamLocationQuery, *params.ShowUpgradeable); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + } - } + return response, nil +} - if params.Kuery != nil { +// ParseGetFleetEpmPackagesPkgnameStatsResponse parses an HTTP response from a GetFleetEpmPackagesPkgnameStatsWithResponse call +func ParseGetFleetEpmPackagesPkgnameStatsResponse(rsp *http.Response) (*GetFleetEpmPackagesPkgnameStatsResponse, error) { + bodyBytes, err := 
io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + response := &GetFleetEpmPackagesPkgnameStatsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Response struct { + AgentPolicyCount float32 `json:"agent_policy_count"` + PackagePolicyCount float32 `json:"package_policy_count"` + } `json:"response"` } - - if params.NoAgentCount != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "noAgentCount", runtime.ParamLocationQuery, *params.NoAgentCount); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest - if params.WithAgentCount != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withAgentCount", runtime.ParamLocationQuery, *params.WithAgentCount); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - if params.Full != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "full", runtime.ParamLocationQuery, *params.Full); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + } - } + return response, nil +} - if params.Format != nil { +// ParseDeleteFleetEpmPackagesPkgnamePkgversionResponse parses an HTTP response from a DeleteFleetEpmPackagesPkgnamePkgversionWithResponse call +func ParseDeleteFleetEpmPackagesPkgnamePkgversionResponse(rsp *http.Response) (*DeleteFleetEpmPackagesPkgnamePkgversionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + response := &DeleteFleetEpmPackagesPkgnamePkgversionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest 
struct { + Items []DeleteFleetEpmPackagesPkgnamePkgversion_200_Items_Item `json:"items"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - queryURL.RawQuery = queryValues.Encode() - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err } - return req, nil + return response, nil } -// NewPostFleetAgentPoliciesRequest calls the generic PostFleetAgentPolicies builder with application/json body -func NewPostFleetAgentPoliciesRequest(server string, params *PostFleetAgentPoliciesParams, body PostFleetAgentPoliciesJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) +// ParseGetFleetEpmPackagesPkgnamePkgversionResponse parses an HTTP response from a GetFleetEpmPackagesPkgnamePkgversionWithResponse call +func ParseGetFleetEpmPackagesPkgnamePkgversionResponse(rsp *http.Response) (*GetFleetEpmPackagesPkgnamePkgversionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPostFleetAgentPoliciesRequestWithBody(server, params, "application/json", bodyReader) -} - -// NewPostFleetAgentPoliciesRequestWithBody generates requests for PostFleetAgentPolicies with any type of body -func NewPostFleetAgentPoliciesRequestWithBody(server string, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader) (*http.Request, error) { - var err error - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &GetFleetEpmPackagesPkgnamePkgversionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/api/fleet/agent_policies") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item PackageInfo `json:"item"` + Metadata *struct { + HasPolicies bool `json:"has_policies"` + } `json:"metadata,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParsePostFleetEpmPackagesPkgnamePkgversionResponse parses an HTTP response from a PostFleetEpmPackagesPkgnamePkgversionWithResponse call +func ParsePostFleetEpmPackagesPkgnamePkgversionResponse(rsp *http.Response) (*PostFleetEpmPackagesPkgnamePkgversionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.SysMonitoring != nil { + response := &PostFleetEpmPackagesPkgnamePkgversionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sys_monitoring", runtime.ParamLocationQuery, *params.SysMonitoring); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + UnderscoreMeta struct { + InstallSource string `json:"install_source"` + Name string `json:"name"` + } `json:"_meta"` + Items []PostFleetEpmPackagesPkgnamePkgversion_200_Items_Item `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("POST", queryURL.String(), body) + return response, nil +} + +// ParsePutFleetEpmPackagesPkgnamePkgversionResponse parses an HTTP response from a PutFleetEpmPackagesPkgnamePkgversionWithResponse call +func ParsePutFleetEpmPackagesPkgnamePkgversionResponse(rsp *http.Response) (*PutFleetEpmPackagesPkgnamePkgversionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) + response := &PutFleetEpmPackagesPkgnamePkgversionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item 
PutFleetEpmPackagesPkgnamePkgversion_200_Item `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// NewPostFleetAgentPoliciesDeleteRequest calls the generic PostFleetAgentPoliciesDelete builder with application/json body -func NewPostFleetAgentPoliciesDeleteRequest(server string, body PostFleetAgentPoliciesDeleteJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPostFleetAgentPoliciesDeleteRequestWithBody(server, "application/json", bodyReader) -} -// NewPostFleetAgentPoliciesDeleteRequestWithBody generates requests for PostFleetAgentPoliciesDelete with any type of body -func NewPostFleetAgentPoliciesDeleteRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { - var err error + return response, nil +} - serverURL, err := url.Parse(server) +// ParseDeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse parses an HTTP response from a DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsWithResponse call +func ParseDeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse(rsp *http.Response) (*DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/fleet/agent_policies/delete") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + response := &DeleteFleetEpmPackagesPkgnamePkgversionDatastreamAssetsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Success bool `json:"success"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - req.Header.Add("Content-Type", contentType) + } - return req, nil + return response, nil } -// NewGetFleetAgentPoliciesAgentpolicyidRequest generates requests for GetFleetAgentPoliciesAgentpolicyid -func NewGetFleetAgentPoliciesAgentpolicyidRequest(server string, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidParams) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentPolicyId", runtime.ParamLocationPath, agentPolicyId) +// ParseDeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse parses an HTTP response from a DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithResponse call +func ParseDeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse(rsp *http.Response) (*DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &DeleteFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Success bool `json:"success"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParsePostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse parses an HTTP response from a PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsWithResponse call +func ParsePostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse(rsp *http.Response) (*PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.Format != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + response := &PostFleetEpmPackagesPkgnamePkgversionKibanaAssetsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Success bool `json:"success"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest - queryURL.RawQuery = queryValues.Encode() - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err } - return req, nil + return response, nil } -// NewPutFleetAgentPoliciesAgentpolicyidRequest calls the generic PutFleetAgentPoliciesAgentpolicyid builder with application/json body -func NewPutFleetAgentPoliciesAgentpolicyidRequest(server string, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, body PutFleetAgentPoliciesAgentpolicyidJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) +// ParsePostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse parses an HTTP response from a PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeWithResponse call +func ParsePostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse(rsp *http.Response) (*PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil 
{ return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPutFleetAgentPoliciesAgentpolicyidRequestWithBody(server, agentPolicyId, params, "application/json", bodyReader) -} -// NewPutFleetAgentPoliciesAgentpolicyidRequestWithBody generates requests for PutFleetAgentPoliciesAgentpolicyid with any type of body -func NewPutFleetAgentPoliciesAgentpolicyidRequestWithBody(server string, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, contentType string, body io.Reader) (*http.Request, error) { - var err error + response := &PostFleetEpmPackagesPkgnamePkgversionTransformsAuthorizeResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - var pathParam0 string + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + Error interface{} `json:"error"` + Success bool `json:"success"` + TransformId string `json:"transformId"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "agentPolicyId", runtime.ParamLocationPath, agentPolicyId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/api/fleet/agent_policies/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseGetFleetEpmPackagesPkgnamePkgversionFilepathResponse parses an HTTP response from a GetFleetEpmPackagesPkgnamePkgversionFilepathWithResponse call +func ParseGetFleetEpmPackagesPkgnamePkgversionFilepathResponse(rsp *http.Response) (*GetFleetEpmPackagesPkgnamePkgversionFilepathResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.Format != nil { + response := &GetFleetEpmPackagesPkgnamePkgversionFilepathResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("PUT", queryURL.String(), body) + return response, nil +} + +// ParseGetFleetEpmTemplatesPkgnamePkgversionInputsResponse parses an HTTP response from a GetFleetEpmTemplatesPkgnamePkgversionInputsWithResponse call +func ParseGetFleetEpmTemplatesPkgnamePkgversionInputsResponse(rsp *http.Response) (*GetFleetEpmTemplatesPkgnamePkgversionInputsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) + response := &GetFleetEpmTemplatesPkgnamePkgversionInputsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewGetFleetEnrollmentApiKeysRequest generates requests for GetFleetEnrollmentApiKeys -func NewGetFleetEnrollmentApiKeysRequest(server string, params *GetFleetEnrollmentApiKeysParams) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/api/fleet/enrollment_api_keys") - if operationPath[0] == 
'/' { - operationPath = "." + operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseGetFleetEpmVerificationKeyIdResponse parses an HTTP response from a GetFleetEpmVerificationKeyIdWithResponse call +func ParseGetFleetEpmVerificationKeyIdResponse(rsp *http.Response) (*GetFleetEpmVerificationKeyIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.Page != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + response := &GetFleetEpmVerificationKeyIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Id *string `json:"id"` } - - if params.PerPage != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest - if params.Kuery != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err } - return req, nil + return response, nil } -// NewGetFleetEpmPackagesRequest generates requests for GetFleetEpmPackages -func NewGetFleetEpmPackagesRequest(server string, params *GetFleetEpmPackagesParams) (*http.Request, error) { - var err error - - serverURL, err := url.Parse(server) +// ParseGetFleetFleetServerHostsResponse parses an HTTP response from a GetFleetFleetServerHostsWithResponse call +func ParseGetFleetFleetServerHostsResponse(rsp *http.Response) (*GetFleetFleetServerHostsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/fleet/epm/packages") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } - - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err + response := &GetFleetFleetServerHostsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - if params != nil { - queryValues := queryURL.Query() - - if params.Category != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "category", runtime.ParamLocationQuery, *params.Category); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []ServerHost `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` } - - if params.Prerelease != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest - if params.ExcludeInstallStatus != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "excludeInstallStatus", runtime.ParamLocationQuery, *params.ExcludeInstallStatus); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - - if params.WithPackagePoliciesCount != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withPackagePoliciesCount", runtime.ParamLocationQuery, *params.WithPackagePoliciesCount); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("GET", queryURL.String(), nil) + return response, nil +} + +// ParsePostFleetFleetServerHostsResponse parses an HTTP response from a PostFleetFleetServerHostsWithResponse call +func ParsePostFleetFleetServerHostsResponse(rsp *http.Response) (*PostFleetFleetServerHostsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return req, nil -} + response := &PostFleetFleetServerHostsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// NewPostFleetEpmPackagesRequestWithBody generates requests for PostFleetEpmPackages with any type of body -func NewPostFleetEpmPackagesRequestWithBody(server string, params *PostFleetEpmPackagesParams, contentType string, body io.Reader) (*http.Request, error) { - var err error + switch { + 
case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item ServerHost `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - operationPath := fmt.Sprintf("/api/fleet/epm/packages") - if operationPath[0] == '/' { - operationPath = "." + operationPath } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParseDeleteFleetFleetServerHostsItemidResponse parses an HTTP response from a DeleteFleetFleetServerHostsItemidWithResponse call +func ParseDeleteFleetFleetServerHostsItemidResponse(rsp *http.Response) (*DeleteFleetFleetServerHostsItemidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.IgnoreMappingUpdateErrors != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreMappingUpdateErrors", runtime.ParamLocationQuery, *params.IgnoreMappingUpdateErrors); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + response := &DeleteFleetFleetServerHostsItemidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Id string `json:"id"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - if params.SkipDataStreamRollover != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "skipDataStreamRollover", runtime.ParamLocationQuery, *params.SkipDataStreamRollover); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("POST", queryURL.String(), body) + return response, nil +} + +// ParseGetFleetFleetServerHostsItemidResponse parses an HTTP response from a GetFleetFleetServerHostsItemidWithResponse call +func ParseGetFleetFleetServerHostsItemidResponse(rsp *http.Response) (*GetFleetFleetServerHostsItemidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return 
nil, err } - req.Header.Add("Content-Type", contentType) - - return req, nil -} + response := &GetFleetFleetServerHostsItemidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// NewDeleteFleetEpmPackagesPkgnamePkgversionRequest generates requests for DeleteFleetEpmPackagesPkgnamePkgversion -func NewDeleteFleetEpmPackagesPkgnamePkgversionRequest(server string, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionParams) (*http.Request, error) { - var err error + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item ServerHost `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) - if err != nil { - return nil, err } - var pathParam1 string + return response, nil +} - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) +// ParsePutFleetFleetServerHostsItemidResponse parses an HTTP response from a PutFleetFleetServerHostsItemidWithResponse call +func ParsePutFleetFleetServerHostsItemidResponse(rsp *http.Response) (*PutFleetFleetServerHostsItemidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &PutFleetFleetServerHostsItemidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item ServerHost `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParsePostFleetHealthCheckResponse parses an HTTP response from a PostFleetHealthCheckWithResponse call +func ParsePostFleetHealthCheckResponse(rsp *http.Response) (*PostFleetHealthCheckResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() + response := &PostFleetHealthCheckResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - if params.Force != nil { + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + HostId *string `json:"host_id,omitempty"` + Name *string `json:"name,omitempty"` + Status string `json:"status"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "force", runtime.ParamLocationQuery, *params.Force); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) + return response, nil +} + +// ParseGetFleetKubernetesResponse parses an HTTP response from a GetFleetKubernetesWithResponse call +func ParseGetFleetKubernetesResponse(rsp *http.Response) (*GetFleetKubernetesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return req, nil -} + response := &GetFleetKubernetesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// NewGetFleetEpmPackagesPkgnamePkgversionRequest generates requests for GetFleetEpmPackagesPkgnamePkgversion -func 
NewGetFleetEpmPackagesPkgnamePkgversionRequest(server string, pkgName string, pkgVersion string, params *GetFleetEpmPackagesPkgnamePkgversionParams) (*http.Request, error) { - var err error + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item string `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) - if err != nil { - return nil, err } - var pathParam1 string + return response, nil +} - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) +// ParseGetFleetKubernetesDownloadResponse parses an HTTP response from a GetFleetKubernetesDownloadWithResponse call +func ParseGetFleetKubernetesDownloadResponse(rsp *http.Response) (*GetFleetKubernetesDownloadResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &GetFleetKubernetesDownloadResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest string + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParsePostFleetLogstashApiKeysResponse parses an HTTP response from a PostFleetLogstashApiKeysWithResponse call +func ParsePostFleetLogstashApiKeysResponse(rsp *http.Response) (*PostFleetLogstashApiKeysResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.IgnoreUnverified != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreUnverified", runtime.ParamLocationQuery, *params.IgnoreUnverified); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + response := &PostFleetLogstashApiKeysResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + ApiKey string `json:"api_key"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - if params.Prerelease != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - if params.Full != nil { + } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "full", runtime.ParamLocationQuery, *params.Full); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - 
} - } - } + return response, nil +} - } +// ParsePostFleetMessageSigningServiceRotateKeyPairResponse parses an HTTP response from a PostFleetMessageSigningServiceRotateKeyPairWithResponse call +func ParsePostFleetMessageSigningServiceRotateKeyPairResponse(rsp *http.Response) (*PostFleetMessageSigningServiceRotateKeyPairResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - if params.WithMetadata != nil { + response := &PostFleetMessageSigningServiceRotateKeyPairResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withMetadata", runtime.ParamLocationQuery, *params.WithMetadata); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Message string `json:"message"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err } - return req, nil + return response, nil } -// NewPostFleetEpmPackagesPkgnamePkgversionRequest calls the generic PostFleetEpmPackagesPkgnamePkgversion builder with application/json body -func NewPostFleetEpmPackagesPkgnamePkgversionRequest(server string, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, body PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) +// ParseGetFleetOutputsResponse parses an HTTP response from a GetFleetOutputsWithResponse call +func ParseGetFleetOutputsResponse(rsp *http.Response) (*GetFleetOutputsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPostFleetEpmPackagesPkgnamePkgversionRequestWithBody(server, pkgName, pkgVersion, params, "application/json", bodyReader) -} -// NewPostFleetEpmPackagesPkgnamePkgversionRequestWithBody generates requests for PostFleetEpmPackagesPkgnamePkgversion with any type of body -func NewPostFleetEpmPackagesPkgnamePkgversionRequestWithBody(server string, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, contentType string, 
body io.Reader) (*http.Request, error) { - var err error + response := &GetFleetOutputsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - var pathParam0 string + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []OutputUnion `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "pkgName", runtime.ParamLocationPath, pkgName) - if err != nil { - return nil, err } - var pathParam1 string + return response, nil +} - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "pkgVersion", runtime.ParamLocationPath, pkgVersion) +// ParsePostFleetOutputsResponse parses an HTTP response from a PostFleetOutputsWithResponse call +func ParsePostFleetOutputsResponse(rsp *http.Response) (*PostFleetOutputsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &PostFleetOutputsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/api/fleet/epm/packages/%s/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item OutputUnion `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParseDeleteFleetOutputsOutputidResponse parses an HTTP response from a DeleteFleetOutputsOutputidWithResponse call +func ParseDeleteFleetOutputsOutputidResponse(rsp *http.Response) (*DeleteFleetOutputsOutputidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.Prerelease != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "prerelease", runtime.ParamLocationQuery, *params.Prerelease); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + response := &DeleteFleetOutputsOutputidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Id string `json:"id"` } - - if params.IgnoreMappingUpdateErrors != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "ignoreMappingUpdateErrors", runtime.ParamLocationQuery, *params.IgnoreMappingUpdateErrors); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest - if params.SkipDataStreamRollover != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "skipDataStreamRollover", runtime.ParamLocationQuery, *params.SkipDataStreamRollover); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 
`json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON404 = &dest - queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("POST", queryURL.String(), body) + return response, nil +} + +// ParseGetFleetOutputsOutputidResponse parses an HTTP response from a GetFleetOutputsOutputidWithResponse call +func ParseGetFleetOutputsOutputidResponse(rsp *http.Response) (*GetFleetOutputsOutputidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) + response := &GetFleetOutputsOutputidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item OutputUnion `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewGetFleetFleetServerHostsRequest generates requests for GetFleetFleetServerHosts -func NewGetFleetFleetServerHostsRequest(server string) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParsePutFleetOutputsOutputidResponse parses an HTTP response from a PutFleetOutputsOutputidWithResponse call +func ParsePutFleetOutputsOutputidResponse(rsp *http.Response) (*PutFleetOutputsOutputidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err + response := &PutFleetOutputsOutputidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item OutputUnion `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// NewPostFleetFleetServerHostsRequest calls the generic PostFleetFleetServerHosts builder with application/json body -func NewPostFleetFleetServerHostsRequest(server string, body PostFleetFleetServerHostsJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPostFleetFleetServerHostsRequestWithBody(server, "application/json", bodyReader) -} -// NewPostFleetFleetServerHostsRequestWithBody generates requests for PostFleetFleetServerHosts with any type of body -func NewPostFleetFleetServerHostsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { - var err error + return response, nil +} - serverURL, err := url.Parse(server) +// ParseGetFleetOutputsOutputidHealthResponse parses an HTTP response from a GetFleetOutputsOutputidHealthWithResponse call +func ParseGetFleetOutputsOutputidHealthResponse(rsp *http.Response) (*GetFleetOutputsOutputidHealthResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + response := &GetFleetOutputsOutputidHealthResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Message long message if unhealthy + Message string `json:"message"` - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } + // State state of output, HEALTHY or DEGRADED + State string `json:"state"` - req.Header.Add("Content-Type", contentType) + // Timestamp timestamp of reported state + Timestamp string `json:"timestamp"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - return req, nil -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// NewDeleteFleetFleetServerHostsItemidRequest generates requests for DeleteFleetFleetServerHostsItemid -func NewDeleteFleetFleetServerHostsItemidRequest(server string, itemId string) (*http.Request, error) { - var err error + } - var pathParam0 string + return response, nil +} - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) +// ParseGetFleetPackagePoliciesResponse parses an HTTP response from a GetFleetPackagePoliciesWithResponse call +func ParseGetFleetPackagePoliciesResponse(rsp *http.Response) (*GetFleetPackagePoliciesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &GetFleetPackagePoliciesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []PackagePolicy `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParsePostFleetPackagePoliciesResponse parses an HTTP response from a PostFleetPackagePoliciesWithResponse call +func ParsePostFleetPackagePoliciesResponse(rsp *http.Response) (*PostFleetPackagePoliciesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err + response := &PostFleetPackagePoliciesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item PackagePolicy `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewGetFleetFleetServerHostsItemidRequest generates requests for GetFleetFleetServerHostsItemid -func NewGetFleetFleetServerHostsItemidRequest(server string, itemId string) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) - if err != nil { - return nil, err } - serverURL, err := url.Parse(server) + return response, nil +} + +// ParsePostFleetPackagePoliciesBulkGetResponse parses an HTTP response from a PostFleetPackagePoliciesBulkGetWithResponse call +func ParsePostFleetPackagePoliciesBulkGetResponse(rsp *http.Response) (*PostFleetPackagePoliciesBulkGetResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts/%s", pathParam0) - if operationPath[0] == '/' { 
- operationPath = "." + operationPath + response := &PostFleetPackagePoliciesBulkGetResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + // AdditionalDatastreamsPermissions Additional datastream permissions, that will be added to the agent policy. + AdditionalDatastreamsPermissions *[]string `json:"additional_datastreams_permissions"` + Agents *float32 `json:"agents,omitempty"` + CreatedAt string `json:"created_at"` + CreatedBy string `json:"created_by"` + + // Description Package policy description + Description *string `json:"description,omitempty"` + Elasticsearch *PostFleetPackagePoliciesBulkGet_200_Items_Elasticsearch `json:"elasticsearch,omitempty"` + Enabled bool `json:"enabled"` + Id string `json:"id"` + Inputs PostFleetPackagePoliciesBulkGet_200_Items_Inputs `json:"inputs"` + IsManaged *bool `json:"is_managed,omitempty"` + + // Name Package policy name (should be unique) + Name string `json:"name"` + + // Namespace The package policy namespace. Leave blank to inherit the agent policy's namespace. + Namespace *string `json:"namespace,omitempty"` + OutputId *string `json:"output_id"` + + // Overrides Override settings that are defined in the package policy. The override option should be used only in unusual circumstances and not as a routine procedure. + Overrides *struct { + Inputs *map[string]interface{} `json:"inputs,omitempty"` + } `json:"overrides"` + Package *struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package,omitempty"` + + // PolicyId Agent policy ID where that package policy will be added + // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id"` + PolicyIds *[]string `json:"policy_ids,omitempty"` + Revision float32 `json:"revision"` + SecretReferences *[]struct { + Id string `json:"id"` + } `json:"secret_references,omitempty"` + SpaceIds *[]string `json:"spaceIds,omitempty"` + + // SupportsAgentless Indicates whether the package policy belongs to an agentless agent policy. 
+ SupportsAgentless *bool `json:"supports_agentless"` + UpdatedAt string `json:"updated_at"` + UpdatedBy string `json:"updated_by"` + Vars *PostFleetPackagePoliciesBulkGet_200_Items_Vars `json:"vars,omitempty"` + Version *string `json:"version,omitempty"` + } `json:"items"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest struct { + Message string `json:"message"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err } - return req, nil + return response, nil } -// NewPutFleetFleetServerHostsItemidRequest calls the generic PutFleetFleetServerHostsItemid builder with application/json body -func NewPutFleetFleetServerHostsItemidRequest(server string, itemId string, body PutFleetFleetServerHostsItemidJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) +// ParsePostFleetPackagePoliciesDeleteResponse parses an HTTP response from a PostFleetPackagePoliciesDeleteWithResponse call +func ParsePostFleetPackagePoliciesDeleteResponse(rsp *http.Response) (*PostFleetPackagePoliciesDeleteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPutFleetFleetServerHostsItemidRequestWithBody(server, itemId, "application/json", bodyReader) -} -// NewPutFleetFleetServerHostsItemidRequestWithBody generates requests for PutFleetFleetServerHostsItemid with any type of body -func NewPutFleetFleetServerHostsItemidRequestWithBody(server string, itemId string, contentType string, body io.Reader) (*http.Request, error) { - var err error + response := &PostFleetPackagePoliciesDeleteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - var pathParam0 string + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + Body *struct { + Message string `json:"message"` + } `json:"body,omitempty"` + Id string `json:"id"` + Name *string `json:"name,omitempty"` + OutputId *string `json:"output_id"` + Package struct { + ExperimentalDataStreamFeatures *[]struct { + DataStream string `json:"data_stream"` + Features struct { + DocValueOnlyNumeric *bool `json:"doc_value_only_numeric,omitempty"` + DocValueOnlyOther *bool `json:"doc_value_only_other,omitempty"` + SyntheticSource *bool `json:"synthetic_source,omitempty"` + Tsdb *bool `json:"tsdb,omitempty"` + } `json:"features"` + } `json:"experimental_data_stream_features,omitempty"` + + // Name Package name + Name string `json:"name"` + RequiresRoot *bool `json:"requires_root,omitempty"` + Title *string `json:"title,omitempty"` + + // Version Package version + Version string `json:"version"` + } `json:"package"` + + // PolicyId Use `policy_ids` instead + // Deprecated: this property has been 
marked as deprecated upstream, but no `x-deprecated-reason` was set + PolicyId *string `json:"policy_id"` + PolicyIds []string `json:"policy_ids"` + StatusCode *float32 `json:"statusCode,omitempty"` + Success bool `json:"success"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "itemId", runtime.ParamLocationPath, itemId) - if err != nil { - return nil, err } - serverURL, err := url.Parse(server) + return response, nil +} + +// ParsePostFleetPackagePoliciesUpgradeResponse parses an HTTP response from a PostFleetPackagePoliciesUpgradeWithResponse call +func ParsePostFleetPackagePoliciesUpgradeResponse(rsp *http.Response) (*PostFleetPackagePoliciesUpgradeResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/fleet/fleet_server_hosts/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath + response := &PostFleetPackagePoliciesUpgradeResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + Body *struct { + Message string `json:"message"` + } `json:"body,omitempty"` + Id string `json:"id"` + Name *string `json:"name,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + Success bool `json:"success"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - req, err := http.NewRequest("PUT", queryURL.String(), body) + return response, nil +} + +// ParsePostFleetPackagePoliciesUpgradeDryrunResponse parses an HTTP response from a PostFleetPackagePoliciesUpgradeDryrunWithResponse call +func ParsePostFleetPackagePoliciesUpgradeDryrunResponse(rsp *http.Response) (*PostFleetPackagePoliciesUpgradeDryrunResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) + response := &PostFleetPackagePoliciesUpgradeDryrunResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + AgentDiff *[][]PostFleetPackagePoliciesUpgradeDryrun_200_AgentDiff_Item `json:"agent_diff,omitempty"` + Body *struct { + Message string `json:"message"` + } 
`json:"body,omitempty"` + Diff *[]PostFleetPackagePoliciesUpgradeDryrun_200_Diff_Item `json:"diff,omitempty"` + HasErrors bool `json:"hasErrors"` + Name *string `json:"name,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewGetFleetOutputsRequest generates requests for GetFleetOutputs -func NewGetFleetOutputsRequest(server string) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/api/fleet/outputs") - if operationPath[0] == '/' { - operationPath = "." + operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseDeleteFleetPackagePoliciesPackagepolicyidResponse parses an HTTP response from a DeleteFleetPackagePoliciesPackagepolicyidWithResponse call +func ParseDeleteFleetPackagePoliciesPackagepolicyidResponse(rsp *http.Response) (*DeleteFleetPackagePoliciesPackagepolicyidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err + response := &DeleteFleetPackagePoliciesPackagepolicyidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Id string `json:"id"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// NewPostFleetOutputsRequest calls the generic PostFleetOutputs builder with application/json body -func NewPostFleetOutputsRequest(server string, body PostFleetOutputsJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPostFleetOutputsRequestWithBody(server, "application/json", bodyReader) -} -// NewPostFleetOutputsRequestWithBody generates requests for PostFleetOutputs with any type of body -func NewPostFleetOutputsRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { - var err error + return response, nil +} - serverURL, err := url.Parse(server) +// ParseGetFleetPackagePoliciesPackagepolicyidResponse parses an HTTP response from a GetFleetPackagePoliciesPackagepolicyidWithResponse call +func ParseGetFleetPackagePoliciesPackagepolicyidResponse(rsp *http.Response) 
(*GetFleetPackagePoliciesPackagepolicyidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/fleet/outputs") - if operationPath[0] == '/' { - operationPath = "." + operationPath + response := &GetFleetPackagePoliciesPackagepolicyidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item PackagePolicy `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest struct { + Message string `json:"message"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err } - req.Header.Add("Content-Type", contentType) - - return req, nil + return response, nil } -// NewDeleteFleetOutputsOutputidRequest generates requests for DeleteFleetOutputsOutputid -func NewDeleteFleetOutputsOutputidRequest(server string, outputId string) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "outputId", runtime.ParamLocationPath, outputId) +// ParsePutFleetPackagePoliciesPackagepolicyidResponse parses an HTTP response from a PutFleetPackagePoliciesPackagepolicyidWithResponse call +func ParsePutFleetPackagePoliciesPackagepolicyidResponse(rsp *http.Response) (*PutFleetPackagePoliciesPackagepolicyidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &PutFleetPackagePoliciesPackagepolicyidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/api/fleet/outputs/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item PackagePolicy `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err } - return req, nil + return response, nil } -// NewGetFleetOutputsOutputidRequest generates requests for GetFleetOutputsOutputid -func NewGetFleetOutputsOutputidRequest(server string, outputId string) (*http.Request, error) { - var err error - - var pathParam0 string - - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "outputId", runtime.ParamLocationPath, outputId) +// ParseGetFleetProxiesResponse parses an HTTP response from a GetFleetProxiesWithResponse call +func ParseGetFleetProxiesResponse(rsp *http.Response) (*GetFleetProxiesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &GetFleetProxiesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/api/fleet/outputs/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + Certificate *string `json:"certificate"` + CertificateAuthorities *string `json:"certificate_authorities"` + CertificateKey *string `json:"certificate_key"` + Id string `json:"id"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyHeaders *map[string]GetFleetProxies_200_Items_ProxyHeaders_AdditionalProperties `json:"proxy_headers"` + Url string `json:"url"` + } `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err } - return req, nil + return response, nil } -// NewPutFleetOutputsOutputidRequest calls the generic PutFleetOutputsOutputid builder with application/json body -func NewPutFleetOutputsOutputidRequest(server string, outputId string, body PutFleetOutputsOutputidJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) +// ParsePostFleetProxiesResponse parses an HTTP response from a PostFleetProxiesWithResponse call +func ParsePostFleetProxiesResponse(rsp *http.Response) (*PostFleetProxiesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPutFleetOutputsOutputidRequestWithBody(server, outputId, "application/json", bodyReader) -} -// NewPutFleetOutputsOutputidRequestWithBody generates requests for PutFleetOutputsOutputid with any type of body -func NewPutFleetOutputsOutputidRequestWithBody(server string, outputId string, contentType string, body io.Reader) (*http.Request, error) { - var err error + response := &PostFleetProxiesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - var pathParam0 string + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + Certificate *string `json:"certificate"` + CertificateAuthorities *string `json:"certificate_authorities"` + CertificateKey *string `json:"certificate_key"` + Id string `json:"id"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyHeaders *map[string]PostFleetProxies_200_Item_ProxyHeaders_AdditionalProperties `json:"proxy_headers"` + Url string `json:"url"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "outputId", runtime.ParamLocationPath, outputId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct 
{ + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/api/fleet/outputs/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseDeleteFleetProxiesItemidResponse parses an HTTP response from a DeleteFleetProxiesItemidWithResponse call +func ParseDeleteFleetProxiesItemidResponse(rsp *http.Response) (*DeleteFleetProxiesItemidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("PUT", queryURL.String(), body) - if err != nil { - return nil, err + response := &DeleteFleetProxiesItemidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req.Header.Add("Content-Type", contentType) - - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Id string `json:"id"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewGetFleetPackagePoliciesRequest generates requests for GetFleetPackagePolicies -func NewGetFleetPackagePoliciesRequest(server string, params *GetFleetPackagePoliciesParams) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/api/fleet/package_policies") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseGetFleetProxiesItemidResponse parses an HTTP response from a GetFleetProxiesItemidWithResponse call +func ParseGetFleetProxiesItemidResponse(rsp *http.Response) (*GetFleetProxiesItemidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.Page != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "page", runtime.ParamLocationQuery, *params.Page); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + response := &GetFleetProxiesItemidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + Certificate *string `json:"certificate"` + CertificateAuthorities *string `json:"certificate_authorities"` + CertificateKey *string `json:"certificate_key"` + Id string `json:"id"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyHeaders *map[string]GetFleetProxiesItemid_200_Item_ProxyHeaders_AdditionalProperties `json:"proxy_headers"` + Url string `json:"url"` + } `json:"item"` } - - if params.PerPage != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "perPage", runtime.ParamLocationQuery, *params.PerPage); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest - if params.SortField != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortField", runtime.ParamLocationQuery, *params.SortField); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } - - if params.SortOrder != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "sortOrder", runtime.ParamLocationQuery, *params.SortOrder); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON400 = &dest - if params.ShowUpgradeable != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "showUpgradeable", runtime.ParamLocationQuery, *params.ShowUpgradeable); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v 
{ - queryValues.Add(k, v2) - } - } - } + } - } + return response, nil +} - if params.Kuery != nil { +// ParsePutFleetProxiesItemidResponse parses an HTTP response from a PutFleetProxiesItemidWithResponse call +func ParsePutFleetProxiesItemidResponse(rsp *http.Response) (*PutFleetProxiesItemidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "kuery", runtime.ParamLocationQuery, *params.Kuery); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + response := &PutFleetProxiesItemidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + Certificate *string `json:"certificate"` + CertificateAuthorities *string `json:"certificate_authorities"` + CertificateKey *string `json:"certificate_key"` + Id string `json:"id"` + IsPreconfigured *bool `json:"is_preconfigured,omitempty"` + Name string `json:"name"` + ProxyHeaders *map[string]PutFleetProxiesItemid_200_Item_ProxyHeaders_AdditionalProperties `json:"proxy_headers"` + Url string `json:"url"` + } `json:"item"` } - - if params.Format != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest - if params.WithAgentCount != nil { - - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "withAgentCount", runtime.ParamLocationQuery, *params.WithAgentCount); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } - + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() - } - - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err } - return req, nil + return response, nil } -// NewPostFleetPackagePoliciesRequest calls the generic PostFleetPackagePolicies builder with application/json body -func NewPostFleetPackagePoliciesRequest(server string, params *PostFleetPackagePoliciesParams, body PostFleetPackagePoliciesJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) +// ParseGetFleetRemoteSyncedIntegrationsStatusResponse parses an HTTP response from a GetFleetRemoteSyncedIntegrationsStatusWithResponse call +func ParseGetFleetRemoteSyncedIntegrationsStatusResponse(rsp *http.Response) 
(*GetFleetRemoteSyncedIntegrationsStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPostFleetPackagePoliciesRequestWithBody(server, params, "application/json", bodyReader) -} - -// NewPostFleetPackagePoliciesRequestWithBody generates requests for PostFleetPackagePolicies with any type of body -func NewPostFleetPackagePoliciesRequestWithBody(server string, params *PostFleetPackagePoliciesParams, contentType string, body io.Reader) (*http.Request, error) { - var err error - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &GetFleetRemoteSyncedIntegrationsStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/api/fleet/package_policies") - if operationPath[0] == '/' { - operationPath = "." + operationPath + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + CustomAssets *map[string]struct { + Error *string `json:"error,omitempty"` + IsDeleted *bool `json:"is_deleted,omitempty"` + Name string `json:"name"` + PackageName string `json:"package_name"` + PackageVersion string `json:"package_version"` + SyncStatus GetFleetRemoteSyncedIntegrationsStatus200CustomAssetsSyncStatus `json:"sync_status"` + Type string `json:"type"` + } `json:"custom_assets,omitempty"` + Error *string `json:"error,omitempty"` + Integrations []struct { + Error *string `json:"error,omitempty"` + Id *string `json:"id,omitempty"` + InstallStatus struct { + Main string `json:"main"` + Remote *string `json:"remote,omitempty"` + } `json:"install_status"` + PackageName *string `json:"package_name,omitempty"` + PackageVersion *string `json:"package_version,omitempty"` + SyncStatus GetFleetRemoteSyncedIntegrationsStatus200IntegrationsSyncStatus `json:"sync_status"` + UpdatedAt *string `json:"updated_at,omitempty"` + Warning *struct { + Message *string `json:"message,omitempty"` + Title string `json:"title"` + } `json:"warning,omitempty"` + } `json:"integrations"` + Warning *struct { + Message *string `json:"message,omitempty"` + Title string `json:"title"` + } `json:"warning,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParseGetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse parses an HTTP response from a GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusWithResponse call +func ParseGetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse(rsp *http.Response) (*GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.Format != nil { + response := &GetFleetRemoteSyncedIntegrationsOutputidRemoteStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, 
+ } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + CustomAssets *map[string]struct { + Error *string `json:"error,omitempty"` + IsDeleted *bool `json:"is_deleted,omitempty"` + Name string `json:"name"` + PackageName string `json:"package_name"` + PackageVersion string `json:"package_version"` + SyncStatus GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus200CustomAssetsSyncStatus `json:"sync_status"` + Type string `json:"type"` + } `json:"custom_assets,omitempty"` + Error *string `json:"error,omitempty"` + Integrations []struct { + Error *string `json:"error,omitempty"` + Id *string `json:"id,omitempty"` + InstallStatus struct { + Main string `json:"main"` + Remote *string `json:"remote,omitempty"` + } `json:"install_status"` + PackageName *string `json:"package_name,omitempty"` + PackageVersion *string `json:"package_version,omitempty"` + SyncStatus GetFleetRemoteSyncedIntegrationsOutputidRemoteStatus200IntegrationsSyncStatus `json:"sync_status"` + UpdatedAt *string `json:"updated_at,omitempty"` + Warning *struct { + Message *string `json:"message,omitempty"` + Title string `json:"title"` + } `json:"warning,omitempty"` + } `json:"integrations"` + Warning *struct { + Message *string `json:"message,omitempty"` + Title string `json:"title"` + } `json:"warning,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("POST", queryURL.String(), body) + return response, nil +} + +// ParsePostFleetServiceTokensResponse parses an HTTP response from a PostFleetServiceTokensWithResponse call +func ParsePostFleetServiceTokensResponse(rsp *http.Response) (*PostFleetServiceTokensResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) - - return req, nil -} - -// NewDeleteFleetPackagePoliciesPackagepolicyidRequest generates requests for DeleteFleetPackagePoliciesPackagepolicyid -func NewDeleteFleetPackagePoliciesPackagepolicyidRequest(server string, packagePolicyId string, params *DeleteFleetPackagePoliciesPackagepolicyidParams) (*http.Request, error) { - var err error + response := &PostFleetServiceTokensResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - var pathParam0 string + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Name string `json:"name"` + Value string `json:"value"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - pathParam0, err = 
runtime.StyleParamWithLocation("simple", false, "packagePolicyId", runtime.ParamLocationPath, packagePolicyId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/api/fleet/package_policies/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseGetFleetSettingsResponse parses an HTTP response from a GetFleetSettingsWithResponse call +func ParseGetFleetSettingsResponse(rsp *http.Response) (*GetFleetSettingsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() + response := &GetFleetSettingsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - if params.Force != nil { + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + DeleteUnenrolledAgents *struct { + Enabled bool `json:"enabled"` + IsPreconfigured bool `json:"is_preconfigured"` + } `json:"delete_unenrolled_agents,omitempty"` + HasSeenAddDataNotice *bool `json:"has_seen_add_data_notice,omitempty"` + Id string `json:"id"` + OutputSecretStorageRequirementsMet *bool `json:"output_secret_storage_requirements_met,omitempty"` + PreconfiguredFields *[]GetFleetSettings200ItemPreconfiguredFields `json:"preconfigured_fields,omitempty"` + PrereleaseIntegrationsEnabled *bool `json:"prerelease_integrations_enabled,omitempty"` + SecretStorageRequirementsMet *bool `json:"secret_storage_requirements_met,omitempty"` + UseSpaceAwarenessMigrationStartedAt *string `json:"use_space_awareness_migration_started_at"` + UseSpaceAwarenessMigrationStatus *GetFleetSettings200ItemUseSpaceAwarenessMigrationStatus `json:"use_space_awareness_migration_status,omitempty"` + Version *string `json:"version,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "force", runtime.ParamLocationQuery, *params.Force); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest struct { + Message string `json:"message"` + } + if err := json.Unmarshal(bodyBytes, &dest); err 
!= nil { + return nil, err } + response.JSON404 = &dest - queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) + return response, nil +} + +// ParsePutFleetSettingsResponse parses an HTTP response from a PutFleetSettingsWithResponse call +func ParsePutFleetSettingsResponse(rsp *http.Response) (*PutFleetSettingsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return req, nil -} + response := &PutFleetSettingsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } -// NewGetFleetPackagePoliciesPackagepolicyidRequest generates requests for GetFleetPackagePoliciesPackagepolicyid -func NewGetFleetPackagePoliciesPackagepolicyidRequest(server string, packagePolicyId string, params *GetFleetPackagePoliciesPackagepolicyidParams) (*http.Request, error) { - var err error + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + DeleteUnenrolledAgents *struct { + Enabled bool `json:"enabled"` + IsPreconfigured bool `json:"is_preconfigured"` + } `json:"delete_unenrolled_agents,omitempty"` + HasSeenAddDataNotice *bool `json:"has_seen_add_data_notice,omitempty"` + Id string `json:"id"` + OutputSecretStorageRequirementsMet *bool `json:"output_secret_storage_requirements_met,omitempty"` + PreconfiguredFields *[]PutFleetSettings200ItemPreconfiguredFields `json:"preconfigured_fields,omitempty"` + PrereleaseIntegrationsEnabled *bool `json:"prerelease_integrations_enabled,omitempty"` + SecretStorageRequirementsMet *bool `json:"secret_storage_requirements_met,omitempty"` + UseSpaceAwarenessMigrationStartedAt *string `json:"use_space_awareness_migration_started_at"` + UseSpaceAwarenessMigrationStatus *PutFleetSettings200ItemUseSpaceAwarenessMigrationStatus `json:"use_space_awareness_migration_status,omitempty"` + Version *string `json:"version,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "packagePolicyId", runtime.ParamLocationPath, packagePolicyId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest struct { + Message string `json:"message"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/api/fleet/package_policies/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParsePostFleetSetupResponse parses an HTTP response from a PostFleetSetupWithResponse call +func ParsePostFleetSetupResponse(rsp *http.Response) (*PostFleetSetupResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.Format != nil { + response := &PostFleetSetupResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + IsInitialized bool `json:"isInitialized"` + NonFatalErrors []struct { + Message string `json:"message"` + Name string `json:"name"` + } `json:"nonFatalErrors"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest struct { + Message string `json:"message"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err } - return req, nil + return response, nil } -// NewPutFleetPackagePoliciesPackagepolicyidRequest calls the generic PutFleetPackagePoliciesPackagepolicyid builder with application/json body -func NewPutFleetPackagePoliciesPackagepolicyidRequest(server string, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, body PutFleetPackagePoliciesPackagepolicyidJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) +// ParseGetFleetSpaceSettingsResponse parses an HTTP response from a GetFleetSpaceSettingsWithResponse call +func ParseGetFleetSpaceSettingsResponse(rsp *http.Response) (*GetFleetSpaceSettingsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPutFleetPackagePoliciesPackagepolicyidRequestWithBody(server, packagePolicyId, params, "application/json", bodyReader) -} -// NewPutFleetPackagePoliciesPackagepolicyidRequestWithBody generates requests for PutFleetPackagePoliciesPackagepolicyid with any type of body -func NewPutFleetPackagePoliciesPackagepolicyidRequestWithBody(server string, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, contentType string, body io.Reader) (*http.Request, error) { - var err error + response := &GetFleetSpaceSettingsResponse{ + 
Body: bodyBytes, + HTTPResponse: rsp, + } - var pathParam0 string + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + AllowedNamespacePrefixes []string `json:"allowed_namespace_prefixes"` + ManagedBy *string `json:"managed_by,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "packagePolicyId", runtime.ParamLocationPath, packagePolicyId) - if err != nil { - return nil, err } - serverURL, err := url.Parse(server) + return response, nil +} + +// ParsePutFleetSpaceSettingsResponse parses an HTTP response from a PutFleetSpaceSettingsWithResponse call +func ParsePutFleetSpaceSettingsResponse(rsp *http.Response) (*PutFleetSpaceSettingsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/fleet/package_policies/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." + operationPath + response := &PutFleetSpaceSettingsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + AllowedNamespacePrefixes []string `json:"allowed_namespace_prefixes"` + ManagedBy *string `json:"managed_by,omitempty"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil +} + +// ParseGetFleetUninstallTokensResponse parses an HTTP response from a GetFleetUninstallTokensWithResponse call +func ParseGetFleetUninstallTokensResponse(rsp *http.Response) (*GetFleetUninstallTokensResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - if params != nil { - queryValues := queryURL.Query() - - if params.Format != nil { + response := &GetFleetUninstallTokensResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - if queryFrag, err := runtime.StyleParamWithLocation("form", true, "format", runtime.ParamLocationQuery, *params.Format); err != nil { - return nil, err - } else if parsed, err := url.ParseQuery(queryFrag); err != nil { - return nil, err - } else { - for k, v := range parsed { - for _, v2 := range v { - queryValues.Add(k, v2) - } - } - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Items []struct { + CreatedAt string `json:"created_at"` + Id string `json:"id"` + Namespaces *[]string `json:"namespaces,omitempty"` + PolicyId string `json:"policy_id"` + PolicyName *string `json:"policy_name"` + } `json:"items"` + Page float32 `json:"page"` + PerPage float32 `json:"perPage"` + Total float32 `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err 
+ } + response.JSON400 = &dest - queryURL.RawQuery = queryValues.Encode() } - req, err := http.NewRequest("PUT", queryURL.String(), body) + return response, nil +} + +// ParseGetFleetUninstallTokensUninstalltokenidResponse parses an HTTP response from a GetFleetUninstallTokensUninstalltokenidWithResponse call +func ParseGetFleetUninstallTokensUninstalltokenidResponse(rsp *http.Response) (*GetFleetUninstallTokensUninstalltokenidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) + response := &GetFleetUninstallTokensUninstalltokenidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Item struct { + CreatedAt string `json:"created_at"` + Id string `json:"id"` + Namespaces *[]string `json:"namespaces,omitempty"` + PolicyId string `json:"policy_id"` + PolicyName *string `json:"policy_name"` + Token string `json:"token"` + } `json:"item"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Attributes interface{} `json:"attributes"` + Error *string `json:"error,omitempty"` + ErrorType *string `json:"errorType,omitempty"` + Message string `json:"message"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// NewPostParametersRequest calls the generic PostParameters builder with application/json body -func NewPostParametersRequest(server string, body PostParametersJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err } - bodyReader = bytes.NewReader(buf) - return NewPostParametersRequestWithBody(server, "application/json", bodyReader) -} -// NewPostParametersRequestWithBody generates requests for PostParameters with any type of body -func NewPostParametersRequestWithBody(server string, contentType string, body io.Reader) (*http.Request, error) { - var err error + return response, nil +} - serverURL, err := url.Parse(server) +// ParseDeleteListResponse parses an HTTP response from a DeleteListWithResponse call +func ParseDeleteListResponse(rsp *http.Response) (*DeleteListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/synthetics/params") - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + response := &DeleteListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityListsAPIList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - req.Header.Add("Content-Type", contentType) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - return req, nil -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// NewDeleteParameterRequest generates requests for DeleteParameter -func NewDeleteParameterRequest(server string, id string) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) - if err != nil { - return nil, err } - serverURL, err := url.Parse(server) + return response, nil +} + +// ParseReadListResponse parses an HTTP response from a ReadListWithResponse call +func ParseReadListResponse(rsp *http.Response) (*ReadListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + response := &ReadListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityListsAPIList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - return req, nil -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// NewGetParameterRequest generates requests for GetParameter -func NewGetParameterRequest(server string, id string) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParsePatchListResponse parses an HTTP response from a PatchListWithResponse call +func ParsePatchListResponse(rsp *http.Response) (*PatchListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err + response := &PatchListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return req, nil -} - -// NewPutParameterRequest calls the generic PutParameter builder with application/json body -func NewPutParameterRequest(server string, id string, body PutParameterJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewPutParameterRequestWithBody(server, id, "application/json", bodyReader) -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityListsAPIList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewPutParameterRequestWithBody generates requests for PutParameter with any type of body -func NewPutParameterRequestWithBody(server string, id string, contentType string, body io.Reader) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - operationPath := fmt.Sprintf("/api/synthetics/params/%s", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err } - req, err := http.NewRequest("PUT", queryURL.String(), body) + return response, nil +} + +// ParseCreateListResponse parses an HTTP response from a CreateListWithResponse call +func ParseCreateListResponse(rsp *http.Response) (*CreateListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) + response := &CreateListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityListsAPIList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewDeleteActionsConnectorIdRequest generates requests for DeleteActionsConnectorId -func NewDeleteActionsConnectorIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - var pathParam1 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseUpdateListResponse parses an HTTP response from a UpdateListWithResponse call +func ParseUpdateListResponse(rsp *http.Response) (*UpdateListResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err + response := &UpdateListResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return req, nil -} - -// NewGetActionsConnectorIdRequest generates requests for GetActionsConnectorId -func NewGetActionsConnectorIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { - var err error + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityListsAPIList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - var pathParam1 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + return response, nil +} - req, err := http.NewRequest("GET", queryURL.String(), nil) +// ParseFindListsResponse parses an HTTP response from a FindListsWithResponse call +func ParseFindListsResponse(rsp *http.Response) (*FindListsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return req, nil -} - -// NewPostActionsConnectorIdRequest calls the generic PostActionsConnectorId builder with application/json body -func NewPostActionsConnectorIdRequest(server string, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err + response := &FindListsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - bodyReader = bytes.NewReader(buf) - return NewPostActionsConnectorIdRequestWithBody(server, spaceId, id, "application/json", bodyReader) -} -// NewPostActionsConnectorIdRequestWithBody generates requests for PostActionsConnectorId with any type of body -func NewPostActionsConnectorIdRequestWithBody(server string, spaceId SpaceId, id string, contentType string, body io.Reader) (*http.Request, error) { - var err error + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Cursor SecurityListsAPIFindListsCursor `json:"cursor"` + Data []SecurityListsAPIList `json:"data"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - var pathParam1 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseDeleteListIndexResponse parses an HTTP response from a DeleteListIndexWithResponse call +func ParseDeleteListIndexResponse(rsp *http.Response) (*DeleteListIndexResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err + response := &DeleteListIndexResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req.Header.Add("Content-Type", contentType) - - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Acknowledged bool `json:"acknowledged"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewPutActionsConnectorIdRequest calls the generic PutActionsConnectorId builder with application/json body -func NewPutActionsConnectorIdRequest(server string, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewPutActionsConnectorIdRequestWithBody(server, spaceId, id, "application/json", bodyReader) -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// NewPutActionsConnectorIdRequestWithBody generates requests for PutActionsConnectorId with any type of body -func NewPutActionsConnectorIdRequestWithBody(server string, spaceId SpaceId, id string, contentType string, body io.Reader) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - var pathParam1 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) - if err != nil { - return nil, err } - serverURL, err := url.Parse(server) + return response, nil +} + +// ParseReadListIndexResponse parses an HTTP response from a ReadListIndexWithResponse call +func ParseReadListIndexResponse(rsp *http.Response) (*ReadListIndexResponse, error) { + bodyBytes, err := 
io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/s/%s/api/actions/connector/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath + response := &ReadListIndexResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + ListIndex bool `json:"list_index"` + ListItemIndex bool `json:"list_item_index"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - req, err := http.NewRequest("PUT", queryURL.String(), body) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - req.Header.Add("Content-Type", contentType) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - return req, nil -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest -// NewGetActionsConnectorsRequest generates requests for GetActionsConnectors -func NewGetActionsConnectorsRequest(server string, spaceId SpaceId) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err } - serverURL, err := url.Parse(server) + return response, nil +} + +// ParseCreateListIndexResponse parses an HTTP response from a CreateListIndexWithResponse call +func ParseCreateListIndexResponse(rsp *http.Response) (*CreateListIndexResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - operationPath := fmt.Sprintf("/s/%s/api/actions/connectors", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath + response := &CreateListIndexResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Acknowledged bool `json:"acknowledged"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - return req, nil -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// NewGetAllDataViewsDefaultRequest generates requests for GetAllDataViewsDefault -func NewGetAllDataViewsDefaultRequest(server string, spaceId SpaceId) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/s/%s/api/data_views", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseDeleteListItemResponse parses an HTTP response from a DeleteListItemWithResponse call +func ParseDeleteListItemResponse(rsp *http.Response) (*DeleteListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err + response := &DeleteListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewCreateDataViewDefaultwRequest calls the generic CreateDataViewDefaultw builder with application/json body -func NewCreateDataViewDefaultwRequest(server string, spaceId SpaceId, body CreateDataViewDefaultwJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewCreateDataViewDefaultwRequestWithBody(server, spaceId, "application/json", bodyReader) -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// NewCreateDataViewDefaultwRequestWithBody generates requests for CreateDataViewDefaultw with any type of body -func NewCreateDataViewDefaultwRequestWithBody(server string, spaceId SpaceId, contentType string, body io.Reader) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - operationPath := fmt.Sprintf("/s/%s/api/data_views/data_view", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath } - queryURL, err := serverURL.Parse(operationPath) - if err != nil { - return nil, err - } + return response, nil +} - req, err := http.NewRequest("POST", queryURL.String(), body) +// ParseReadListItemResponse parses an HTTP response from a ReadListItemWithResponse call +func ParseReadListItemResponse(rsp *http.Response) (*ReadListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req.Header.Add("Content-Type", contentType) + response := &ReadListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewDeleteDataViewDefaultRequest generates requests for DeleteDataViewDefault -func NewDeleteDataViewDefaultRequest(server string, spaceId SpaceId, viewId DataViewsViewId) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - var pathParam1 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/s/%s/api/data_views/data_view/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParsePatchListItemResponse parses an HTTP response from a PatchListItemWithResponse call +func ParsePatchListItemResponse(rsp *http.Response) (*PatchListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err + response := &PatchListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityListsAPIListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewGetDataViewDefaultRequest generates requests for GetDataViewDefault -func NewGetDataViewDefaultRequest(server string, spaceId SpaceId, viewId DataViewsViewId) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - var pathParam1 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/s/%s/api/data_views/data_view/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseCreateListItemResponse parses an HTTP response from a CreateListItemWithResponse call +func ParseCreateListItemResponse(rsp *http.Response) (*CreateListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err + response := &CreateListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityListsAPIListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewUpdateDataViewDefaultRequest calls the generic UpdateDataViewDefault builder with application/json body -func NewUpdateDataViewDefaultRequest(server string, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewUpdateDataViewDefaultRequestWithBody(server, spaceId, viewId, "application/json", bodyReader) -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// NewUpdateDataViewDefaultRequestWithBody generates requests for UpdateDataViewDefault with any type of body -func NewUpdateDataViewDefaultRequestWithBody(server string, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - var pathParam1 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "viewId", runtime.ParamLocationPath, viewId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } 
- operationPath := fmt.Sprintf("/s/%s/api/data_views/data_view/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseUpdateListItemResponse parses an HTTP response from a UpdateListItemWithResponse call +func ParseUpdateListItemResponse(rsp *http.Response) (*UpdateListItemResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err + response := &UpdateListItemResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req.Header.Add("Content-Type", contentType) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityListsAPIListItem + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - return req, nil -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// NewPostMaintenanceWindowRequest calls the generic PostMaintenanceWindow builder with application/json body -func NewPostMaintenanceWindowRequest(server string, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewPostMaintenanceWindowRequestWithBody(server, spaceId, "application/json", bodyReader) -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// NewPostMaintenanceWindowRequestWithBody generates requests for PostMaintenanceWindow with any type of body -func NewPostMaintenanceWindowRequestWithBody(server string, spaceId SpaceId, contentType string, body io.Reader) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/s/%s/api/maintenance_window", pathParam0) - if operationPath[0] == '/' { - operationPath = "." 
+ operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseExportListItemsResponse parses an HTTP response from a ExportListItemsWithResponse call +func ParseExportListItemsResponse(rsp *http.Response) (*ExportListItemsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("POST", queryURL.String(), body) - if err != nil { - return nil, err + response := &ExportListItemsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req.Header.Add("Content-Type", contentType) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - return req, nil -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest -// NewDeleteMaintenanceWindowIdRequest generates requests for DeleteMaintenanceWindowId -func NewDeleteMaintenanceWindowIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err } - var pathParam1 string + return response, nil +} - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) +// ParseFindListItemsResponse parses an HTTP response from a FindListItemsWithResponse call +func ParseFindListItemsResponse(rsp *http.Response) (*FindListItemsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &FindListItemsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/s/%s/api/maintenance_window/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Cursor Returns the items that come after the last item returned in the previous call (use the `cursor` value returned in the previous call). This parameter uses the `tie_breaker_id` field to ensure all items are sorted and returned correctly. 
+ Cursor SecurityListsAPIFindListItemsCursor `json:"cursor"` + Data []SecurityListsAPIListItem `json:"data"` + Page int `json:"page"` + PerPage int `json:"per_page"` + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest + } - queryURL, err := serverURL.Parse(operationPath) + return response, nil +} + +// ParseImportListItemsResponse parses an HTTP response from a ImportListItemsWithResponse call +func ParseImportListItemsResponse(rsp *http.Response) (*ImportListItemsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("DELETE", queryURL.String(), nil) - if err != nil { - return nil, err + response := &ImportListItemsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityListsAPIList + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewGetMaintenanceWindowIdRequest generates requests for GetMaintenanceWindowId -func NewGetMaintenanceWindowIdRequest(server string, spaceId SpaceId, id string) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - var pathParam1 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest - pathParam1, err = 
runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - serverURL, err := url.Parse(server) - if err != nil { - return nil, err } - operationPath := fmt.Sprintf("/s/%s/api/maintenance_window/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseReadListPrivilegesResponse parses an HTTP response from a ReadListPrivilegesWithResponse call +func ParseReadListPrivilegesResponse(rsp *http.Response) (*ReadListPrivilegesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("GET", queryURL.String(), nil) - if err != nil { - return nil, err + response := &ReadListPrivilegesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return req, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + IsAuthenticated bool `json:"is_authenticated"` + ListItems SecurityListsAPIListItemPrivileges `json:"listItems"` + Lists SecurityListsAPIListPrivileges `json:"lists"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// NewPatchMaintenanceWindowIdRequest calls the generic PatchMaintenanceWindowId builder with application/json body -func NewPatchMaintenanceWindowIdRequest(server string, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody) (*http.Request, error) { - var bodyReader io.Reader - buf, err := json.Marshal(body) - if err != nil { - return nil, err - } - bodyReader = bytes.NewReader(buf) - return NewPatchMaintenanceWindowIdRequestWithBody(server, spaceId, id, "application/json", bodyReader) -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// NewPatchMaintenanceWindowIdRequestWithBody generates requests for PatchMaintenanceWindowId with any type of body -func NewPatchMaintenanceWindowIdRequestWithBody(server string, spaceId SpaceId, id string, contentType string, body io.Reader) (*http.Request, error) { - var err error + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - var pathParam0 string + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SecurityListsAPIPlatformErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 500: + var dest SecurityListsAPISiemErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON500 = &dest - pathParam0, err = runtime.StyleParamWithLocation("simple", false, "spaceId", runtime.ParamLocationPath, spaceId) - if err != nil { - 
return nil, err } - var pathParam1 string + return response, nil +} - pathParam1, err = runtime.StyleParamWithLocation("simple", false, "id", runtime.ParamLocationPath, id) +// ParseDeleteLogstashPipelineResponse parses an HTTP response from a DeleteLogstashPipelineWithResponse call +func ParseDeleteLogstashPipelineResponse(rsp *http.Response) (*DeleteLogstashPipelineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - serverURL, err := url.Parse(server) - if err != nil { - return nil, err + response := &DeleteLogstashPipelineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - operationPath := fmt.Sprintf("/s/%s/api/maintenance_window/%s", pathParam0, pathParam1) - if operationPath[0] == '/' { - operationPath = "." + operationPath - } + return response, nil +} - queryURL, err := serverURL.Parse(operationPath) +// ParseGetLogstashPipelineResponse parses an HTTP response from a GetLogstashPipelineWithResponse call +func ParseGetLogstashPipelineResponse(rsp *http.Response) (*GetLogstashPipelineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - req, err := http.NewRequest("PATCH", queryURL.String(), body) - if err != nil { - return nil, err + response := &GetLogstashPipelineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - req.Header.Add("Content-Type", contentType) - - return req, nil -} - -func (c *Client) applyEditors(ctx context.Context, req *http.Request, additionalEditors []RequestEditorFn) error { - for _, r := range c.RequestEditors { - if err := r(ctx, req); err != nil { - return err - } - } - for _, r := range additionalEditors { - if err := r(ctx, req); err != nil { - return err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON200 = &dest + } - return nil -} -// ClientWithResponses builds on ClientInterface to offer response payloads -type ClientWithResponses struct { - ClientInterface + return response, nil } -// NewClientWithResponses creates a new ClientWithResponses, which wraps -// Client with return type handling -func NewClientWithResponses(server string, opts ...ClientOption) (*ClientWithResponses, error) { - client, err := NewClient(server, opts...) +// ParsePutLogstashPipelineResponse parses an HTTP response from a PutLogstashPipelineWithResponse call +func ParsePutLogstashPipelineResponse(rsp *http.Response) (*PutLogstashPipelineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return &ClientWithResponses{client}, nil -} -// WithBaseURL overrides the baseURL. -func WithBaseURL(baseURL string) ClientOption { - return func(c *Client) error { - newBaseURL, err := url.Parse(baseURL) - if err != nil { - return err - } - c.Server = newBaseURL.String() - return nil + response := &PutLogstashPipelineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } + + return response, nil } -// ClientWithResponsesInterface is the interface specification for the client with responses above. 
-type ClientWithResponsesInterface interface { - // DeleteAgentConfigurationWithBodyWithResponse request with any body - DeleteAgentConfigurationWithBodyWithResponse(ctx context.Context, params *DeleteAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteAgentConfigurationResponse, error) +// ParseGetLogstashPipelinesResponse parses an HTTP response from a GetLogstashPipelinesWithResponse call +func ParseGetLogstashPipelinesResponse(rsp *http.Response) (*GetLogstashPipelinesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - DeleteAgentConfigurationWithResponse(ctx context.Context, params *DeleteAgentConfigurationParams, body DeleteAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteAgentConfigurationResponse, error) + response := &GetLogstashPipelinesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // GetAgentConfigurationsWithResponse request - GetAgentConfigurationsWithResponse(ctx context.Context, params *GetAgentConfigurationsParams, reqEditors ...RequestEditorFn) (*GetAgentConfigurationsResponse, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // CreateUpdateAgentConfigurationWithBodyWithResponse request with any body - CreateUpdateAgentConfigurationWithBodyWithResponse(ctx context.Context, params *CreateUpdateAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateUpdateAgentConfigurationResponse, error) + } - CreateUpdateAgentConfigurationWithResponse(ctx context.Context, params *CreateUpdateAgentConfigurationParams, body CreateUpdateAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateUpdateAgentConfigurationResponse, error) + return response, nil +} - // GetFleetAgentPoliciesWithResponse request - GetFleetAgentPoliciesWithResponse(ctx context.Context, params *GetFleetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesResponse, error) +// ParsePostMaintenanceWindowIdArchiveResponse parses an HTTP response from a PostMaintenanceWindowIdArchiveWithResponse call +func ParsePostMaintenanceWindowIdArchiveResponse(rsp *http.Response) (*PostMaintenanceWindowIdArchiveResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // PostFleetAgentPoliciesWithBodyWithResponse request with any body - PostFleetAgentPoliciesWithBodyWithResponse(ctx context.Context, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesResponse, error) + response := &PostMaintenanceWindowIdArchiveResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - PostFleetAgentPoliciesWithResponse(ctx context.Context, params *PostFleetAgentPoliciesParams, body PostFleetAgentPoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesResponse, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // CreatedAt The date and time when the maintenance window was created. 
+ CreatedAt string `json:"created_at"` - // PostFleetAgentPoliciesDeleteWithBodyWithResponse request with any body - PostFleetAgentPoliciesDeleteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesDeleteResponse, error) + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` - PostFleetAgentPoliciesDeleteWithResponse(ctx context.Context, body PostFleetAgentPoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesDeleteResponse, error) + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` - // GetFleetAgentPoliciesAgentpolicyidWithResponse request - GetFleetAgentPoliciesAgentpolicyidWithResponse(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidResponse, error) + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` - // PutFleetAgentPoliciesAgentpolicyidWithBodyWithResponse request with any body - PutFleetAgentPoliciesAgentpolicyidWithBodyWithResponse(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetAgentPoliciesAgentpolicyidResponse, error) + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` - PutFleetAgentPoliciesAgentpolicyidWithResponse(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, body PutFleetAgentPoliciesAgentpolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetAgentPoliciesAgentpolicyidResponse, error) + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` - // GetFleetEnrollmentApiKeysWithResponse request - GetFleetEnrollmentApiKeysWithResponse(ctx context.Context, params *GetFleetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*GetFleetEnrollmentApiKeysResponse, error) + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` - // GetFleetEpmPackagesWithResponse request - GetFleetEpmPackagesWithResponse(ctx context.Context, params *GetFleetEpmPackagesParams, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesResponse, error) + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. 
+ OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` - // PostFleetEpmPackagesWithBodyWithResponse request with any body - PostFleetEpmPackagesWithBodyWithResponse(ctx context.Context, params *PostFleetEpmPackagesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesResponse, error) + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` - // DeleteFleetEpmPackagesPkgnamePkgversionWithResponse request - DeleteFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*DeleteFleetEpmPackagesPkgnamePkgversionResponse, error) + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` - // GetFleetEpmPackagesPkgnamePkgversionWithResponse request - GetFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesPkgnamePkgversionResponse, error) + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` - // PostFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse request with any body - PostFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionResponse, error) + // Status The current status of the maintenance window. + Status PostMaintenanceWindowIdArchive200Status `json:"status"` - PostFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, body PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionResponse, error) + // Title The name of the maintenance window. + Title string `json:"title"` - // GetFleetFleetServerHostsWithResponse request - GetFleetFleetServerHostsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetFleetServerHostsResponse, error) + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` - // PostFleetFleetServerHostsWithBodyWithResponse request with any body - PostFleetFleetServerHostsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetFleetServerHostsResponse, error) + // UpdatedBy The identifier for the user that last updated this maintenance window. 
+ UpdatedBy *string `json:"updated_by"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - PostFleetFleetServerHostsWithResponse(ctx context.Context, body PostFleetFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetFleetServerHostsResponse, error) + } - // DeleteFleetFleetServerHostsItemidWithResponse request - DeleteFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*DeleteFleetFleetServerHostsItemidResponse, error) + return response, nil +} - // GetFleetFleetServerHostsItemidWithResponse request - GetFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*GetFleetFleetServerHostsItemidResponse, error) +// ParsePostMaintenanceWindowIdUnarchiveResponse parses an HTTP response from a PostMaintenanceWindowIdUnarchiveWithResponse call +func ParsePostMaintenanceWindowIdUnarchiveResponse(rsp *http.Response) (*PostMaintenanceWindowIdUnarchiveResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // PutFleetFleetServerHostsItemidWithBodyWithResponse request with any body - PutFleetFleetServerHostsItemidWithBodyWithResponse(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetFleetServerHostsItemidResponse, error) + response := &PostMaintenanceWindowIdUnarchiveResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - PutFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, body PutFleetFleetServerHostsItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetFleetServerHostsItemidResponse, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` - // GetFleetOutputsWithResponse request - GetFleetOutputsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetOutputsResponse, error) + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` - // PostFleetOutputsWithBodyWithResponse request with any body - PostFleetOutputsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetOutputsResponse, error) + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` - PostFleetOutputsWithResponse(ctx context.Context, body PostFleetOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetOutputsResponse, error) + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. 
+ End *string `json:"end,omitempty"` - // DeleteFleetOutputsOutputidWithResponse request - DeleteFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*DeleteFleetOutputsOutputidResponse, error) + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` - // GetFleetOutputsOutputidWithResponse request - GetFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*GetFleetOutputsOutputidResponse, error) + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` - // PutFleetOutputsOutputidWithBodyWithResponse request with any body - PutFleetOutputsOutputidWithBodyWithResponse(ctx context.Context, outputId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetOutputsOutputidResponse, error) + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` - PutFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, body PutFleetOutputsOutputidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetOutputsOutputidResponse, error) + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` - // GetFleetPackagePoliciesWithResponse request - GetFleetPackagePoliciesWithResponse(ctx context.Context, params *GetFleetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*GetFleetPackagePoliciesResponse, error) + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` - // PostFleetPackagePoliciesWithBodyWithResponse request with any body - PostFleetPackagePoliciesWithBodyWithResponse(ctx context.Context, params *PostFleetPackagePoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesResponse, error) + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` - PostFleetPackagePoliciesWithResponse(ctx context.Context, params *PostFleetPackagePoliciesParams, body PostFleetPackagePoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesResponse, error) + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` - // DeleteFleetPackagePoliciesPackagepolicyidWithResponse request - DeleteFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *DeleteFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*DeleteFleetPackagePoliciesPackagepolicyidResponse, error) + // Status The current status of the maintenance window. 
+ Status PostMaintenanceWindowIdUnarchive200Status `json:"status"` - // GetFleetPackagePoliciesPackagepolicyidWithResponse request - GetFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *GetFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*GetFleetPackagePoliciesPackagepolicyidResponse, error) + // Title The name of the maintenance window. + Title string `json:"title"` - // PutFleetPackagePoliciesPackagepolicyidWithBodyWithResponse request with any body - PutFleetPackagePoliciesPackagepolicyidWithBodyWithResponse(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetPackagePoliciesPackagepolicyidResponse, error) + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` - PutFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, body PutFleetPackagePoliciesPackagepolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetPackagePoliciesPackagepolicyidResponse, error) + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // PostParametersWithBodyWithResponse request with any body - PostParametersWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostParametersResponse, error) + } - PostParametersWithResponse(ctx context.Context, body PostParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*PostParametersResponse, error) + return response, nil +} - // DeleteParameterWithResponse request - DeleteParameterWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteParameterResponse, error) +// ParseMlSyncResponse parses an HTTP response from a MlSyncWithResponse call +func ParseMlSyncResponse(rsp *http.Response) (*MlSyncResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // GetParameterWithResponse request - GetParameterWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetParameterResponse, error) + response := &MlSyncResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // PutParameterWithBodyWithResponse request with any body - PutParameterWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutParameterResponse, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest MachineLearningAPIsMlSync200Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - PutParameterWithResponse(ctx context.Context, id string, body PutParameterJSONRequestBody, reqEditors ...RequestEditorFn) (*PutParameterResponse, error) + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest MachineLearningAPIsMlSync4xxResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - // DeleteActionsConnectorIdWithResponse request - DeleteActionsConnectorIdWithResponse(ctx context.Context, spaceId 
SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteActionsConnectorIdResponse, error) + } - // GetActionsConnectorIdWithResponse request - GetActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetActionsConnectorIdResponse, error) + return response, nil +} - // PostActionsConnectorIdWithBodyWithResponse request with any body - PostActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) +// ParseDeleteNoteResponse parses an HTTP response from a DeleteNoteWithResponse call +func ParseDeleteNoteResponse(rsp *http.Response) (*DeleteNoteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - PostActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) + response := &DeleteNoteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // PutActionsConnectorIdWithBodyWithResponse request with any body - PutActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) + return response, nil +} - PutActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) +// ParseGetNotesResponse parses an HTTP response from a GetNotesWithResponse call +func ParseGetNotesResponse(rsp *http.Response) (*GetNotesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // GetActionsConnectorsWithResponse request - GetActionsConnectorsWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetActionsConnectorsResponse, error) + response := &GetNotesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // GetAllDataViewsDefaultWithResponse request - GetAllDataViewsDefaultWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetAllDataViewsDefaultResponse, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityTimelineAPIGetNotesResult + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // CreateDataViewDefaultwWithBodyWithResponse request with any body - CreateDataViewDefaultwWithBodyWithResponse(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateDataViewDefaultwResponse, error) + } - CreateDataViewDefaultwWithResponse(ctx context.Context, spaceId SpaceId, body CreateDataViewDefaultwJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateDataViewDefaultwResponse, error) + return response, nil +} - // DeleteDataViewDefaultWithResponse request - DeleteDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*DeleteDataViewDefaultResponse, error) +// ParsePersistNoteRouteResponse parses an HTTP response from a PersistNoteRouteWithResponse call +func ParsePersistNoteRouteResponse(rsp *http.Response) (*PersistNoteRouteResponse, error) { + 
bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // GetDataViewDefaultWithResponse request - GetDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*GetDataViewDefaultResponse, error) + response := &PersistNoteRouteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // UpdateDataViewDefaultWithBodyWithResponse request with any body - UpdateDataViewDefaultWithBodyWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateDataViewDefaultResponse, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityTimelineAPIResponseNote + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - UpdateDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateDataViewDefaultResponse, error) + } - // PostMaintenanceWindowWithBodyWithResponse request with any body - PostMaintenanceWindowWithBodyWithResponse(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) + return response, nil +} - PostMaintenanceWindowWithResponse(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) +// ParseObservabilityAiAssistantChatCompleteResponse parses an HTTP response from a ObservabilityAiAssistantChatCompleteWithResponse call +func ParseObservabilityAiAssistantChatCompleteResponse(rsp *http.Response) (*ObservabilityAiAssistantChatCompleteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // DeleteMaintenanceWindowIdWithResponse request - DeleteMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteMaintenanceWindowIdResponse, error) + response := &ObservabilityAiAssistantChatCompleteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // GetMaintenanceWindowIdWithResponse request - GetMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetMaintenanceWindowIdResponse, error) + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // PatchMaintenanceWindowIdWithBodyWithResponse request with any body - PatchMaintenanceWindowIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) + } - PatchMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) + return response, nil } -type DeleteAgentConfigurationResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *APMUIDeleteAgentConfigurationsResponse - JSON400 *APMUI400Response - JSON401 *APMUI401Response - JSON403 *APMUI403Response - JSON404 
*APMUI404Response -} +// ParseOsqueryFindLiveQueriesResponse parses an HTTP response from a OsqueryFindLiveQueriesWithResponse call +func ParseOsqueryFindLiveQueriesResponse(rsp *http.Response) (*OsqueryFindLiveQueriesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } -// Status returns HTTPResponse.Status -func (r DeleteAgentConfigurationResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + response := &OsqueryFindLiveQueriesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteAgentConfigurationResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPIFindLiveQueryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } - return 0 -} -type GetAgentConfigurationsResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *APMUIAgentConfigurationsResponse - JSON400 *APMUI400Response - JSON401 *APMUI401Response - JSON404 *APMUI404Response + return response, nil } -// Status returns HTTPResponse.Status -func (r GetAgentConfigurationsResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseOsqueryCreateLiveQueryResponse parses an HTTP response from a OsqueryCreateLiveQueryWithResponse call +func ParseOsqueryCreateLiveQueryResponse(rsp *http.Response) (*OsqueryCreateLiveQueryResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetAgentConfigurationsResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &OsqueryCreateLiveQueryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type CreateUpdateAgentConfigurationResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *map[string]interface{} - JSON400 *APMUI400Response - JSON401 *APMUI401Response - JSON403 *APMUI403Response - JSON404 *APMUI404Response -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPICreateLiveQueryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// Status returns HTTPResponse.Status -func (r CreateUpdateAgentConfigurationResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r CreateUpdateAgentConfigurationResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 + return response, nil } -type GetFleetAgentPoliciesResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Items []AgentPolicy `json:"items"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` +// 
ParseOsqueryGetLiveQueryDetailsResponse parses an HTTP response from a OsqueryGetLiveQueryDetailsWithResponse call +func ParseOsqueryGetLiveQueryDetailsResponse(rsp *http.Response) (*OsqueryGetLiveQueryDetailsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } -} -// Status returns HTTPResponse.Status -func (r GetFleetAgentPoliciesResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + response := &OsqueryGetLiveQueryDetailsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetAgentPoliciesResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPIFindLiveQueryDetailsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } - return 0 + + return response, nil } -type PostFleetAgentPoliciesResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item AgentPolicy `json:"item"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` +// ParseOsqueryGetLiveQueryResultsResponse parses an HTTP response from a OsqueryGetLiveQueryResultsWithResponse call +func ParseOsqueryGetLiveQueryResultsResponse(rsp *http.Response) (*OsqueryGetLiveQueryResultsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } -} -// Status returns HTTPResponse.Status -func (r PostFleetAgentPoliciesResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + response := &OsqueryGetLiveQueryResultsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PostFleetAgentPoliciesResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPIGetLiveQueryResultsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } - return 0 + + return response, nil } -type PostFleetAgentPoliciesDeleteResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Id string `json:"id"` - Name string `json:"name"` +// ParseOsqueryFindPacksResponse parses an HTTP response from a OsqueryFindPacksWithResponse call +func ParseOsqueryFindPacksResponse(rsp *http.Response) (*OsqueryFindPacksResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + response := &OsqueryFindPacksResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } -} -// Status returns HTTPResponse.Status -func (r PostFleetAgentPoliciesDeleteResponse) Status() string { - if r.HTTPResponse != 
nil { - return r.HTTPResponse.Status + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPIFindPacksResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } - return http.StatusText(0) + + return response, nil } -// StatusCode returns HTTPResponse.StatusCode -func (r PostFleetAgentPoliciesDeleteResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode +// ParseOsqueryCreatePacksResponse parses an HTTP response from a OsqueryCreatePacksWithResponse call +func ParseOsqueryCreatePacksResponse(rsp *http.Response) (*OsqueryCreatePacksResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return 0 -} -type GetFleetAgentPoliciesAgentpolicyidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item AgentPolicy `json:"item"` + response := &OsqueryCreatePacksResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPICreatePacksResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } + + return response, nil } -// Status returns HTTPResponse.Status -func (r GetFleetAgentPoliciesAgentpolicyidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseOsqueryDeletePacksResponse parses an HTTP response from a OsqueryDeletePacksWithResponse call +func ParseOsqueryDeletePacksResponse(rsp *http.Response) (*OsqueryDeletePacksResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetAgentPoliciesAgentpolicyidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &OsqueryDeletePacksResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -type PutFleetAgentPoliciesAgentpolicyidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item AgentPolicy `json:"item"` +// ParseOsqueryGetPacksDetailsResponse parses an HTTP response from a OsqueryGetPacksDetailsWithResponse call +func ParseOsqueryGetPacksDetailsResponse(rsp *http.Response) (*OsqueryGetPacksDetailsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + response := &OsqueryGetPacksDetailsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } -} -// Status 
returns HTTPResponse.Status -func (r PutFleetAgentPoliciesAgentpolicyidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPIFindPackResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } - return http.StatusText(0) + + return response, nil } -// StatusCode returns HTTPResponse.StatusCode -func (r PutFleetAgentPoliciesAgentpolicyidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode +// ParseOsqueryUpdatePacksResponse parses an HTTP response from a OsqueryUpdatePacksWithResponse call +func ParseOsqueryUpdatePacksResponse(rsp *http.Response) (*OsqueryUpdatePacksResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return 0 -} -type GetFleetEnrollmentApiKeysResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Items []EnrollmentApiKey `json:"items"` - // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set - List []struct { - // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. - Active bool `json:"active"` + response := &OsqueryUpdatePacksResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. - ApiKey string `json:"api_key"` + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPIUpdatePacksResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // ApiKeyId The ID of the API key in the Security API. - ApiKeyId string `json:"api_key_id"` - CreatedAt string `json:"created_at"` - Hidden *bool `json:"hidden,omitempty"` - Id string `json:"id"` + } - // Name The name of the enrollment API key. - Name *string `json:"name,omitempty"` + return response, nil +} - // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. 
- PolicyId *string `json:"policy_id,omitempty"` - } `json:"list"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` +// ParseOsqueryFindSavedQueriesResponse parses an HTTP response from a OsqueryFindSavedQueriesWithResponse call +func ParseOsqueryFindSavedQueriesResponse(rsp *http.Response) (*OsqueryFindSavedQueriesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + response := &OsqueryFindSavedQueriesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } -} -// Status returns HTTPResponse.Status -func (r GetFleetEnrollmentApiKeysResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPIFindSavedQueryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } - return http.StatusText(0) + + return response, nil } -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetEnrollmentApiKeysResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode +// ParseOsqueryCreateSavedQueryResponse parses an HTTP response from a OsqueryCreateSavedQueryWithResponse call +func ParseOsqueryCreateSavedQueryResponse(rsp *http.Response) (*OsqueryCreateSavedQueryResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return 0 -} -type GetFleetEpmPackagesResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Items []PackageListItem `json:"items"` + response := &OsqueryCreateSavedQueryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPICreateSavedQueryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } + + return response, nil } -// Status returns HTTPResponse.Status -func (r GetFleetEpmPackagesResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseOsqueryDeleteSavedQueryResponse parses an HTTP response from a OsqueryDeleteSavedQueryWithResponse call +func ParseOsqueryDeleteSavedQueryResponse(rsp *http.Response) (*OsqueryDeleteSavedQueryResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetEpmPackagesResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &OsqueryDeleteSavedQueryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type PostFleetEpmPackagesResponse struct { - Body []byte - HTTPResponse *http.Response -} + switch { + case 
strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPIDefaultSuccessResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// Status returns HTTPResponse.Status -func (r PostFleetEpmPackagesResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status } - return http.StatusText(0) + + return response, nil } -// StatusCode returns HTTPResponse.StatusCode -func (r PostFleetEpmPackagesResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode +// ParseOsqueryGetSavedQueryDetailsResponse parses an HTTP response from a OsqueryGetSavedQueryDetailsWithResponse call +func ParseOsqueryGetSavedQueryDetailsResponse(rsp *http.Response) (*OsqueryGetSavedQueryDetailsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return 0 -} -type DeleteFleetEpmPackagesPkgnamePkgversionResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Items []DeleteFleetEpmPackagesPkgnamePkgversion_200_Items_Item `json:"items"` + response := &OsqueryGetSavedQueryDetailsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPIFindSavedQueryDetailResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } -} -type DeleteFleetEpmPackagesPkgnamePkgversion200Items0 struct { - Id string `json:"id"` - OriginId *string `json:"originId,omitempty"` - Type DeleteFleetEpmPackagesPkgnamePkgversion_200_Items_0_Type `json:"type"` -} -type DeleteFleetEpmPackagesPkgnamePkgversion200Items0Type0 string -type DeleteFleetEpmPackagesPkgnamePkgversion200Items0Type1 = string -type DeleteFleetEpmPackagesPkgnamePkgversion_200_Items_0_Type struct { - union json.RawMessage -} -type DeleteFleetEpmPackagesPkgnamePkgversion200Items1 struct { - Deferred *bool `json:"deferred,omitempty"` - Id string `json:"id"` - Type DeleteFleetEpmPackagesPkgnamePkgversion200Items1Type `json:"type"` - Version *string `json:"version,omitempty"` -} -type DeleteFleetEpmPackagesPkgnamePkgversion200Items1Type string -type DeleteFleetEpmPackagesPkgnamePkgversion_200_Items_Item struct { - union json.RawMessage + + return response, nil } -// Status returns HTTPResponse.Status -func (r DeleteFleetEpmPackagesPkgnamePkgversionResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseOsqueryUpdateSavedQueryResponse parses an HTTP response from a OsqueryUpdateSavedQueryWithResponse call +func ParseOsqueryUpdateSavedQueryResponse(rsp *http.Response) (*OsqueryUpdateSavedQueryResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteFleetEpmPackagesPkgnamePkgversionResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &OsqueryUpdateSavedQueryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 + + switch 
{ + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityOsqueryAPIUpdateSavedQueryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -type GetFleetEpmPackagesPkgnamePkgversionResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item PackageInfo `json:"item"` - Metadata *struct { - HasPolicies bool `json:"has_policies"` - } `json:"metadata,omitempty"` +// ParsePersistPinnedEventRouteResponse parses an HTTP response from a PersistPinnedEventRouteWithResponse call +func ParsePersistPinnedEventRouteResponse(rsp *http.Response) (*PersistPinnedEventRouteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + response := &PersistPinnedEventRouteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } -} -// Status returns HTTPResponse.Status -func (r GetFleetEpmPackagesPkgnamePkgversionResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityTimelineAPIPersistPinnedEventResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } - return http.StatusText(0) + + return response, nil } -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetEpmPackagesPkgnamePkgversionResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode +// ParseCleanUpRiskEngineResponse parses an HTTP response from a CleanUpRiskEngineWithResponse call +func ParseCleanUpRiskEngineResponse(rsp *http.Response) (*CleanUpRiskEngineResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return 0 -} -type PostFleetEpmPackagesPkgnamePkgversionResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - UnderscoreMeta struct { - InstallSource string `json:"install_source"` - Name string `json:"name"` - } `json:"_meta"` - Items []PostFleetEpmPackagesPkgnamePkgversion_200_Items_Item `json:"items"` + response := &CleanUpRiskEngineResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + CleanupSuccessful *bool `json:"cleanup_successful,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SecurityEntityAnalyticsAPITaskManagerUnavailableResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest 
SecurityEntityAnalyticsAPICleanUpRiskEngineErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + } -} -type PostFleetEpmPackagesPkgnamePkgversion200Items0 struct { - Id string `json:"id"` - OriginId *string `json:"originId,omitempty"` - Type PostFleetEpmPackagesPkgnamePkgversion_200_Items_0_Type `json:"type"` -} -type PostFleetEpmPackagesPkgnamePkgversion200Items0Type0 string -type PostFleetEpmPackagesPkgnamePkgversion200Items0Type1 = string -type PostFleetEpmPackagesPkgnamePkgversion_200_Items_0_Type struct { - union json.RawMessage -} -type PostFleetEpmPackagesPkgnamePkgversion200Items1 struct { - Deferred *bool `json:"deferred,omitempty"` - Id string `json:"id"` - Type PostFleetEpmPackagesPkgnamePkgversion200Items1Type `json:"type"` - Version *string `json:"version,omitempty"` -} -type PostFleetEpmPackagesPkgnamePkgversion200Items1Type string -type PostFleetEpmPackagesPkgnamePkgversion_200_Items_Item struct { - union json.RawMessage + + return response, nil } -// Status returns HTTPResponse.Status -func (r PostFleetEpmPackagesPkgnamePkgversionResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseConfigureRiskEngineSavedObjectResponse parses an HTTP response from a ConfigureRiskEngineSavedObjectWithResponse call +func ParseConfigureRiskEngineSavedObjectResponse(rsp *http.Response) (*ConfigureRiskEngineSavedObjectResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PostFleetEpmPackagesPkgnamePkgversionResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &ConfigureRiskEngineSavedObjectResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + RiskEngineSavedObjectConfigured *bool `json:"risk_engine_saved_object_configured,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SecurityEntityAnalyticsAPITaskManagerUnavailableResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest SecurityEntityAnalyticsAPIConfigureRiskEngineSavedObjectErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + } - return 0 + + return response, nil } -type GetFleetFleetServerHostsResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Items []ServerHost `json:"items"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` +// ParseScheduleRiskEngineNowResponse parses an HTTP response from a ScheduleRiskEngineNowWithResponse call +func ParseScheduleRiskEngineNowResponse(rsp *http.Response) (*ScheduleRiskEngineNowResponse, error) { + bodyBytes, err := 
io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } -} -// Status returns HTTPResponse.Status -func (r GetFleetFleetServerHostsResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + response := &ScheduleRiskEngineNowResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetFleetServerHostsResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityEntityAnalyticsAPIRiskEngineScheduleNowResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SecurityEntityAnalyticsAPITaskManagerUnavailableResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && true: + var dest SecurityEntityAnalyticsAPIRiskEngineScheduleNowErrorResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSONDefault = &dest + } - return 0 + + return response, nil } -type PostFleetFleetServerHostsResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item ServerHost `json:"item"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` +// ParseBulkCreateSavedObjectsResponse parses an HTTP response from a BulkCreateSavedObjectsWithResponse call +func ParseBulkCreateSavedObjectsResponse(rsp *http.Response) (*BulkCreateSavedObjectsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } -} -// Status returns HTTPResponse.Status -func (r PostFleetFleetServerHostsResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + response := &BulkCreateSavedObjectsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PostFleetFleetServerHostsResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SavedObjects400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return 0 + + return response, nil } -type DeleteFleetFleetServerHostsItemidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Id string `json:"id"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` +// ParseBulkDeleteSavedObjectsResponse parses an HTTP 
response from a BulkDeleteSavedObjectsWithResponse call +func ParseBulkDeleteSavedObjectsResponse(rsp *http.Response) (*BulkDeleteSavedObjectsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } -} -// Status returns HTTPResponse.Status -func (r DeleteFleetFleetServerHostsItemidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + response := &BulkDeleteSavedObjectsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteFleetFleetServerHostsItemidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SavedObjects400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return 0 + + return response, nil } -type GetFleetFleetServerHostsItemidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item ServerHost `json:"item"` +// ParseBulkGetSavedObjectsResponse parses an HTTP response from a BulkGetSavedObjectsWithResponse call +func ParseBulkGetSavedObjectsResponse(rsp *http.Response) (*BulkGetSavedObjectsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + response := &BulkGetSavedObjectsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } -} -// Status returns HTTPResponse.Status -func (r GetFleetFleetServerHostsItemidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SavedObjects400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return http.StatusText(0) + + return response, nil } -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetFleetServerHostsItemidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode +// ParseBulkResolveSavedObjectsResponse parses an HTTP response from a BulkResolveSavedObjectsWithResponse call +func ParseBulkResolveSavedObjectsResponse(rsp *http.Response) (*BulkResolveSavedObjectsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return 0 -} -type PutFleetFleetServerHostsItemidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item ServerHost `json:"item"` + response := &BulkResolveSavedObjectsResponse{ + Body: bodyBytes, + HTTPResponse: 
rsp, } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SavedObjects400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } + + return response, nil } -// Status returns HTTPResponse.Status -func (r PutFleetFleetServerHostsItemidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseBulkUpdateSavedObjectsResponse parses an HTTP response from a BulkUpdateSavedObjectsWithResponse call +func ParseBulkUpdateSavedObjectsResponse(rsp *http.Response) (*BulkUpdateSavedObjectsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PutFleetFleetServerHostsItemidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &BulkUpdateSavedObjectsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SavedObjects400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil } -type GetFleetOutputsResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Items []OutputUnion `json:"items"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` +// ParsePostSavedObjectsExportResponse parses an HTTP response from a PostSavedObjectsExportWithResponse call +func ParsePostSavedObjectsExportResponse(rsp *http.Response) (*PostSavedObjectsExportResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + response := &PostSavedObjectsExportResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } -} -// Status returns HTTPResponse.Status -func (r GetFleetOutputsResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode PostSavedObjectsExport400StatusCode `json:"statusCode"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return http.StatusText(0) + + return 
response, nil } -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetOutputsResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode +// ParseFindSavedObjectsResponse parses an HTTP response from a FindSavedObjectsWithResponse call +func ParseFindSavedObjectsResponse(rsp *http.Response) (*FindSavedObjectsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return 0 -} -type PostFleetOutputsResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item OutputUnion `json:"item"` + response := &FindSavedObjectsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SavedObjects400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } + + return response, nil } -// Status returns HTTPResponse.Status -func (r PostFleetOutputsResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParsePostSavedObjectsImportResponse parses an HTTP response from a PostSavedObjectsImportWithResponse call +func ParsePostSavedObjectsImportResponse(rsp *http.Response) (*PostSavedObjectsImportResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PostFleetOutputsResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &PostSavedObjectsImportResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type DeleteFleetOutputsOutputidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Id string `json:"id"` + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Errors Indicates the import was unsuccessful and specifies the objects that failed to import. + // + // NOTE: One object may result in multiple errors, which requires separate steps to resolve. For instance, a `missing_references` error and conflict error. + Errors []map[string]interface{} `json:"errors"` + + // Success Indicates when the import was successfully completed. When set to false, some objects may not have been created. For additional information, refer to the `errors` and `successResults` properties. + Success bool `json:"success"` + + // SuccessCount Indicates the number of successfully imported records. + SuccessCount float32 `json:"successCount"` + + // SuccessResults Indicates the objects that are successfully imported, with any metadata if applicable. + // + // NOTE: Objects are created only when all resolvable errors are addressed, including conflicts and missing references. If objects are created as new copies, each entry in the `successResults` array includes a `destinationId` attribute. 
+ SuccessResults []map[string]interface{} `json:"successResults"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error string `json:"error"` + Message string `json:"message"` + StatusCode PostSavedObjectsImport400StatusCode `json:"statusCode"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + return response, nil +} + +// ParseResolveImportErrorsResponse parses an HTTP response from a ResolveImportErrorsWithResponse call +func ParseResolveImportErrorsResponse(rsp *http.Response) (*ResolveImportErrorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON404 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + response := &ResolveImportErrorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } -} -// Status returns HTTPResponse.Status -func (r DeleteFleetOutputsOutputidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Errors Specifies the objects that failed to resolve. + // + // NOTE: One object can result in multiple errors, which requires separate steps to resolve. For instance, a `missing_references` error and a `conflict` error. + Errors *[]map[string]interface{} `json:"errors,omitempty"` + + // Success Indicates a successful import. When set to `false`, some objects may not have been created. For additional information, refer to the `errors` and `successResults` properties. + Success *bool `json:"success,omitempty"` + + // SuccessCount Indicates the number of successfully resolved records. + SuccessCount *float32 `json:"successCount,omitempty"` + + // SuccessResults Indicates the objects that are successfully imported, with any metadata if applicable. + // + // NOTE: Objects are only created when all resolvable errors are addressed, including conflict and missing references. 
+ SuccessResults *[]map[string]interface{} `json:"successResults,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SavedObjects400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return http.StatusText(0) + + return response, nil } -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteFleetOutputsOutputidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode +// ParseResolveSavedObjectResponse parses an HTTP response from a ResolveSavedObjectWithResponse call +func ParseResolveSavedObjectResponse(rsp *http.Response) (*ResolveSavedObjectResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return 0 -} -type GetFleetOutputsOutputidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item OutputUnion `json:"item"` + response := &ResolveSavedObjectResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SavedObjects400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } + + return response, nil } -// Status returns HTTPResponse.Status -func (r GetFleetOutputsOutputidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseCreateSavedObjectResponse parses an HTTP response from a CreateSavedObjectWithResponse call +func ParseCreateSavedObjectResponse(rsp *http.Response) (*CreateSavedObjectResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetOutputsOutputidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &CreateSavedObjectResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest + + } + + return response, nil } -type PutFleetOutputsOutputidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item OutputUnion `json:"item"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string 
`json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` +// ParseGetSavedObjectResponse parses an HTTP response from a GetSavedObjectWithResponse call +func ParseGetSavedObjectResponse(rsp *http.Response) (*GetSavedObjectResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } -} -// Status returns HTTPResponse.Status -func (r PutFleetOutputsOutputidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + response := &GetSavedObjectResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PutFleetOutputsOutputidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SavedObjects400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return 0 + + return response, nil } -type GetFleetPackagePoliciesResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Items []PackagePolicy `json:"items"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` +// ParseCreateSavedObjectIdResponse parses an HTTP response from a CreateSavedObjectIdWithResponse call +func ParseCreateSavedObjectIdResponse(rsp *http.Response) (*CreateSavedObjectIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } -} -// Status returns HTTPResponse.Status -func (r GetFleetPackagePoliciesResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + response := &CreateSavedObjectIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetPackagePoliciesResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest + } - return 0 + + return response, nil } -type PostFleetPackagePoliciesResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item PackagePolicy `json:"item"` +// ParseUpdateSavedObjectResponse parses an HTTP response from a UpdateSavedObjectWithResponse call +func ParseUpdateSavedObjectResponse(rsp *http.Response) (*UpdateSavedObjectResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) 
+ defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + response := &UpdateSavedObjectResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - JSON409 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON409 = &dest + } + + return response, nil } -// Status returns HTTPResponse.Status -func (r PostFleetPackagePoliciesResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseGetSecurityRoleResponse parses an HTTP response from a GetSecurityRoleWithResponse call +func ParseGetSecurityRoleResponse(rsp *http.Response) (*GetSecurityRoleResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PostFleetPackagePoliciesResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &GetSecurityRoleResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 + + return response, nil } -type DeleteFleetPackagePoliciesPackagepolicyidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Id string `json:"id"` +// ParsePostSecurityRoleQueryResponse parses an HTTP response from a PostSecurityRoleQueryWithResponse call +func ParsePostSecurityRoleQueryResponse(rsp *http.Response) (*PostSecurityRoleQueryResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + response := &PostSecurityRoleQueryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } + + return response, nil } -// Status returns HTTPResponse.Status -func (r DeleteFleetPackagePoliciesPackagepolicyidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseDeleteSecurityRoleNameResponse parses an HTTP response from a DeleteSecurityRoleNameWithResponse call +func ParseDeleteSecurityRoleNameResponse(rsp *http.Response) (*DeleteSecurityRoleNameResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return 
http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &DeleteSecurityRoleNameResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 + + return response, nil } -type GetFleetPackagePoliciesPackagepolicyidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item PackagePolicy `json:"item"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` +// ParseGetSecurityRoleNameResponse parses an HTTP response from a GetSecurityRoleNameWithResponse call +func ParseGetSecurityRoleNameResponse(rsp *http.Response) (*GetSecurityRoleNameResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON404 *struct { - Message string `json:"message"` + + response := &GetSecurityRoleNameResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } + + return response, nil } -// Status returns HTTPResponse.Status -func (r GetFleetPackagePoliciesPackagepolicyidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParsePutSecurityRoleNameResponse parses an HTTP response from a PutSecurityRoleNameWithResponse call +func ParsePutSecurityRoleNameResponse(rsp *http.Response) (*PutSecurityRoleNameResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &PutSecurityRoleNameResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 + + return response, nil } -type PutFleetPackagePoliciesPackagepolicyidResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Item PackagePolicy `json:"item"` - } - JSON400 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` +// ParsePostSecurityRolesResponse parses an HTTP response from a PostSecurityRolesWithResponse call +func ParsePostSecurityRolesResponse(rsp *http.Response) (*PostSecurityRolesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - JSON403 *struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + + response := &PostSecurityRolesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } + + return response, nil } -// Status returns HTTPResponse.Status -func (r PutFleetPackagePoliciesPackagepolicyidResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParsePostSecuritySessionInvalidateResponse parses an HTTP response from a PostSecuritySessionInvalidateWithResponse call +func ParsePostSecuritySessionInvalidateResponse(rsp *http.Response) 
(*PostSecuritySessionInvalidateResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PutFleetPackagePoliciesPackagepolicyidResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &PostSecuritySessionInvalidateResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type PostParametersResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *CreateParamResponse -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Total The number of sessions that were successfully invalidated. + Total *int `json:"total,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// Status returns HTTPResponse.Status -func (r PostParametersResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PostParametersResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 + return response, nil } -type DeleteParameterResponse struct { - Body []byte - HTTPResponse *http.Response -} +// ParsePerformAnonymizationFieldsBulkActionResponse parses an HTTP response from a PerformAnonymizationFieldsBulkActionWithResponse call +func ParsePerformAnonymizationFieldsBulkActionResponse(rsp *http.Response) (*PerformAnonymizationFieldsBulkActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } -// Status returns HTTPResponse.Status -func (r DeleteParameterResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + response := &PerformAnonymizationFieldsBulkActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteParameterResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIAnonymizationFieldsBulkCrudActionResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + // Error Error type or name. + Error *string `json:"error,omitempty"` + + // Message Detailed error message. + Message *string `json:"message,omitempty"` + + // StatusCode Status code of the response. 
+ StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return 0 -} -type GetParameterResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *SyntheticsGetParameterResponse + return response, nil } -// Status returns HTTPResponse.Status -func (r GetParameterResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseFindAnonymizationFieldsResponse parses an HTTP response from a FindAnonymizationFieldsWithResponse call +func ParseFindAnonymizationFieldsResponse(rsp *http.Response) (*FindAnonymizationFieldsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetParameterResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &FindAnonymizationFieldsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type PutParameterResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *map[string]interface{} -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Aggregations *struct { + FieldStatus *struct { + Buckets *struct { + Allowed *struct { + DocCount *int `json:"doc_count,omitempty"` + } `json:"allowed,omitempty"` + Anonymized *struct { + DocCount *int `json:"doc_count,omitempty"` + } `json:"anonymized,omitempty"` + Denied *struct { + DocCount *int `json:"doc_count,omitempty"` + } `json:"denied,omitempty"` + } `json:"buckets,omitempty"` + } `json:"field_status,omitempty"` + } `json:"aggregations,omitempty"` + All *[]SecurityAIAssistantAPIAnonymizationFieldResponse `json:"all,omitempty"` + Data []SecurityAIAssistantAPIAnonymizationFieldResponse `json:"data"` + Page int `json:"page"` + PerPage int `json:"perPage"` + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// Status returns HTTPResponse.Status -func (r PutParameterResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// StatusCode returns HTTPResponse.StatusCode -func (r PutParameterResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode } - return 0 -} -type DeleteActionsConnectorIdResponse struct { - Body []byte - HTTPResponse *http.Response + return response, nil } -// Status returns HTTPResponse.Status -func (r DeleteActionsConnectorIdResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseChatCompleteResponse parses an HTTP response from a ChatCompleteWithResponse call +func ParseChatCompleteResponse(rsp *http.Response) (*ChatCompleteResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r 
DeleteActionsConnectorIdResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &ChatCompleteResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type GetActionsConnectorIdResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *ConnectorResponse -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + // Error Error type. + Error *string `json:"error,omitempty"` -// Status returns HTTPResponse.Status -func (r GetActionsConnectorIdResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} + // Message Human-readable error message. + Message *string `json:"message,omitempty"` + + // StatusCode HTTP status code. + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// StatusCode returns HTTPResponse.StatusCode -func (r GetActionsConnectorIdResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode } - return 0 -} -type PostActionsConnectorIdResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Config *map[string]interface{} `json:"config,omitempty"` + return response, nil +} - // ConnectorTypeId The connector type identifier. - ConnectorTypeId string `json:"connector_type_id"` +// ParseDeleteAllConversationsResponse parses an HTTP response from a DeleteAllConversationsWithResponse call +func ParseDeleteAllConversationsResponse(rsp *http.Response) (*DeleteAllConversationsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // Id The identifier for the connector. - Id string `json:"id"` + response := &DeleteAllConversationsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // IsDeprecated Indicates whether the connector is deprecated. - IsDeprecated bool `json:"is_deprecated"` + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + Failures *[]string `json:"failures,omitempty"` + Success *bool `json:"success,omitempty"` + TotalDeleted *float32 `json:"totalDeleted,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // IsMissingSecrets Indicates whether the connector is missing secrets. - IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured bool `json:"is_preconfigured"` + } - // IsSystemAction Indicates whether the connector is used for system actions. - IsSystemAction bool `json:"is_system_action"` + return response, nil +} - // Name The name of the rule. 
- Name string `json:"name"` +// ParseCreateConversationResponse parses an HTTP response from a CreateConversationWithResponse call +func ParseCreateConversationResponse(rsp *http.Response) (*CreateConversationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } -} -// Status returns HTTPResponse.Status -func (r PostActionsConnectorIdResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status + response := &CreateConversationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PostActionsConnectorIdResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIConversationResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return 0 + + return response, nil } -type PutActionsConnectorIdResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - Config *map[string]interface{} `json:"config,omitempty"` +// ParseFindConversationsResponse parses an HTTP response from a FindConversationsWithResponse call +func ParseFindConversationsResponse(rsp *http.Response) (*FindConversationsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // ConnectorTypeId The connector type identifier. - ConnectorTypeId string `json:"connector_type_id"` + response := &FindConversationsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // Id The identifier for the connector. - Id string `json:"id"` + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Data A list of conversations. + Data []SecurityAIAssistantAPIConversationResponse `json:"data"` - // IsDeprecated Indicates whether the connector is deprecated. - IsDeprecated bool `json:"is_deprecated"` + // Page The current page of the results. + Page int `json:"page"` - // IsMissingSecrets Indicates whether the connector is missing secrets. - IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + // PerPage The number of results returned per page. + PerPage int `json:"perPage"` - // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured bool `json:"is_preconfigured"` + // Total The total number of conversations matching the filter criteria. + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // IsSystemAction Indicates whether the connector is used for system actions. 
- IsSystemAction bool `json:"is_system_action"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // Name The name of the rule. - Name string `json:"name"` } + + return response, nil } -// Status returns HTTPResponse.Status -func (r PutActionsConnectorIdResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseDeleteConversationResponse parses an HTTP response from a DeleteConversationWithResponse call +func ParseDeleteConversationResponse(rsp *http.Response) (*DeleteConversationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PutActionsConnectorIdResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &DeleteConversationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type GetActionsConnectorsResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *[]ConnectorResponse -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIConversationResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// Status returns HTTPResponse.Status -func (r GetActionsConnectorsResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// StatusCode returns HTTPResponse.StatusCode -func (r GetActionsConnectorsResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode } - return 0 -} -type GetAllDataViewsDefaultResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - DataView *[]GetDataViewsResponseItem `json:"data_view,omitempty"` - } - JSON400 *DataViews400Response + return response, nil } -// Status returns HTTPResponse.Status -func (r GetAllDataViewsDefaultResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseReadConversationResponse parses an HTTP response from a ReadConversationWithResponse call +func ParseReadConversationResponse(rsp *http.Response) (*ReadConversationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetAllDataViewsDefaultResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &ReadConversationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type CreateDataViewDefaultwResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *DataViewsDataViewResponseObject - JSON400 
*DataViews400Response -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIConversationResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// Status returns HTTPResponse.Status -func (r CreateDataViewDefaultwResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// StatusCode returns HTTPResponse.StatusCode -func (r CreateDataViewDefaultwResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode } - return 0 -} -type DeleteDataViewDefaultResponse struct { - Body []byte - HTTPResponse *http.Response - JSON404 *DataViews404Response + return response, nil } -// Status returns HTTPResponse.Status -func (r DeleteDataViewDefaultResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseUpdateConversationResponse parses an HTTP response from a UpdateConversationWithResponse call +func ParseUpdateConversationResponse(rsp *http.Response) (*UpdateConversationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteDataViewDefaultResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &UpdateConversationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type GetDataViewDefaultResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *DataViewsDataViewResponseObject - JSON404 *DataViews404Response -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIConversationResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// Status returns HTTPResponse.Status -func (r GetDataViewDefaultResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status - } - return http.StatusText(0) -} + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + Error *string `json:"error,omitempty"` + Message *string `json:"message,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// StatusCode returns HTTPResponse.StatusCode -func (r GetDataViewDefaultResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode } - return 0 -} -type UpdateDataViewDefaultResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *DataViewsDataViewResponseObject - JSON400 *DataViews400Response + return response, nil } -// Status returns HTTPResponse.Status -func (r UpdateDataViewDefaultResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseCreateKnowledgeBaseEntryResponse parses an HTTP response from a 
CreateKnowledgeBaseEntryWithResponse call +func ParseCreateKnowledgeBaseEntryResponse(rsp *http.Response) (*CreateKnowledgeBaseEntryResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r UpdateDataViewDefaultResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &CreateKnowledgeBaseEntryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -// DeleteAgentConfigurationWithBodyWithResponse request with arbitrary body returning *DeleteAgentConfigurationResponse -func (c *ClientWithResponses) DeleteAgentConfigurationWithBodyWithResponse(ctx context.Context, params *DeleteAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*DeleteAgentConfigurationResponse, error) { - rsp, err := c.DeleteAgentConfigurationWithBody(ctx, params, contentType, body, reqEditors...) - if err != nil { - return nil, err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIKnowledgeBaseEntryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return ParseDeleteAgentConfigurationResponse(rsp) + + return response, nil } -func (c *ClientWithResponses) DeleteAgentConfigurationWithResponse(ctx context.Context, params *DeleteAgentConfigurationParams, body DeleteAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*DeleteAgentConfigurationResponse, error) { - rsp, err := c.DeleteAgentConfiguration(ctx, params, body, reqEditors...) +// ParsePerformKnowledgeBaseEntryBulkActionResponse parses an HTTP response from a PerformKnowledgeBaseEntryBulkActionWithResponse call +func ParsePerformKnowledgeBaseEntryBulkActionResponse(rsp *http.Response) (*PerformKnowledgeBaseEntryBulkActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseDeleteAgentConfigurationResponse(rsp) -} -// GetAgentConfigurationsWithResponse request returning *GetAgentConfigurationsResponse -func (c *ClientWithResponses) GetAgentConfigurationsWithResponse(ctx context.Context, params *GetAgentConfigurationsParams, reqEditors ...RequestEditorFn) (*GetAgentConfigurationsResponse, error) { - rsp, err := c.GetAgentConfigurations(ctx, params, reqEditors...) - if err != nil { - return nil, err + response := &PerformKnowledgeBaseEntryBulkActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return ParseGetAgentConfigurationsResponse(rsp) -} -// CreateUpdateAgentConfigurationWithBodyWithResponse request with arbitrary body returning *CreateUpdateAgentConfigurationResponse -func (c *ClientWithResponses) CreateUpdateAgentConfigurationWithBodyWithResponse(ctx context.Context, params *CreateUpdateAgentConfigurationParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateUpdateAgentConfigurationResponse, error) { - rsp, err := c.CreateUpdateAgentConfigurationWithBody(ctx, params, contentType, body, reqEditors...) 
- if err != nil { - return nil, err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIKnowledgeBaseEntryBulkCrudActionResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + } - return ParseCreateUpdateAgentConfigurationResponse(rsp) + + return response, nil } -func (c *ClientWithResponses) CreateUpdateAgentConfigurationWithResponse(ctx context.Context, params *CreateUpdateAgentConfigurationParams, body CreateUpdateAgentConfigurationJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateUpdateAgentConfigurationResponse, error) { - rsp, err := c.CreateUpdateAgentConfiguration(ctx, params, body, reqEditors...) +// ParseFindKnowledgeBaseEntriesResponse parses an HTTP response from a FindKnowledgeBaseEntriesWithResponse call +func ParseFindKnowledgeBaseEntriesResponse(rsp *http.Response) (*FindKnowledgeBaseEntriesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseCreateUpdateAgentConfigurationResponse(rsp) -} -type PostMaintenanceWindowResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - // CreatedAt The date and time when the maintenance window was created. - CreatedAt string `json:"created_at"` + response := &FindKnowledgeBaseEntriesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // CreatedBy The identifier for the user that created the maintenance window. - CreatedBy *string `json:"created_by"` + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Data The list of Knowledge Base Entries for the current page. + Data []SecurityAIAssistantAPIKnowledgeBaseEntryResponse `json:"data"` - // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. - Enabled bool `json:"enabled"` + // Page The current page number. + Page int `json:"page"` - // Id The identifier for the maintenance window. - Id string `json:"id"` - Schedule struct { - Custom struct { - // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. - Duration string `json:"duration"` - Recurring *struct { - // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. - End *string `json:"end,omitempty"` + // PerPage The number of Knowledge Base Entries returned per page. + PerPage int `json:"perPage"` - // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. - Every *string `json:"every,omitempty"` + // Total The total number of Knowledge Base Entries available. + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // Occurrences The total number of recurrences of the schedule. 
- Occurrences *float32 `json:"occurrences,omitempty"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + // Error A short description of the error. + Error *string `json:"error,omitempty"` - // OnMonth The specific months for a recurring schedule. Valid values are 1-12. - OnMonth *[]float32 `json:"onMonth,omitempty"` + // Message A detailed message explaining the error. + Message *string `json:"message,omitempty"` - // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. - OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + // StatusCode The HTTP status code of the error. + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. - OnWeekDay *[]string `json:"onWeekDay,omitempty"` - } `json:"recurring,omitempty"` + } - // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. - Start string `json:"start"` + return response, nil +} - // Timezone The timezone of the schedule. The default timezone is UTC. - Timezone *string `json:"timezone,omitempty"` - } `json:"custom"` - } `json:"schedule"` - Scope *struct { - Alerting struct { - Query struct { - // Kql A filter written in Kibana Query Language (KQL). - Kql string `json:"kql"` - } `json:"query"` - } `json:"alerting"` - } `json:"scope,omitempty"` +// ParseDeleteKnowledgeBaseEntryResponse parses an HTTP response from a DeleteKnowledgeBaseEntryWithResponse call +func ParseDeleteKnowledgeBaseEntryResponse(rsp *http.Response) (*DeleteKnowledgeBaseEntryResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // Status The current status of the maintenance window. - Status PostMaintenanceWindow200Status `json:"status"` + response := &DeleteKnowledgeBaseEntryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // Title The name of the maintenance window. - Title string `json:"title"` + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIDeleteResponseFields + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // UpdatedAt The date and time when the maintenance window was last updated. - UpdatedAt string `json:"updated_at"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // UpdatedBy The identifier for the user that last updated this maintenance window. 
- UpdatedBy *string `json:"updated_by"` } + + return response, nil } -type PostMaintenanceWindow200Status string -// Status returns HTTPResponse.Status -func (r PostMaintenanceWindowResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParseReadKnowledgeBaseEntryResponse parses an HTTP response from a ReadKnowledgeBaseEntryWithResponse call +func ParseReadKnowledgeBaseEntryResponse(rsp *http.Response) (*ReadKnowledgeBaseEntryResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PostMaintenanceWindowResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &ReadKnowledgeBaseEntryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type DeleteMaintenanceWindowIdResponse struct { - Body []byte - HTTPResponse *http.Response -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIKnowledgeBaseEntryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// Status returns HTTPResponse.Status -func (r DeleteMaintenanceWindowIdResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status } - return http.StatusText(0) + + return response, nil } -// StatusCode returns HTTPResponse.StatusCode -func (r DeleteMaintenanceWindowIdResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode +// ParseUpdateKnowledgeBaseEntryResponse parses an HTTP response from a UpdateKnowledgeBaseEntryWithResponse call +func ParseUpdateKnowledgeBaseEntryResponse(rsp *http.Response) (*UpdateKnowledgeBaseEntryResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return 0 + + response := &UpdateKnowledgeBaseEntryResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIKnowledgeBaseEntryResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SecurityAIAssistantAPIKnowledgeBaseEntryErrorSchema + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + } + + return response, nil } -type GetMaintenanceWindowIdResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - // CreatedAt The date and time when the maintenance window was created. 
- CreatedAt string `json:"created_at"` +// ParseReadKnowledgeBaseResponse parses an HTTP response from a ReadKnowledgeBaseWithResponse call +func ParseReadKnowledgeBaseResponse(rsp *http.Response) (*ReadKnowledgeBaseResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &ReadKnowledgeBaseResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // ElserExists Indicates if the ELSER model exists for the KnowledgeBase. + ElserExists *bool `json:"elser_exists,omitempty"` + + // IsSetupAvailable Indicates if the setup process is available for the KnowledgeBase. + IsSetupAvailable *bool `json:"is_setup_available,omitempty"` + + // IsSetupInProgress Indicates if the setup process is currently in progress. + IsSetupInProgress *bool `json:"is_setup_in_progress,omitempty"` - // CreatedBy The identifier for the user that created the maintenance window. - CreatedBy *string `json:"created_by"` + // ProductDocumentationStatus The status of the product documentation in the KnowledgeBase. + ProductDocumentationStatus *string `json:"product_documentation_status,omitempty"` - // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. - Enabled bool `json:"enabled"` + // SecurityLabsExists Indicates if Security Labs documentation exists in the KnowledgeBase. + SecurityLabsExists *bool `json:"security_labs_exists,omitempty"` - // Id The identifier for the maintenance window. - Id string `json:"id"` - Schedule struct { - Custom struct { - // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. - Duration string `json:"duration"` - Recurring *struct { - // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. - End *string `json:"end,omitempty"` + // UserDataExists Indicates if user data exists in the KnowledgeBase. + UserDataExists *bool `json:"user_data_exists,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. - Every *string `json:"every,omitempty"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + // Error A short description of the error. + Error *string `json:"error,omitempty"` - // Occurrences The total number of recurrences of the schedule. - Occurrences *float32 `json:"occurrences,omitempty"` + // Message A detailed error message. + Message *string `json:"message,omitempty"` - // OnMonth The specific months for a recurring schedule. Valid values are 1-12. - OnMonth *[]float32 `json:"onMonth,omitempty"` + // StatusCode The HTTP status code of the error. + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. 
- OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + } - // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. - OnWeekDay *[]string `json:"onWeekDay,omitempty"` - } `json:"recurring,omitempty"` + return response, nil +} - // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. - Start string `json:"start"` +// ParseCreateKnowledgeBaseResponse parses an HTTP response from a CreateKnowledgeBaseWithResponse call +func ParseCreateKnowledgeBaseResponse(rsp *http.Response) (*CreateKnowledgeBaseResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // Timezone The timezone of the schedule. The default timezone is UTC. - Timezone *string `json:"timezone,omitempty"` - } `json:"custom"` - } `json:"schedule"` - Scope *struct { - Alerting struct { - Query struct { - // Kql A filter written in Kibana Query Language (KQL). - Kql string `json:"kql"` - } `json:"query"` - } `json:"alerting"` - } `json:"scope,omitempty"` + response := &CreateKnowledgeBaseResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // Status The current status of the maintenance window. - Status GetMaintenanceWindowId200Status `json:"status"` + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIKnowledgeBaseResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // Title The name of the maintenance window. - Title string `json:"title"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + // Error A short description of the error. + Error *string `json:"error,omitempty"` - // UpdatedAt The date and time when the maintenance window was last updated. - UpdatedAt string `json:"updated_at"` + // Message A detailed error message. + Message *string `json:"message,omitempty"` + + // StatusCode The HTTP status code of the error. + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // UpdatedBy The identifier for the user that last updated this maintenance window. - UpdatedBy *string `json:"updated_by"` } + + return response, nil } -type GetMaintenanceWindowId200Status string -// Status returns HTTPResponse.Status -func (r GetMaintenanceWindowIdResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status +// ParsePerformPromptsBulkActionResponse parses an HTTP response from a PerformPromptsBulkActionWithResponse call +func ParsePerformPromptsBulkActionResponse(rsp *http.Response) (*PerformPromptsBulkActionResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r GetMaintenanceWindowIdResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode + response := &PerformPromptsBulkActionResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return 0 -} -type PatchMaintenanceWindowIdResponse struct { - Body []byte - HTTPResponse *http.Response - JSON200 *struct { - // CreatedAt The date and time when the maintenance window was created. 
- CreatedAt string `json:"created_at"` + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityAIAssistantAPIPromptsBulkCrudActionResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // CreatedBy The identifier for the user that created the maintenance window. - CreatedBy *string `json:"created_by"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + // Error A short error message. + Error *string `json:"error,omitempty"` - // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. - Enabled bool `json:"enabled"` + // Message A detailed error message. + Message *string `json:"message,omitempty"` - // Id The identifier for the maintenance window. - Id string `json:"id"` - Schedule struct { - Custom struct { - // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. - Duration string `json:"duration"` - Recurring *struct { - // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. - End *string `json:"end,omitempty"` + // StatusCode The HTTP status code for the error. + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. - Every *string `json:"every,omitempty"` + } - // Occurrences The total number of recurrences of the schedule. - Occurrences *float32 `json:"occurrences,omitempty"` + return response, nil +} - // OnMonth The specific months for a recurring schedule. Valid values are 1-12. - OnMonth *[]float32 `json:"onMonth,omitempty"` +// ParseFindPromptsResponse parses an HTTP response from a FindPromptsWithResponse call +func ParseFindPromptsResponse(rsp *http.Response) (*FindPromptsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. - OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + response := &FindPromptsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } - // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. - OnWeekDay *[]string `json:"onWeekDay,omitempty"` - } `json:"recurring,omitempty"` + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Data The list of prompts returned based on the search query, sorting, and pagination. + Data []SecurityAIAssistantAPIPromptResponse `json:"data"` - // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. - Start string `json:"start"` + // Page Current page number. + Page int `json:"page"` - // Timezone The timezone of the schedule. The default timezone is UTC. 
- Timezone *string `json:"timezone,omitempty"` - } `json:"custom"` - } `json:"schedule"` - Scope *struct { - Alerting struct { - Query struct { - // Kql A filter written in Kibana Query Language (KQL). - Kql string `json:"kql"` - } `json:"query"` - } `json:"alerting"` - } `json:"scope,omitempty"` + // PerPage Number of prompts per page. + PerPage int `json:"perPage"` - // Status The current status of the maintenance window. - Status PatchMaintenanceWindowId200Status `json:"status"` + // Total Total number of prompts matching the query. + Total int `json:"total"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // Title The name of the maintenance window. - Title string `json:"title"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest struct { + // Error Short error message. + Error *string `json:"error,omitempty"` - // UpdatedAt The date and time when the maintenance window was last updated. - UpdatedAt string `json:"updated_at"` + // Message Detailed description of the error. + Message *string `json:"message,omitempty"` - // UpdatedBy The identifier for the user that last updated this maintenance window. - UpdatedBy *string `json:"updated_by"` - } -} -type PatchMaintenanceWindowId200Status string + // StatusCode HTTP status code for the error. + StatusCode *float32 `json:"statusCode,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest -// Status returns HTTPResponse.Status -func (r PatchMaintenanceWindowIdResponse) Status() string { - if r.HTTPResponse != nil { - return r.HTTPResponse.Status } - return http.StatusText(0) -} -// StatusCode returns HTTPResponse.StatusCode -func (r PatchMaintenanceWindowIdResponse) StatusCode() int { - if r.HTTPResponse != nil { - return r.HTTPResponse.StatusCode - } - return 0 + return response, nil } -// GetFleetAgentPoliciesWithResponse request returning *GetFleetAgentPoliciesResponse -func (c *ClientWithResponses) GetFleetAgentPoliciesWithResponse(ctx context.Context, params *GetFleetAgentPoliciesParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesResponse, error) { - rsp, err := c.GetFleetAgentPolicies(ctx, params, reqEditors...) +// ParsePostUrlResponse parses an HTTP response from a PostUrlWithResponse call +func ParsePostUrlResponse(rsp *http.Response) (*PostUrlResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetAgentPoliciesResponse(rsp) + + response := &PostUrlResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ShortURLAPIsUrlResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -// PostFleetAgentPoliciesWithBodyWithResponse request with arbitrary body returning *PostFleetAgentPoliciesResponse -func (c *ClientWithResponses) PostFleetAgentPoliciesWithBodyWithResponse(ctx context.Context, params *PostFleetAgentPoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesResponse, error) { - rsp, err := c.PostFleetAgentPoliciesWithBody(ctx, params, contentType, body, reqEditors...) 
+// ParseResolveUrlResponse parses an HTTP response from a ResolveUrlWithResponse call +func ParseResolveUrlResponse(rsp *http.Response) (*ResolveUrlResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetAgentPoliciesResponse(rsp) + + response := &ResolveUrlResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ShortURLAPIsUrlResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -func (c *ClientWithResponses) PostFleetAgentPoliciesWithResponse(ctx context.Context, params *PostFleetAgentPoliciesParams, body PostFleetAgentPoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesResponse, error) { - rsp, err := c.PostFleetAgentPolicies(ctx, params, body, reqEditors...) +// ParseDeleteUrlResponse parses an HTTP response from a DeleteUrlWithResponse call +func ParseDeleteUrlResponse(rsp *http.Response) (*DeleteUrlResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetAgentPoliciesResponse(rsp) + + response := &DeleteUrlResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PostFleetAgentPoliciesDeleteWithBodyWithResponse request with arbitrary body returning *PostFleetAgentPoliciesDeleteResponse -func (c *ClientWithResponses) PostFleetAgentPoliciesDeleteWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesDeleteResponse, error) { - rsp, err := c.PostFleetAgentPoliciesDeleteWithBody(ctx, contentType, body, reqEditors...) +// ParseGetUrlResponse parses an HTTP response from a GetUrlWithResponse call +func ParseGetUrlResponse(rsp *http.Response) (*GetUrlResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetAgentPoliciesDeleteResponse(rsp) + + response := &GetUrlResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest ShortURLAPIsUrlResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -func (c *ClientWithResponses) PostFleetAgentPoliciesDeleteWithResponse(ctx context.Context, body PostFleetAgentPoliciesDeleteJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetAgentPoliciesDeleteResponse, error) { - rsp, err := c.PostFleetAgentPoliciesDelete(ctx, body, reqEditors...) 
+// ParsePostSpacesCopySavedObjectsResponse parses an HTTP response from a PostSpacesCopySavedObjectsWithResponse call +func ParsePostSpacesCopySavedObjectsResponse(rsp *http.Response) (*PostSpacesCopySavedObjectsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetAgentPoliciesDeleteResponse(rsp) + + response := &PostSpacesCopySavedObjectsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetFleetAgentPoliciesAgentpolicyidWithResponse request returning *GetFleetAgentPoliciesAgentpolicyidResponse -func (c *ClientWithResponses) GetFleetAgentPoliciesAgentpolicyidWithResponse(ctx context.Context, agentPolicyId string, params *GetFleetAgentPoliciesAgentpolicyidParams, reqEditors ...RequestEditorFn) (*GetFleetAgentPoliciesAgentpolicyidResponse, error) { - rsp, err := c.GetFleetAgentPoliciesAgentpolicyid(ctx, agentPolicyId, params, reqEditors...) +// ParsePostSpacesDisableLegacyUrlAliasesResponse parses an HTTP response from a PostSpacesDisableLegacyUrlAliasesWithResponse call +func ParsePostSpacesDisableLegacyUrlAliasesResponse(rsp *http.Response) (*PostSpacesDisableLegacyUrlAliasesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetAgentPoliciesAgentpolicyidResponse(rsp) + + response := &PostSpacesDisableLegacyUrlAliasesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PutFleetAgentPoliciesAgentpolicyidWithBodyWithResponse request with arbitrary body returning *PutFleetAgentPoliciesAgentpolicyidResponse -func (c *ClientWithResponses) PutFleetAgentPoliciesAgentpolicyidWithBodyWithResponse(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetAgentPoliciesAgentpolicyidResponse, error) { - rsp, err := c.PutFleetAgentPoliciesAgentpolicyidWithBody(ctx, agentPolicyId, params, contentType, body, reqEditors...) +// ParsePostSpacesGetShareableReferencesResponse parses an HTTP response from a PostSpacesGetShareableReferencesWithResponse call +func ParsePostSpacesGetShareableReferencesResponse(rsp *http.Response) (*PostSpacesGetShareableReferencesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutFleetAgentPoliciesAgentpolicyidResponse(rsp) + + response := &PostSpacesGetShareableReferencesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -func (c *ClientWithResponses) PutFleetAgentPoliciesAgentpolicyidWithResponse(ctx context.Context, agentPolicyId string, params *PutFleetAgentPoliciesAgentpolicyidParams, body PutFleetAgentPoliciesAgentpolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetAgentPoliciesAgentpolicyidResponse, error) { - rsp, err := c.PutFleetAgentPoliciesAgentpolicyid(ctx, agentPolicyId, params, body, reqEditors...) 
+// ParsePostSpacesResolveCopySavedObjectsErrorsResponse parses an HTTP response from a PostSpacesResolveCopySavedObjectsErrorsWithResponse call +func ParsePostSpacesResolveCopySavedObjectsErrorsResponse(rsp *http.Response) (*PostSpacesResolveCopySavedObjectsErrorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutFleetAgentPoliciesAgentpolicyidResponse(rsp) + + response := &PostSpacesResolveCopySavedObjectsErrorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetFleetEnrollmentApiKeysWithResponse request returning *GetFleetEnrollmentApiKeysResponse -func (c *ClientWithResponses) GetFleetEnrollmentApiKeysWithResponse(ctx context.Context, params *GetFleetEnrollmentApiKeysParams, reqEditors ...RequestEditorFn) (*GetFleetEnrollmentApiKeysResponse, error) { - rsp, err := c.GetFleetEnrollmentApiKeys(ctx, params, reqEditors...) +// ParsePostSpacesUpdateObjectsSpacesResponse parses an HTTP response from a PostSpacesUpdateObjectsSpacesWithResponse call +func ParsePostSpacesUpdateObjectsSpacesResponse(rsp *http.Response) (*PostSpacesUpdateObjectsSpacesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetEnrollmentApiKeysResponse(rsp) + + response := &PostSpacesUpdateObjectsSpacesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetFleetEpmPackagesWithResponse request returning *GetFleetEpmPackagesResponse -func (c *ClientWithResponses) GetFleetEpmPackagesWithResponse(ctx context.Context, params *GetFleetEpmPackagesParams, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesResponse, error) { - rsp, err := c.GetFleetEpmPackages(ctx, params, reqEditors...) +// ParseGetSpacesSpaceResponse parses an HTTP response from a GetSpacesSpaceWithResponse call +func ParseGetSpacesSpaceResponse(rsp *http.Response) (*GetSpacesSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetEpmPackagesResponse(rsp) + + response := &GetSpacesSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PostFleetEpmPackagesWithBodyWithResponse request with arbitrary body returning *PostFleetEpmPackagesResponse -func (c *ClientWithResponses) PostFleetEpmPackagesWithBodyWithResponse(ctx context.Context, params *PostFleetEpmPackagesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesResponse, error) { - rsp, err := c.PostFleetEpmPackagesWithBody(ctx, params, contentType, body, reqEditors...) 
+// ParsePostSpacesSpaceResponse parses an HTTP response from a PostSpacesSpaceWithResponse call +func ParsePostSpacesSpaceResponse(rsp *http.Response) (*PostSpacesSpaceResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetEpmPackagesResponse(rsp) + + response := &PostSpacesSpaceResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// DeleteFleetEpmPackagesPkgnamePkgversionWithResponse request returning *DeleteFleetEpmPackagesPkgnamePkgversionResponse -func (c *ClientWithResponses) DeleteFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *DeleteFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*DeleteFleetEpmPackagesPkgnamePkgversionResponse, error) { - rsp, err := c.DeleteFleetEpmPackagesPkgnamePkgversion(ctx, pkgName, pkgVersion, params, reqEditors...) +// ParseDeleteSpacesSpaceIdResponse parses an HTTP response from a DeleteSpacesSpaceIdWithResponse call +func ParseDeleteSpacesSpaceIdResponse(rsp *http.Response) (*DeleteSpacesSpaceIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseDeleteFleetEpmPackagesPkgnamePkgversionResponse(rsp) + + response := &DeleteSpacesSpaceIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetFleetEpmPackagesPkgnamePkgversionWithResponse request returning *GetFleetEpmPackagesPkgnamePkgversionResponse -func (c *ClientWithResponses) GetFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *GetFleetEpmPackagesPkgnamePkgversionParams, reqEditors ...RequestEditorFn) (*GetFleetEpmPackagesPkgnamePkgversionResponse, error) { - rsp, err := c.GetFleetEpmPackagesPkgnamePkgversion(ctx, pkgName, pkgVersion, params, reqEditors...) +// ParseGetSpacesSpaceIdResponse parses an HTTP response from a GetSpacesSpaceIdWithResponse call +func ParseGetSpacesSpaceIdResponse(rsp *http.Response) (*GetSpacesSpaceIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetEpmPackagesPkgnamePkgversionResponse(rsp) + + response := &GetSpacesSpaceIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PostFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse request with arbitrary body returning *PostFleetEpmPackagesPkgnamePkgversionResponse -func (c *ClientWithResponses) PostFleetEpmPackagesPkgnamePkgversionWithBodyWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionResponse, error) { - rsp, err := c.PostFleetEpmPackagesPkgnamePkgversionWithBody(ctx, pkgName, pkgVersion, params, contentType, body, reqEditors...) 
+// ParsePutSpacesSpaceIdResponse parses an HTTP response from a PutSpacesSpaceIdWithResponse call +func ParsePutSpacesSpaceIdResponse(rsp *http.Response) (*PutSpacesSpaceIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetEpmPackagesPkgnamePkgversionResponse(rsp) + + response := &PutSpacesSpaceIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -func (c *ClientWithResponses) PostFleetEpmPackagesPkgnamePkgversionWithResponse(ctx context.Context, pkgName string, pkgVersion string, params *PostFleetEpmPackagesPkgnamePkgversionParams, body PostFleetEpmPackagesPkgnamePkgversionJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetEpmPackagesPkgnamePkgversionResponse, error) { - rsp, err := c.PostFleetEpmPackagesPkgnamePkgversion(ctx, pkgName, pkgVersion, params, body, reqEditors...) +// ParseGetStatusResponse parses an HTTP response from a GetStatusWithResponse call +func ParseGetStatusResponse(rsp *http.Response) (*GetStatusResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetEpmPackagesPkgnamePkgversionResponse(rsp) + + response := &GetStatusResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 503: + var dest struct { + union json.RawMessage + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON503 = &dest + + } + + return response, nil } -// GetFleetFleetServerHostsWithResponse request returning *GetFleetFleetServerHostsResponse -func (c *ClientWithResponses) GetFleetFleetServerHostsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetFleetServerHostsResponse, error) { - rsp, err := c.GetFleetFleetServerHosts(ctx, reqEditors...) +// ParseGetStreamsResponse parses an HTTP response from a GetStreamsWithResponse call +func ParseGetStreamsResponse(rsp *http.Response) (*GetStreamsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetFleetServerHostsResponse(rsp) + + response := &GetStreamsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PostFleetFleetServerHostsWithBodyWithResponse request with arbitrary body returning *PostFleetFleetServerHostsResponse -func (c *ClientWithResponses) PostFleetFleetServerHostsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetFleetServerHostsResponse, error) { - rsp, err := c.PostFleetFleetServerHostsWithBody(ctx, contentType, body, reqEditors...) 
+// ParsePostStreamsDisableResponse parses an HTTP response from a PostStreamsDisableWithResponse call +func ParsePostStreamsDisableResponse(rsp *http.Response) (*PostStreamsDisableResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetFleetServerHostsResponse(rsp) + + response := &PostStreamsDisableResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -func (c *ClientWithResponses) PostFleetFleetServerHostsWithResponse(ctx context.Context, body PostFleetFleetServerHostsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetFleetServerHostsResponse, error) { - rsp, err := c.PostFleetFleetServerHosts(ctx, body, reqEditors...) +// ParsePostStreamsEnableResponse parses an HTTP response from a PostStreamsEnableWithResponse call +func ParsePostStreamsEnableResponse(rsp *http.Response) (*PostStreamsEnableResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetFleetServerHostsResponse(rsp) + + response := &PostStreamsEnableResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// DeleteFleetFleetServerHostsItemidWithResponse request returning *DeleteFleetFleetServerHostsItemidResponse -func (c *ClientWithResponses) DeleteFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*DeleteFleetFleetServerHostsItemidResponse, error) { - rsp, err := c.DeleteFleetFleetServerHostsItemid(ctx, itemId, reqEditors...) +// ParsePostStreamsResyncResponse parses an HTTP response from a PostStreamsResyncWithResponse call +func ParsePostStreamsResyncResponse(rsp *http.Response) (*PostStreamsResyncResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseDeleteFleetFleetServerHostsItemidResponse(rsp) + + response := &PostStreamsResyncResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetFleetFleetServerHostsItemidWithResponse request returning *GetFleetFleetServerHostsItemidResponse -func (c *ClientWithResponses) GetFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, reqEditors ...RequestEditorFn) (*GetFleetFleetServerHostsItemidResponse, error) { - rsp, err := c.GetFleetFleetServerHostsItemid(ctx, itemId, reqEditors...) +// ParseDeleteStreamsNameResponse parses an HTTP response from a DeleteStreamsNameWithResponse call +func ParseDeleteStreamsNameResponse(rsp *http.Response) (*DeleteStreamsNameResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetFleetServerHostsItemidResponse(rsp) + + response := &DeleteStreamsNameResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PutFleetFleetServerHostsItemidWithBodyWithResponse request with arbitrary body returning *PutFleetFleetServerHostsItemidResponse -func (c *ClientWithResponses) PutFleetFleetServerHostsItemidWithBodyWithResponse(ctx context.Context, itemId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetFleetServerHostsItemidResponse, error) { - rsp, err := c.PutFleetFleetServerHostsItemidWithBody(ctx, itemId, contentType, body, reqEditors...) 
+// ParseGetStreamsNameResponse parses an HTTP response from a GetStreamsNameWithResponse call +func ParseGetStreamsNameResponse(rsp *http.Response) (*GetStreamsNameResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutFleetFleetServerHostsItemidResponse(rsp) + + response := &GetStreamsNameResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -func (c *ClientWithResponses) PutFleetFleetServerHostsItemidWithResponse(ctx context.Context, itemId string, body PutFleetFleetServerHostsItemidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetFleetServerHostsItemidResponse, error) { - rsp, err := c.PutFleetFleetServerHostsItemid(ctx, itemId, body, reqEditors...) +// ParsePutStreamsNameResponse parses an HTTP response from a PutStreamsNameWithResponse call +func ParsePutStreamsNameResponse(rsp *http.Response) (*PutStreamsNameResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutFleetFleetServerHostsItemidResponse(rsp) + + response := &PutStreamsNameResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetFleetOutputsWithResponse request returning *GetFleetOutputsResponse -func (c *ClientWithResponses) GetFleetOutputsWithResponse(ctx context.Context, reqEditors ...RequestEditorFn) (*GetFleetOutputsResponse, error) { - rsp, err := c.GetFleetOutputs(ctx, reqEditors...) +// ParsePostStreamsNameForkResponse parses an HTTP response from a PostStreamsNameForkWithResponse call +func ParsePostStreamsNameForkResponse(rsp *http.Response) (*PostStreamsNameForkResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetOutputsResponse(rsp) -} -// PostFleetOutputsWithBodyWithResponse request with arbitrary body returning *PostFleetOutputsResponse -func (c *ClientWithResponses) PostFleetOutputsWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetOutputsResponse, error) { - rsp, err := c.PostFleetOutputsWithBody(ctx, contentType, body, reqEditors...) - if err != nil { - return nil, err + response := &PostStreamsNameForkResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return ParsePostFleetOutputsResponse(rsp) + + return response, nil } -func (c *ClientWithResponses) PostFleetOutputsWithResponse(ctx context.Context, body PostFleetOutputsJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetOutputsResponse, error) { - rsp, err := c.PostFleetOutputs(ctx, body, reqEditors...) +// ParseGetStreamsNameGroupResponse parses an HTTP response from a GetStreamsNameGroupWithResponse call +func ParseGetStreamsNameGroupResponse(rsp *http.Response) (*GetStreamsNameGroupResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetOutputsResponse(rsp) -} -// DeleteFleetOutputsOutputidWithResponse request returning *DeleteFleetOutputsOutputidResponse -func (c *ClientWithResponses) DeleteFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*DeleteFleetOutputsOutputidResponse, error) { - rsp, err := c.DeleteFleetOutputsOutputid(ctx, outputId, reqEditors...) 
- if err != nil { - return nil, err + response := &GetStreamsNameGroupResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return ParseDeleteFleetOutputsOutputidResponse(rsp) + + return response, nil } -// GetFleetOutputsOutputidWithResponse request returning *GetFleetOutputsOutputidResponse -func (c *ClientWithResponses) GetFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, reqEditors ...RequestEditorFn) (*GetFleetOutputsOutputidResponse, error) { - rsp, err := c.GetFleetOutputsOutputid(ctx, outputId, reqEditors...) +// ParsePutStreamsNameGroupResponse parses an HTTP response from a PutStreamsNameGroupWithResponse call +func ParsePutStreamsNameGroupResponse(rsp *http.Response) (*PutStreamsNameGroupResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetOutputsOutputidResponse(rsp) + + response := &PutStreamsNameGroupResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PutFleetOutputsOutputidWithBodyWithResponse request with arbitrary body returning *PutFleetOutputsOutputidResponse -func (c *ClientWithResponses) PutFleetOutputsOutputidWithBodyWithResponse(ctx context.Context, outputId string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetOutputsOutputidResponse, error) { - rsp, err := c.PutFleetOutputsOutputidWithBody(ctx, outputId, contentType, body, reqEditors...) +// ParseGetStreamsNameIngestResponse parses an HTTP response from a GetStreamsNameIngestWithResponse call +func ParseGetStreamsNameIngestResponse(rsp *http.Response) (*GetStreamsNameIngestResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutFleetOutputsOutputidResponse(rsp) + + response := &GetStreamsNameIngestResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -func (c *ClientWithResponses) PutFleetOutputsOutputidWithResponse(ctx context.Context, outputId string, body PutFleetOutputsOutputidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetOutputsOutputidResponse, error) { - rsp, err := c.PutFleetOutputsOutputid(ctx, outputId, body, reqEditors...) +// ParsePutStreamsNameIngestResponse parses an HTTP response from a PutStreamsNameIngestWithResponse call +func ParsePutStreamsNameIngestResponse(rsp *http.Response) (*PutStreamsNameIngestResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutFleetOutputsOutputidResponse(rsp) + + response := &PutStreamsNameIngestResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetFleetPackagePoliciesWithResponse request returning *GetFleetPackagePoliciesResponse -func (c *ClientWithResponses) GetFleetPackagePoliciesWithResponse(ctx context.Context, params *GetFleetPackagePoliciesParams, reqEditors ...RequestEditorFn) (*GetFleetPackagePoliciesResponse, error) { - rsp, err := c.GetFleetPackagePolicies(ctx, params, reqEditors...) 
+// ParsePostStreamsNameContentExportResponse parses an HTTP response from a PostStreamsNameContentExportWithResponse call +func ParsePostStreamsNameContentExportResponse(rsp *http.Response) (*PostStreamsNameContentExportResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetPackagePoliciesResponse(rsp) + + response := &PostStreamsNameContentExportResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PostFleetPackagePoliciesWithBodyWithResponse request with arbitrary body returning *PostFleetPackagePoliciesResponse -func (c *ClientWithResponses) PostFleetPackagePoliciesWithBodyWithResponse(ctx context.Context, params *PostFleetPackagePoliciesParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesResponse, error) { - rsp, err := c.PostFleetPackagePoliciesWithBody(ctx, params, contentType, body, reqEditors...) +// ParsePostStreamsNameContentImportResponse parses an HTTP response from a PostStreamsNameContentImportWithResponse call +func ParsePostStreamsNameContentImportResponse(rsp *http.Response) (*PostStreamsNameContentImportResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetPackagePoliciesResponse(rsp) + + response := &PostStreamsNameContentImportResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -func (c *ClientWithResponses) PostFleetPackagePoliciesWithResponse(ctx context.Context, params *PostFleetPackagePoliciesParams, body PostFleetPackagePoliciesJSONRequestBody, reqEditors ...RequestEditorFn) (*PostFleetPackagePoliciesResponse, error) { - rsp, err := c.PostFleetPackagePolicies(ctx, params, body, reqEditors...) +// ParseGetStreamsNameDashboardsResponse parses an HTTP response from a GetStreamsNameDashboardsWithResponse call +func ParseGetStreamsNameDashboardsResponse(rsp *http.Response) (*GetStreamsNameDashboardsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostFleetPackagePoliciesResponse(rsp) + + response := &GetStreamsNameDashboardsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// DeleteFleetPackagePoliciesPackagepolicyidWithResponse request returning *DeleteFleetPackagePoliciesPackagepolicyidResponse -func (c *ClientWithResponses) DeleteFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *DeleteFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*DeleteFleetPackagePoliciesPackagepolicyidResponse, error) { - rsp, err := c.DeleteFleetPackagePoliciesPackagepolicyid(ctx, packagePolicyId, params, reqEditors...) 
+// ParsePostStreamsNameDashboardsBulkResponse parses an HTTP response from a PostStreamsNameDashboardsBulkWithResponse call +func ParsePostStreamsNameDashboardsBulkResponse(rsp *http.Response) (*PostStreamsNameDashboardsBulkResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseDeleteFleetPackagePoliciesPackagepolicyidResponse(rsp) + + response := &PostStreamsNameDashboardsBulkResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetFleetPackagePoliciesPackagepolicyidWithResponse request returning *GetFleetPackagePoliciesPackagepolicyidResponse -func (c *ClientWithResponses) GetFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *GetFleetPackagePoliciesPackagepolicyidParams, reqEditors ...RequestEditorFn) (*GetFleetPackagePoliciesPackagepolicyidResponse, error) { - rsp, err := c.GetFleetPackagePoliciesPackagepolicyid(ctx, packagePolicyId, params, reqEditors...) +// ParseDeleteStreamsNameDashboardsDashboardidResponse parses an HTTP response from a DeleteStreamsNameDashboardsDashboardidWithResponse call +func ParseDeleteStreamsNameDashboardsDashboardidResponse(rsp *http.Response) (*DeleteStreamsNameDashboardsDashboardidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetFleetPackagePoliciesPackagepolicyidResponse(rsp) + + response := &DeleteStreamsNameDashboardsDashboardidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PutFleetPackagePoliciesPackagepolicyidWithBodyWithResponse request with arbitrary body returning *PutFleetPackagePoliciesPackagepolicyidResponse -func (c *ClientWithResponses) PutFleetPackagePoliciesPackagepolicyidWithBodyWithResponse(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutFleetPackagePoliciesPackagepolicyidResponse, error) { - rsp, err := c.PutFleetPackagePoliciesPackagepolicyidWithBody(ctx, packagePolicyId, params, contentType, body, reqEditors...) +// ParsePutStreamsNameDashboardsDashboardidResponse parses an HTTP response from a PutStreamsNameDashboardsDashboardidWithResponse call +func ParsePutStreamsNameDashboardsDashboardidResponse(rsp *http.Response) (*PutStreamsNameDashboardsDashboardidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutFleetPackagePoliciesPackagepolicyidResponse(rsp) + + response := &PutStreamsNameDashboardsDashboardidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -func (c *ClientWithResponses) PutFleetPackagePoliciesPackagepolicyidWithResponse(ctx context.Context, packagePolicyId string, params *PutFleetPackagePoliciesPackagepolicyidParams, body PutFleetPackagePoliciesPackagepolicyidJSONRequestBody, reqEditors ...RequestEditorFn) (*PutFleetPackagePoliciesPackagepolicyidResponse, error) { - rsp, err := c.PutFleetPackagePoliciesPackagepolicyid(ctx, packagePolicyId, params, body, reqEditors...) 
+// ParseGetStreamsNameQueriesResponse parses an HTTP response from a GetStreamsNameQueriesWithResponse call +func ParseGetStreamsNameQueriesResponse(rsp *http.Response) (*GetStreamsNameQueriesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutFleetPackagePoliciesPackagepolicyidResponse(rsp) + + response := &GetStreamsNameQueriesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PostParametersWithBodyWithResponse request with arbitrary body returning *PostParametersResponse -func (c *ClientWithResponses) PostParametersWithBodyWithResponse(ctx context.Context, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostParametersResponse, error) { - rsp, err := c.PostParametersWithBody(ctx, contentType, body, reqEditors...) +// ParsePostStreamsNameQueriesBulkResponse parses an HTTP response from a PostStreamsNameQueriesBulkWithResponse call +func ParsePostStreamsNameQueriesBulkResponse(rsp *http.Response) (*PostStreamsNameQueriesBulkResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostParametersResponse(rsp) + + response := &PostStreamsNameQueriesBulkResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -func (c *ClientWithResponses) PostParametersWithResponse(ctx context.Context, body PostParametersJSONRequestBody, reqEditors ...RequestEditorFn) (*PostParametersResponse, error) { - rsp, err := c.PostParameters(ctx, body, reqEditors...) +// ParseDeleteStreamsNameQueriesQueryidResponse parses an HTTP response from a DeleteStreamsNameQueriesQueryidWithResponse call +func ParseDeleteStreamsNameQueriesQueryidResponse(rsp *http.Response) (*DeleteStreamsNameQueriesQueryidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostParametersResponse(rsp) + + response := &DeleteStreamsNameQueriesQueryidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// DeleteParameterWithResponse request returning *DeleteParameterResponse -func (c *ClientWithResponses) DeleteParameterWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*DeleteParameterResponse, error) { - rsp, err := c.DeleteParameter(ctx, id, reqEditors...) +// ParsePutStreamsNameQueriesQueryidResponse parses an HTTP response from a PutStreamsNameQueriesQueryidWithResponse call +func ParsePutStreamsNameQueriesQueryidResponse(rsp *http.Response) (*PutStreamsNameQueriesQueryidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseDeleteParameterResponse(rsp) + + response := &PutStreamsNameQueriesQueryidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetParameterWithResponse request returning *GetParameterResponse -func (c *ClientWithResponses) GetParameterWithResponse(ctx context.Context, id string, reqEditors ...RequestEditorFn) (*GetParameterResponse, error) { - rsp, err := c.GetParameter(ctx, id, reqEditors...) 
+// ParseGetStreamsNameRulesResponse parses an HTTP response from a GetStreamsNameRulesWithResponse call +func ParseGetStreamsNameRulesResponse(rsp *http.Response) (*GetStreamsNameRulesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetParameterResponse(rsp) + + response := &GetStreamsNameRulesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PutParameterWithBodyWithResponse request with arbitrary body returning *PutParameterResponse -func (c *ClientWithResponses) PutParameterWithBodyWithResponse(ctx context.Context, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutParameterResponse, error) { - rsp, err := c.PutParameterWithBody(ctx, id, contentType, body, reqEditors...) +// ParseDeleteStreamsNameRulesRuleidResponse parses an HTTP response from a DeleteStreamsNameRulesRuleidWithResponse call +func ParseDeleteStreamsNameRulesRuleidResponse(rsp *http.Response) (*DeleteStreamsNameRulesRuleidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutParameterResponse(rsp) + + response := &DeleteStreamsNameRulesRuleidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -func (c *ClientWithResponses) PutParameterWithResponse(ctx context.Context, id string, body PutParameterJSONRequestBody, reqEditors ...RequestEditorFn) (*PutParameterResponse, error) { - rsp, err := c.PutParameter(ctx, id, body, reqEditors...) +// ParsePutStreamsNameRulesRuleidResponse parses an HTTP response from a PutStreamsNameRulesRuleidWithResponse call +func ParsePutStreamsNameRulesRuleidResponse(rsp *http.Response) (*PutStreamsNameRulesRuleidResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutParameterResponse(rsp) + + response := &PutStreamsNameRulesRuleidResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// DeleteActionsConnectorIdWithResponse request returning *DeleteActionsConnectorIdResponse -func (c *ClientWithResponses) DeleteActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteActionsConnectorIdResponse, error) { - rsp, err := c.DeleteActionsConnectorId(ctx, spaceId, id, reqEditors...) +// ParseGetStreamsNameSignificantEventsResponse parses an HTTP response from a GetStreamsNameSignificantEventsWithResponse call +func ParseGetStreamsNameSignificantEventsResponse(rsp *http.Response) (*GetStreamsNameSignificantEventsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseDeleteActionsConnectorIdResponse(rsp) + + response := &GetStreamsNameSignificantEventsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetActionsConnectorIdWithResponse request returning *GetActionsConnectorIdResponse -func (c *ClientWithResponses) GetActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetActionsConnectorIdResponse, error) { - rsp, err := c.GetActionsConnectorId(ctx, spaceId, id, reqEditors...) 
+// ParseGetStreamsNameSignificantEventsGenerateResponse parses an HTTP response from a GetStreamsNameSignificantEventsGenerateWithResponse call +func ParseGetStreamsNameSignificantEventsGenerateResponse(rsp *http.Response) (*GetStreamsNameSignificantEventsGenerateResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetActionsConnectorIdResponse(rsp) + + response := &GetStreamsNameSignificantEventsGenerateResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// PostActionsConnectorIdWithBodyWithResponse request with arbitrary body returning *PostActionsConnectorIdResponse -func (c *ClientWithResponses) PostActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) { - rsp, err := c.PostActionsConnectorIdWithBody(ctx, spaceId, id, contentType, body, reqEditors...) +// ParsePostStreamsNameSignificantEventsPreviewResponse parses an HTTP response from a PostStreamsNameSignificantEventsPreviewWithResponse call +func ParsePostStreamsNameSignificantEventsPreviewResponse(rsp *http.Response) (*PostStreamsNameSignificantEventsPreviewResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostActionsConnectorIdResponse(rsp) + + response := &PostStreamsNameSignificantEventsPreviewResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -func (c *ClientWithResponses) PostActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PostActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PostActionsConnectorIdResponse, error) { - rsp, err := c.PostActionsConnectorId(ctx, spaceId, id, body, reqEditors...) +// ParsePostSyntheticsMonitorTestResponse parses an HTTP response from a PostSyntheticsMonitorTestWithResponse call +func ParsePostSyntheticsMonitorTestResponse(rsp *http.Response) (*PostSyntheticsMonitorTestResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostActionsConnectorIdResponse(rsp) + + response := &PostSyntheticsMonitorTestResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest struct { + // Errors Array of errors encountered while triggering the test, one per service location. + Errors *[]struct { + Error struct { + // FailedMonitors Optional list of monitors that failed at the location. + FailedMonitors *[]map[string]interface{} `json:"failed_monitors"` + + // Reason Human-readable explanation of the failure. + Reason string `json:"reason"` + + // Status HTTP status code returned by the agent. + Status int `json:"status"` + } `json:"error"` + + // LocationId Identifier of the service location where the error occurred. + LocationId string `json:"locationId"` + } `json:"errors,omitempty"` + + // TestRunId Unique identifier for the triggered test run. 
+ TestRunId string `json:"testRunId"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -// PutActionsConnectorIdWithBodyWithResponse request with arbitrary body returning *PutActionsConnectorIdResponse -func (c *ClientWithResponses) PutActionsConnectorIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) { - rsp, err := c.PutActionsConnectorIdWithBody(ctx, spaceId, id, contentType, body, reqEditors...) +// ParseGetSyntheticMonitorsResponse parses an HTTP response from a GetSyntheticMonitorsWithResponse call +func ParseGetSyntheticMonitorsResponse(rsp *http.Response) (*GetSyntheticMonitorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutActionsConnectorIdResponse(rsp) + + response := &GetSyntheticMonitorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -func (c *ClientWithResponses) PutActionsConnectorIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PutActionsConnectorIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PutActionsConnectorIdResponse, error) { - rsp, err := c.PutActionsConnectorId(ctx, spaceId, id, body, reqEditors...) +// ParsePostSyntheticMonitorsResponse parses an HTTP response from a PostSyntheticMonitorsWithResponse call +func ParsePostSyntheticMonitorsResponse(rsp *http.Response) (*PostSyntheticMonitorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePutActionsConnectorIdResponse(rsp) + + response := &PostSyntheticMonitorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// GetActionsConnectorsWithResponse request returning *GetActionsConnectorsResponse -func (c *ClientWithResponses) GetActionsConnectorsWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetActionsConnectorsResponse, error) { - rsp, err := c.GetActionsConnectors(ctx, spaceId, reqEditors...) +// ParseDeleteSyntheticMonitorsResponse parses an HTTP response from a DeleteSyntheticMonitorsWithResponse call +func ParseDeleteSyntheticMonitorsResponse(rsp *http.Response) (*DeleteSyntheticMonitorsResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetActionsConnectorsResponse(rsp) + + response := &DeleteSyntheticMonitorsResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + // Deleted If it is `true`, the monitor was successfully deleted If it is `false`, the monitor was not deleted. + Deleted *bool `json:"deleted,omitempty"` + + // Ids The unique identifier of the deleted monitor. 
+ Ids *string `json:"ids,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -// GetAllDataViewsDefaultWithResponse request returning *GetAllDataViewsDefaultResponse -func (c *ClientWithResponses) GetAllDataViewsDefaultWithResponse(ctx context.Context, spaceId SpaceId, reqEditors ...RequestEditorFn) (*GetAllDataViewsDefaultResponse, error) { - rsp, err := c.GetAllDataViewsDefault(ctx, spaceId, reqEditors...) +// ParseDeleteSyntheticMonitorResponse parses an HTTP response from a DeleteSyntheticMonitorWithResponse call +func ParseDeleteSyntheticMonitorResponse(rsp *http.Response) (*DeleteSyntheticMonitorResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetAllDataViewsDefaultResponse(rsp) + + response := &DeleteSyntheticMonitorResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// CreateDataViewDefaultwWithBodyWithResponse request with arbitrary body returning *CreateDataViewDefaultwResponse -func (c *ClientWithResponses) CreateDataViewDefaultwWithBodyWithResponse(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*CreateDataViewDefaultwResponse, error) { - rsp, err := c.CreateDataViewDefaultwWithBody(ctx, spaceId, contentType, body, reqEditors...) +// ParseGetSyntheticMonitorResponse parses an HTTP response from a GetSyntheticMonitorWithResponse call +func ParseGetSyntheticMonitorResponse(rsp *http.Response) (*GetSyntheticMonitorResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseCreateDataViewDefaultwResponse(rsp) + + response := &GetSyntheticMonitorResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -func (c *ClientWithResponses) CreateDataViewDefaultwWithResponse(ctx context.Context, spaceId SpaceId, body CreateDataViewDefaultwJSONRequestBody, reqEditors ...RequestEditorFn) (*CreateDataViewDefaultwResponse, error) { - rsp, err := c.CreateDataViewDefaultw(ctx, spaceId, body, reqEditors...) +// ParsePutSyntheticMonitorResponse parses an HTTP response from a PutSyntheticMonitorWithResponse call +func ParsePutSyntheticMonitorResponse(rsp *http.Response) (*PutSyntheticMonitorResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseCreateDataViewDefaultwResponse(rsp) + + response := &PutSyntheticMonitorResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// DeleteDataViewDefaultWithResponse request returning *DeleteDataViewDefaultResponse -func (c *ClientWithResponses) DeleteDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*DeleteDataViewDefaultResponse, error) { - rsp, err := c.DeleteDataViewDefault(ctx, spaceId, viewId, reqEditors...) 
+// ParseGetParametersResponse parses an HTTP response from a GetParametersWithResponse call +func ParseGetParametersResponse(rsp *http.Response) (*GetParametersResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseDeleteDataViewDefaultResponse(rsp) + + response := &GetParametersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []SyntheticsGetParameterResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -// GetDataViewDefaultWithResponse request returning *GetDataViewDefaultResponse -func (c *ClientWithResponses) GetDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, reqEditors ...RequestEditorFn) (*GetDataViewDefaultResponse, error) { - rsp, err := c.GetDataViewDefault(ctx, spaceId, viewId, reqEditors...) +// ParsePostParametersResponse parses an HTTP response from a PostParametersWithResponse call +func ParsePostParametersResponse(rsp *http.Response) (*PostParametersResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetDataViewDefaultResponse(rsp) + + response := &PostParametersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest CreateParamResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -// UpdateDataViewDefaultWithBodyWithResponse request with arbitrary body returning *UpdateDataViewDefaultResponse -func (c *ClientWithResponses) UpdateDataViewDefaultWithBodyWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*UpdateDataViewDefaultResponse, error) { - rsp, err := c.UpdateDataViewDefaultWithBody(ctx, spaceId, viewId, contentType, body, reqEditors...) +// ParseDeleteParametersResponse parses an HTTP response from a DeleteParametersWithResponse call +func ParseDeleteParametersResponse(rsp *http.Response) (*DeleteParametersResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseUpdateDataViewDefaultResponse(rsp) + + response := &DeleteParametersResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest []struct { + // Deleted Indicates whether the parameter was successfully deleted. It is `true` if it was deleted. It is `false` if it was not deleted. + Deleted *bool `json:"deleted,omitempty"` + + // Id The unique identifier for the deleted parameter. + Id *string `json:"id,omitempty"` + } + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + + } + + return response, nil } -func (c *ClientWithResponses) UpdateDataViewDefaultWithResponse(ctx context.Context, spaceId SpaceId, viewId DataViewsViewId, body UpdateDataViewDefaultJSONRequestBody, reqEditors ...RequestEditorFn) (*UpdateDataViewDefaultResponse, error) { - rsp, err := c.UpdateDataViewDefault(ctx, spaceId, viewId, body, reqEditors...) 
+// ParseDeleteParameterResponse parses an HTTP response from a DeleteParameterWithResponse call +func ParseDeleteParameterResponse(rsp *http.Response) (*DeleteParameterResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseUpdateDataViewDefaultResponse(rsp) + + response := &DeleteParameterResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + return response, nil } -// ParseDeleteAgentConfigurationResponse parses an HTTP response from a DeleteAgentConfigurationWithResponse call -func ParseDeleteAgentConfigurationResponse(rsp *http.Response) (*DeleteAgentConfigurationResponse, error) { +// ParseGetParameterResponse parses an HTTP response from a GetParameterWithResponse call +func ParseGetParameterResponse(rsp *http.Response) (*GetParameterResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &DeleteAgentConfigurationResponse{ + response := &GetParameterResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest APMUIDeleteAgentConfigurationsResponse + var dest SyntheticsGetParameterResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest APMUI400Response - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest + } - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest APMUI401Response - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest + return response, nil +} - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: - var dest APMUI403Response - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON403 = &dest +// ParsePutParameterResponse parses an HTTP response from a PutParameterWithResponse call +func ParsePutParameterResponse(rsp *http.Response) (*PutParameterResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest APMUI404Response + response := &PutParameterResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest map[string]interface{} if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON404 = &dest + response.JSON200 = &dest } return response, nil } -// ParseGetAgentConfigurationsResponse parses an HTTP response from a GetAgentConfigurationsWithResponse call -func ParseGetAgentConfigurationsResponse(rsp *http.Response) (*GetAgentConfigurationsResponse, error) { +// ParseGetPrivateLocationsResponse parses an HTTP response from a GetPrivateLocationsWithResponse call +func ParseGetPrivateLocationsResponse(rsp *http.Response) (*GetPrivateLocationsResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetAgentConfigurationsResponse{ + response := &GetPrivateLocationsResponse{ Body: bodyBytes, HTTPResponse: rsp, } 
switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest APMUIAgentConfigurationsResponse + var dest []SyntheticsGetPrivateLocation if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest APMUI400Response - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest APMUI401Response - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest APMUI404Response - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - } return response, nil } -// ParseCreateUpdateAgentConfigurationResponse parses an HTTP response from a CreateUpdateAgentConfigurationWithResponse call -func ParseCreateUpdateAgentConfigurationResponse(rsp *http.Response) (*CreateUpdateAgentConfigurationResponse, error) { +// ParsePostPrivateLocationResponse parses an HTTP response from a PostPrivateLocationWithResponse call +func ParsePostPrivateLocationResponse(rsp *http.Response) (*PostPrivateLocationResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &CreateUpdateAgentConfigurationResponse{ + response := &PostPrivateLocationResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -24206,414 +142129,308 @@ func ParseCreateUpdateAgentConfigurationResponse(rsp *http.Response) (*CreateUpd } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest APMUI400Response - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: - var dest APMUI401Response - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON401 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: - var dest APMUI403Response - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON403 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest APMUI404Response - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON404 = &dest - } return response, nil } -// PostMaintenanceWindowWithBodyWithResponse request with arbitrary body returning *PostMaintenanceWindowResponse -func (c *ClientWithResponses) PostMaintenanceWindowWithBodyWithResponse(ctx context.Context, spaceId SpaceId, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) { - rsp, err := c.PostMaintenanceWindowWithBody(ctx, spaceId, contentType, body, reqEditors...) 
+// ParseDeletePrivateLocationResponse parses an HTTP response from a DeletePrivateLocationWithResponse call +func ParseDeletePrivateLocationResponse(rsp *http.Response) (*DeletePrivateLocationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParsePostMaintenanceWindowResponse(rsp) -} -func (c *ClientWithResponses) PostMaintenanceWindowWithResponse(ctx context.Context, spaceId SpaceId, body PostMaintenanceWindowJSONRequestBody, reqEditors ...RequestEditorFn) (*PostMaintenanceWindowResponse, error) { - rsp, err := c.PostMaintenanceWindow(ctx, spaceId, body, reqEditors...) - if err != nil { - return nil, err + response := &DeletePrivateLocationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return ParsePostMaintenanceWindowResponse(rsp) -} -// DeleteMaintenanceWindowIdWithResponse request returning *DeleteMaintenanceWindowIdResponse -func (c *ClientWithResponses) DeleteMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*DeleteMaintenanceWindowIdResponse, error) { - rsp, err := c.DeleteMaintenanceWindowId(ctx, spaceId, id, reqEditors...) - if err != nil { - return nil, err - } - return ParseDeleteMaintenanceWindowIdResponse(rsp) + return response, nil } -// GetMaintenanceWindowIdWithResponse request returning *GetMaintenanceWindowIdResponse -func (c *ClientWithResponses) GetMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, reqEditors ...RequestEditorFn) (*GetMaintenanceWindowIdResponse, error) { - rsp, err := c.GetMaintenanceWindowId(ctx, spaceId, id, reqEditors...) +// ParseGetPrivateLocationResponse parses an HTTP response from a GetPrivateLocationWithResponse call +func ParseGetPrivateLocationResponse(rsp *http.Response) (*GetPrivateLocationResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - return ParseGetMaintenanceWindowIdResponse(rsp) -} -// PatchMaintenanceWindowIdWithBodyWithResponse request with arbitrary body returning *PatchMaintenanceWindowIdResponse -func (c *ClientWithResponses) PatchMaintenanceWindowIdWithBodyWithResponse(ctx context.Context, spaceId SpaceId, id string, contentType string, body io.Reader, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) { - rsp, err := c.PatchMaintenanceWindowIdWithBody(ctx, spaceId, id, contentType, body, reqEditors...) - if err != nil { - return nil, err + response := &GetPrivateLocationResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } - return ParsePatchMaintenanceWindowIdResponse(rsp) -} -func (c *ClientWithResponses) PatchMaintenanceWindowIdWithResponse(ctx context.Context, spaceId SpaceId, id string, body PatchMaintenanceWindowIdJSONRequestBody, reqEditors ...RequestEditorFn) (*PatchMaintenanceWindowIdResponse, error) { - rsp, err := c.PatchMaintenanceWindowId(ctx, spaceId, id, body, reqEditors...) 
- if err != nil { - return nil, err + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SyntheticsGetPrivateLocation + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest + } - return ParsePatchMaintenanceWindowIdResponse(rsp) + + return response, nil } -// ParseGetFleetAgentPoliciesResponse parses an HTTP response from a GetFleetAgentPoliciesWithResponse call -func ParseGetFleetAgentPoliciesResponse(rsp *http.Response) (*GetFleetAgentPoliciesResponse, error) { +// ParsePutPrivateLocationResponse parses an HTTP response from a PutPrivateLocationWithResponse call +func ParsePutPrivateLocationResponse(rsp *http.Response) (*PutPrivateLocationResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetAgentPoliciesResponse{ + response := &PutPrivateLocationResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Items []AgentPolicy `json:"items"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` - } + var dest SyntheticsGetPrivateLocation if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - } return response, nil } -// ParsePostFleetAgentPoliciesResponse parses an HTTP response from a PostFleetAgentPoliciesWithResponse call -func ParsePostFleetAgentPoliciesResponse(rsp *http.Response) (*PostFleetAgentPoliciesResponse, error) { +// ParseTaskManagerHealthResponse parses an HTTP response from a TaskManagerHealthWithResponse call +func ParseTaskManagerHealthResponse(rsp *http.Response) (*TaskManagerHealthResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostFleetAgentPoliciesResponse{ + response := &TaskManagerHealthResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item AgentPolicy `json:"item"` - } + var dest TaskManagerHealthAPIsHealthResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest + } + + return response, nil +} + +// ParseDeleteTimelinesResponse parses an HTTP response from a DeleteTimelinesWithResponse call +func ParseDeleteTimelinesResponse(rsp *http.Response) (*DeleteTimelinesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer 
func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + response := &DeleteTimelinesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } return response, nil } -// ParsePostFleetAgentPoliciesDeleteResponse parses an HTTP response from a PostFleetAgentPoliciesDeleteWithResponse call -func ParsePostFleetAgentPoliciesDeleteResponse(rsp *http.Response) (*PostFleetAgentPoliciesDeleteResponse, error) { +// ParseGetTimelineResponse parses an HTTP response from a GetTimelineWithResponse call +func ParseGetTimelineResponse(rsp *http.Response) (*GetTimelineResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostFleetAgentPoliciesDeleteResponse{ + response := &GetTimelineResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Id string `json:"id"` - Name string `json:"name"` - } + var dest SecurityTimelineAPITimelineResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - } return response, nil } -// ParseGetFleetAgentPoliciesAgentpolicyidResponse parses an HTTP response from a GetFleetAgentPoliciesAgentpolicyidWithResponse call -func ParseGetFleetAgentPoliciesAgentpolicyidResponse(rsp *http.Response) (*GetFleetAgentPoliciesAgentpolicyidResponse, error) { +// ParsePatchTimelineResponse parses an HTTP response from a PatchTimelineWithResponse call +func ParsePatchTimelineResponse(rsp *http.Response) (*PatchTimelineResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetAgentPoliciesAgentpolicyidResponse{ + response := &PatchTimelineResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item AgentPolicy `json:"item"` - } + var dest SecurityTimelineAPIPersistTimelineResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 405: var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + // Body The error message + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON400 = &dest + response.JSON405 = &dest } return response, nil } -// ParsePutFleetAgentPoliciesAgentpolicyidResponse parses an HTTP response from a PutFleetAgentPoliciesAgentpolicyidWithResponse call -func ParsePutFleetAgentPoliciesAgentpolicyidResponse(rsp *http.Response) 
(*PutFleetAgentPoliciesAgentpolicyidResponse, error) { +// ParseCreateTimelinesResponse parses an HTTP response from a CreateTimelinesWithResponse call +func ParseCreateTimelinesResponse(rsp *http.Response) (*CreateTimelinesResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PutFleetAgentPoliciesAgentpolicyidResponse{ + response := &CreateTimelinesResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item AgentPolicy `json:"item"` - } + var dest SecurityTimelineAPIPersistTimelineResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 405: var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + // Body The error message + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON400 = &dest + response.JSON405 = &dest } return response, nil } -// ParseGetFleetEnrollmentApiKeysResponse parses an HTTP response from a GetFleetEnrollmentApiKeysWithResponse call -func ParseGetFleetEnrollmentApiKeysResponse(rsp *http.Response) (*GetFleetEnrollmentApiKeysResponse, error) { +// ParseCopyTimelineResponse parses an HTTP response from a CopyTimelineWithResponse call +func ParseCopyTimelineResponse(rsp *http.Response) (*CopyTimelineResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetEnrollmentApiKeysResponse{ + response := &CopyTimelineResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Items []EnrollmentApiKey `json:"items"` - // Deprecated: this property has been marked as deprecated upstream, but no `x-deprecated-reason` was set - List []struct { - // Active When false, the enrollment API key is revoked and cannot be used for enrolling Elastic Agents. - Active bool `json:"active"` - - // ApiKey The enrollment API key (token) used for enrolling Elastic Agents. - ApiKey string `json:"api_key"` - - // ApiKeyId The ID of the API key in the Security API. - ApiKeyId string `json:"api_key_id"` - CreatedAt string `json:"created_at"` - Hidden *bool `json:"hidden,omitempty"` - Id string `json:"id"` - - // Name The name of the enrollment API key. - Name *string `json:"name,omitempty"` - - // PolicyId The ID of the agent policy the Elastic Agent will be enrolled in. 
- PolicyId *string `json:"policy_id,omitempty"` - } `json:"list"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` - } + var dest SecurityTimelineAPIPersistTimelineResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - } return response, nil } -// ParseGetFleetEpmPackagesResponse parses an HTTP response from a GetFleetEpmPackagesWithResponse call -func ParseGetFleetEpmPackagesResponse(rsp *http.Response) (*GetFleetEpmPackagesResponse, error) { +// ParseGetDraftTimelinesResponse parses an HTTP response from a GetDraftTimelinesWithResponse call +func ParseGetDraftTimelinesResponse(rsp *http.Response) (*GetDraftTimelinesResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetEpmPackagesResponse{ + response := &GetDraftTimelinesResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Items []PackageListItem `json:"items"` - } + var dest SecurityTimelineAPIPersistTimelineResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } + } + + return response, nil +} + +// ParseCleanDraftTimelinesResponse parses an HTTP response from a CleanDraftTimelinesWithResponse call +func ParseCleanDraftTimelinesResponse(rsp *http.Response) (*CleanDraftTimelinesResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + + response := &CleanDraftTimelinesResponse{ + Body: bodyBytes, + HTTPResponse: rsp, + } + + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SecurityTimelineAPIPersistTimelineResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON400 = &dest + response.JSON200 = &dest } return response, nil } -// ParsePostFleetEpmPackagesResponse parses an HTTP response from a PostFleetEpmPackagesWithResponse call -func ParsePostFleetEpmPackagesResponse(rsp *http.Response) (*PostFleetEpmPackagesResponse, error) { +// ParseExportTimelinesResponse parses an HTTP response from a ExportTimelinesWithResponse call +func ParseExportTimelinesResponse(rsp *http.Response) (*ExportTimelinesResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostFleetEpmPackagesResponse{ + response := &ExportTimelinesResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -24621,68 +142438,48 @@ func ParsePostFleetEpmPackagesResponse(rsp 
*http.Response) (*PostFleetEpmPackage return response, nil } -// ParseDeleteFleetEpmPackagesPkgnamePkgversionResponse parses an HTTP response from a DeleteFleetEpmPackagesPkgnamePkgversionWithResponse call -func ParseDeleteFleetEpmPackagesPkgnamePkgversionResponse(rsp *http.Response) (*DeleteFleetEpmPackagesPkgnamePkgversionResponse, error) { +// ParsePersistFavoriteRouteResponse parses an HTTP response from a PersistFavoriteRouteWithResponse call +func ParsePersistFavoriteRouteResponse(rsp *http.Response) (*PersistFavoriteRouteResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &DeleteFleetEpmPackagesPkgnamePkgversionResponse{ + response := &PersistFavoriteRouteResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Items []DeleteFleetEpmPackagesPkgnamePkgversion_200_Items_Item `json:"items"` - } + var dest SecurityTimelineAPIFavoriteTimelineResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - } return response, nil } -// ParseGetFleetEpmPackagesPkgnamePkgversionResponse parses an HTTP response from a GetFleetEpmPackagesPkgnamePkgversionWithResponse call -func ParseGetFleetEpmPackagesPkgnamePkgversionResponse(rsp *http.Response) (*GetFleetEpmPackagesPkgnamePkgversionResponse, error) { +// ParseImportTimelinesResponse parses an HTTP response from a ImportTimelinesWithResponse call +func ParseImportTimelinesResponse(rsp *http.Response) (*ImportTimelinesResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetEpmPackagesPkgnamePkgversionResponse{ + response := &ImportTimelinesResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item PackageInfo `json:"item"` - Metadata *struct { - HasPolicies bool `json:"has_policies"` - } `json:"metadata,omitempty"` - } + var dest SecurityTimelineAPIImportTimelineResult if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -24690,161 +142487,103 @@ func ParseGetFleetEpmPackagesPkgnamePkgversionResponse(rsp *http.Response) (*Get case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + // Body The error message + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON400 = &dest - } - - return response, nil -} - -// ParsePostFleetEpmPackagesPkgnamePkgversionResponse parses an HTTP response from a PostFleetEpmPackagesPkgnamePkgversionWithResponse 
call -func ParsePostFleetEpmPackagesPkgnamePkgversionResponse(rsp *http.Response) (*PostFleetEpmPackagesPkgnamePkgversionResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } - - response := &PostFleetEpmPackagesPkgnamePkgversionResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } - - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: var dest struct { - UnderscoreMeta struct { - InstallSource string `json:"install_source"` - Name string `json:"name"` - } `json:"_meta"` - Items []PostFleetEpmPackagesPkgnamePkgversion_200_Items_Item `json:"items"` + // Body The error message + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON200 = &dest + response.JSON404 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + // Body The error message + Body *string `json:"body,omitempty"` + StatusCode *float32 `json:"statusCode,omitempty"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON400 = &dest + response.JSON409 = &dest } return response, nil } -// ParseGetFleetFleetServerHostsResponse parses an HTTP response from a GetFleetFleetServerHostsWithResponse call -func ParseGetFleetFleetServerHostsResponse(rsp *http.Response) (*GetFleetFleetServerHostsResponse, error) { +// ParseInstallPrepackedTimelinesResponse parses an HTTP response from a InstallPrepackedTimelinesWithResponse call +func ParseInstallPrepackedTimelinesResponse(rsp *http.Response) (*InstallPrepackedTimelinesResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetFleetServerHostsResponse{ + response := &InstallPrepackedTimelinesResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Items []ServerHost `json:"items"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` - } + var dest SecurityTimelineAPIImportTimelineResult if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - } return response, nil } -// ParsePostFleetFleetServerHostsResponse parses an HTTP response from a PostFleetFleetServerHostsWithResponse call -func ParsePostFleetFleetServerHostsResponse(rsp *http.Response) (*PostFleetFleetServerHostsResponse, error) { +// 
ParseResolveTimelineResponse parses an HTTP response from a ResolveTimelineWithResponse call +func ParseResolveTimelineResponse(rsp *http.Response) (*ResolveTimelineResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostFleetFleetServerHostsResponse{ + response := &ResolveTimelineResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item ServerHost `json:"item"` - } + var dest SecurityTimelineAPIResolvedTimeline if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - } return response, nil } -// ParseDeleteFleetFleetServerHostsItemidResponse parses an HTTP response from a DeleteFleetFleetServerHostsItemidWithResponse call -func ParseDeleteFleetFleetServerHostsItemidResponse(rsp *http.Response) (*DeleteFleetFleetServerHostsItemidResponse, error) { +// ParseGetTimelinesResponse parses an HTTP response from a GetTimelinesWithResponse call +func ParseGetTimelinesResponse(rsp *http.Response) (*GetTimelinesResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &DeleteFleetFleetServerHostsItemidResponse{ + response := &GetTimelinesResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -24852,207 +142591,154 @@ func ParseDeleteFleetFleetServerHostsItemidResponse(rsp *http.Response) (*Delete switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Id string `json:"id"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest + // CustomTemplateTimelineCount The amount of custom Timeline templates in the results + CustomTemplateTimelineCount *float32 `json:"customTemplateTimelineCount,omitempty"` - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + // DefaultTimelineCount The amount of `default` type Timelines in the results + DefaultTimelineCount *float32 `json:"defaultTimelineCount,omitempty"` + + // ElasticTemplateTimelineCount The amount of Elastic's Timeline templates in the results + ElasticTemplateTimelineCount *float32 `json:"elasticTemplateTimelineCount,omitempty"` + + // FavoriteCount The amount of favorited Timelines + FavoriteCount *float32 `json:"favoriteCount,omitempty"` + + // TemplateTimelineCount The amount of Timeline templates in the results + TemplateTimelineCount *float32 `json:"templateTimelineCount,omitempty"` + Timeline []SecurityTimelineAPITimelineResponse `json:"timeline"` + + // TotalCount The total amount of results + TotalCount float32 `json:"totalCount"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { 
return nil, err } - response.JSON400 = &dest + response.JSON200 = &dest } return response, nil } -// ParseGetFleetFleetServerHostsItemidResponse parses an HTTP response from a GetFleetFleetServerHostsItemidWithResponse call -func ParseGetFleetFleetServerHostsItemidResponse(rsp *http.Response) (*GetFleetFleetServerHostsItemidResponse, error) { +// ParseGetUpgradeStatusResponse parses an HTTP response from a GetUpgradeStatusWithResponse call +func ParseGetUpgradeStatusResponse(rsp *http.Response) (*GetUpgradeStatusResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetFleetServerHostsItemidResponse{ + response := &GetUpgradeStatusResponse{ Body: bodyBytes, HTTPResponse: rsp, } - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item ServerHost `json:"item"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - - } - return response, nil } -// ParsePutFleetFleetServerHostsItemidResponse parses an HTTP response from a PutFleetFleetServerHostsItemidWithResponse call -func ParsePutFleetFleetServerHostsItemidResponse(rsp *http.Response) (*PutFleetFleetServerHostsItemidResponse, error) { +// ParseGetUptimeSettingsResponse parses an HTTP response from a GetUptimeSettingsWithResponse call +func ParseGetUptimeSettingsResponse(rsp *http.Response) (*GetUptimeSettingsResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PutFleetFleetServerHostsItemidResponse{ + response := &GetUptimeSettingsResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item ServerHost `json:"item"` - } + var dest map[string]interface{} if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - } return response, nil } -// ParseGetFleetOutputsResponse parses an HTTP response from a GetFleetOutputsWithResponse call -func ParseGetFleetOutputsResponse(rsp *http.Response) (*GetFleetOutputsResponse, error) { +// ParsePutUptimeSettingsResponse parses an HTTP response from a PutUptimeSettingsWithResponse call +func ParsePutUptimeSettingsResponse(rsp *http.Response) (*PutUptimeSettingsResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetOutputsResponse{ + response := &PutUptimeSettingsResponse{ 
Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Items []OutputUnion `json:"items"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` - } + var dest map[string]interface{} if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest + } + + return response, nil +} + +// ParseDeleteActionsConnectorIdResponse parses an HTTP response from a DeleteActionsConnectorIdWithResponse call +func ParseDeleteActionsConnectorIdResponse(rsp *http.Response) (*DeleteActionsConnectorIdResponse, error) { + bodyBytes, err := io.ReadAll(rsp.Body) + defer func() { _ = rsp.Body.Close() }() + if err != nil { + return nil, err + } + response := &DeleteActionsConnectorIdResponse{ + Body: bodyBytes, + HTTPResponse: rsp, } return response, nil } -// ParsePostFleetOutputsResponse parses an HTTP response from a PostFleetOutputsWithResponse call -func ParsePostFleetOutputsResponse(rsp *http.Response) (*PostFleetOutputsResponse, error) { +// ParseGetActionsConnectorIdResponse parses an HTTP response from a GetActionsConnectorIdWithResponse call +func ParseGetActionsConnectorIdResponse(rsp *http.Response) (*GetActionsConnectorIdResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostFleetOutputsResponse{ + response := &GetActionsConnectorIdResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item OutputUnion `json:"item"` - } + var dest ConnectorResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - } return response, nil } -// ParseDeleteFleetOutputsOutputidResponse parses an HTTP response from a DeleteFleetOutputsOutputidWithResponse call -func ParseDeleteFleetOutputsOutputidResponse(rsp *http.Response) (*DeleteFleetOutputsOutputidResponse, error) { +// ParsePostActionsConnectorIdResponse parses an HTTP response from a PostActionsConnectorIdWithResponse call +func ParsePostActionsConnectorIdResponse(rsp *http.Response) (*PostActionsConnectorIdResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &DeleteFleetOutputsOutputidResponse{ + response := &PostActionsConnectorIdResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -25060,53 +142746,48 @@ func ParseDeleteFleetOutputsOutputidResponse(rsp *http.Response) 
(*DeleteFleetOu switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { + Config *map[string]interface{} `json:"config,omitempty"` + + // ConnectorTypeId The connector type identifier. + ConnectorTypeId string `json:"connector_type_id"` + + // Id The identifier for the connector. Id string `json:"id"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. + IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. + Name string `json:"name"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON404 = &dest + response.JSON200 = &dest } return response, nil } -// ParseGetFleetOutputsOutputidResponse parses an HTTP response from a GetFleetOutputsOutputidWithResponse call -func ParseGetFleetOutputsOutputidResponse(rsp *http.Response) (*GetFleetOutputsOutputidResponse, error) { +// ParsePutActionsConnectorIdResponse parses an HTTP response from a PutActionsConnectorIdWithResponse call +func ParsePutActionsConnectorIdResponse(rsp *http.Response) (*PutActionsConnectorIdResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetOutputsOutputidResponse{ + response := &PutActionsConnectorIdResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -25114,81 +142795,74 @@ func ParseGetFleetOutputsOutputidResponse(rsp *http.Response) (*GetFleetOutputsO switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Item OutputUnion `json:"item"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest + Config *map[string]interface{} `json:"config,omitempty"` - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` + // ConnectorTypeId The connector type identifier. 
+ ConnectorTypeId string `json:"connector_type_id"` + + // Id The identifier for the connector. + Id string `json:"id"` + + // IsDeprecated Indicates whether the connector is deprecated. + IsDeprecated bool `json:"is_deprecated"` + + // IsMissingSecrets Indicates whether the connector is missing secrets. + IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + + // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. + IsPreconfigured bool `json:"is_preconfigured"` + + // IsSystemAction Indicates whether the connector is used for system actions. + IsSystemAction bool `json:"is_system_action"` + + // Name The name of the rule. + Name string `json:"name"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON400 = &dest + response.JSON200 = &dest } return response, nil } -// ParsePutFleetOutputsOutputidResponse parses an HTTP response from a PutFleetOutputsOutputidWithResponse call -func ParsePutFleetOutputsOutputidResponse(rsp *http.Response) (*PutFleetOutputsOutputidResponse, error) { +// ParseGetActionsConnectorsResponse parses an HTTP response from a GetActionsConnectorsWithResponse call +func ParseGetActionsConnectorsResponse(rsp *http.Response) (*GetActionsConnectorsResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PutFleetOutputsOutputidResponse{ + response := &GetActionsConnectorsResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item OutputUnion `json:"item"` - } + var dest []ConnectorResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - } return response, nil } -// ParseGetFleetPackagePoliciesResponse parses an HTTP response from a GetFleetPackagePoliciesWithResponse call -func ParseGetFleetPackagePoliciesResponse(rsp *http.Response) (*GetFleetPackagePoliciesResponse, error) { +// ParseGetAllDataViewsDefaultResponse parses an HTTP response from a GetAllDataViewsDefaultWithResponse call +func ParseGetAllDataViewsDefaultResponse(rsp *http.Response) (*GetAllDataViewsDefaultResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetPackagePoliciesResponse{ + response := &GetAllDataViewsDefaultResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -25196,10 +142870,7 @@ func ParseGetFleetPackagePoliciesResponse(rsp *http.Response) (*GetFleetPackageP switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: var dest struct { - Items []PackagePolicy `json:"items"` - Page float32 `json:"page"` - PerPage float32 `json:"perPage"` - Total float32 `json:"total"` + DataView *[]GetDataViewsResponseItem `json:"data_view,omitempty"` } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err @@ -25207,13 
+142878,7 @@ func ParseGetFleetPackagePoliciesResponse(rsp *http.Response) (*GetFleetPackageP response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } + var dest DataViews400Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -25224,141 +142889,88 @@ func ParseGetFleetPackagePoliciesResponse(rsp *http.Response) (*GetFleetPackageP return response, nil } -// ParsePostFleetPackagePoliciesResponse parses an HTTP response from a PostFleetPackagePoliciesWithResponse call -func ParsePostFleetPackagePoliciesResponse(rsp *http.Response) (*PostFleetPackagePoliciesResponse, error) { +// ParseCreateDataViewDefaultwResponse parses an HTTP response from a CreateDataViewDefaultwWithResponse call +func ParseCreateDataViewDefaultwResponse(rsp *http.Response) (*CreateDataViewDefaultwResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostFleetPackagePoliciesResponse{ + response := &CreateDataViewDefaultwResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item PackagePolicy `json:"item"` - } + var dest DataViewsDataViewResponseObject if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } + var dest DataViews400Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON400 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON409 = &dest - } return response, nil } -// ParseDeleteFleetPackagePoliciesPackagepolicyidResponse parses an HTTP response from a DeleteFleetPackagePoliciesPackagepolicyidWithResponse call -func ParseDeleteFleetPackagePoliciesPackagepolicyidResponse(rsp *http.Response) (*DeleteFleetPackagePoliciesPackagepolicyidResponse, error) { +// ParseDeleteDataViewDefaultResponse parses an HTTP response from a DeleteDataViewDefaultWithResponse call +func ParseDeleteDataViewDefaultResponse(rsp *http.Response) (*DeleteDataViewDefaultResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &DeleteFleetPackagePoliciesPackagepolicyidResponse{ + response := &DeleteDataViewDefaultResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Id string `json:"id"` - } - if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON200 = &dest - - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest DataViews404Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON400 = &dest + response.JSON404 = &dest } return response, nil } -// ParseGetFleetPackagePoliciesPackagepolicyidResponse parses an HTTP response from a GetFleetPackagePoliciesPackagepolicyidWithResponse call -func ParseGetFleetPackagePoliciesPackagepolicyidResponse(rsp *http.Response) (*GetFleetPackagePoliciesPackagepolicyidResponse, error) { +// ParseGetDataViewDefaultResponse parses an HTTP response from a GetDataViewDefaultWithResponse call +func ParseGetDataViewDefaultResponse(rsp *http.Response) (*GetDataViewDefaultResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetFleetPackagePoliciesPackagepolicyidResponse{ + response := &GetDataViewDefaultResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item PackagePolicy `json:"item"` - } + var dest DataViewsDataViewResponseObject if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON400 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest struct { - Message string `json:"message"` - } + var dest DataViews404Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -25369,76 +142981,118 @@ func ParseGetFleetPackagePoliciesPackagepolicyidResponse(rsp *http.Response) (*G return response, nil } -// ParsePutFleetPackagePoliciesPackagepolicyidResponse parses an HTTP response from a PutFleetPackagePoliciesPackagepolicyidWithResponse call -func ParsePutFleetPackagePoliciesPackagepolicyidResponse(rsp *http.Response) (*PutFleetPackagePoliciesPackagepolicyidResponse, error) { +// ParseUpdateDataViewDefaultResponse parses an HTTP response from a UpdateDataViewDefaultWithResponse call +func ParseUpdateDataViewDefaultResponse(rsp *http.Response) (*UpdateDataViewDefaultResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PutFleetPackagePoliciesPackagepolicyidResponse{ + response := &UpdateDataViewDefaultResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Item PackagePolicy `json:"item"` - } + var dest DataViewsDataViewResponseObject if err := 
json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } + var dest DataViews400Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON400 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: - var dest struct { - Attributes interface{} `json:"attributes"` - Error *string `json:"error,omitempty"` - ErrorType *string `json:"errorType,omitempty"` - Message string `json:"message"` - StatusCode *float32 `json:"statusCode,omitempty"` - } - if err := json.Unmarshal(bodyBytes, &dest); err != nil { - return nil, err - } - response.JSON403 = &dest - } return response, nil } -// ParsePostParametersResponse parses an HTTP response from a PostParametersWithResponse call -func ParsePostParametersResponse(rsp *http.Response) (*PostParametersResponse, error) { +// ParsePostMaintenanceWindowResponse parses an HTTP response from a PostMaintenanceWindowWithResponse call +func ParsePostMaintenanceWindowResponse(rsp *http.Response) (*PostMaintenanceWindowResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostParametersResponse{ + response := &PostMaintenanceWindowResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest CreateParamResponse + var dest struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. 
+ OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status PostMaintenanceWindow200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` + + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -25449,15 +143103,15 @@ func ParsePostParametersResponse(rsp *http.Response) (*PostParametersResponse, e return response, nil } -// ParseDeleteParameterResponse parses an HTTP response from a DeleteParameterWithResponse call -func ParseDeleteParameterResponse(rsp *http.Response) (*DeleteParameterResponse, error) { +// ParseDeleteMaintenanceWindowIdResponse parses an HTTP response from a DeleteMaintenanceWindowIdWithResponse call +func ParseDeleteMaintenanceWindowIdResponse(rsp *http.Response) (*DeleteMaintenanceWindowIdResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &DeleteParameterResponse{ + response := &DeleteMaintenanceWindowIdResponse{ Body: bodyBytes, HTTPResponse: rsp, } @@ -25465,22 +143119,85 @@ func ParseDeleteParameterResponse(rsp *http.Response) (*DeleteParameterResponse, return response, nil } -// ParseGetParameterResponse parses an HTTP response from a GetParameterWithResponse call -func ParseGetParameterResponse(rsp *http.Response) (*GetParameterResponse, error) { +// ParseGetMaintenanceWindowIdResponse parses an HTTP response from a GetMaintenanceWindowIdWithResponse call +func ParseGetMaintenanceWindowIdResponse(rsp *http.Response) (*GetMaintenanceWindowIdResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetParameterResponse{ + response := &GetMaintenanceWindowIdResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest SyntheticsGetParameterResponse + var dest struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. + Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. 
+ Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status GetMaintenanceWindowId200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` + + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. + UpdatedBy *string `json:"updated_by"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -25491,22 +143208,85 @@ func ParseGetParameterResponse(rsp *http.Response) (*GetParameterResponse, error return response, nil } -// ParsePutParameterResponse parses an HTTP response from a PutParameterWithResponse call -func ParsePutParameterResponse(rsp *http.Response) (*PutParameterResponse, error) { +// ParsePatchMaintenanceWindowIdResponse parses an HTTP response from a PatchMaintenanceWindowIdWithResponse call +func ParsePatchMaintenanceWindowIdResponse(rsp *http.Response) (*PatchMaintenanceWindowIdResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PutParameterResponse{ + response := &PatchMaintenanceWindowIdResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest map[string]interface{} + var dest struct { + // CreatedAt The date and time when the maintenance window was created. + CreatedAt string `json:"created_at"` + + // CreatedBy The identifier for the user that created the maintenance window. + CreatedBy *string `json:"created_by"` + + // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. 
+ Enabled bool `json:"enabled"` + + // Id The identifier for the maintenance window. + Id string `json:"id"` + Schedule struct { + Custom struct { + // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. + Duration string `json:"duration"` + Recurring *struct { + // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. + End *string `json:"end,omitempty"` + + // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. + Every *string `json:"every,omitempty"` + + // Occurrences The total number of recurrences of the schedule. + Occurrences *float32 `json:"occurrences,omitempty"` + + // OnMonth The specific months for a recurring schedule. Valid values are 1-12. + OnMonth *[]float32 `json:"onMonth,omitempty"` + + // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. + OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` + + // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. + OnWeekDay *[]string `json:"onWeekDay,omitempty"` + } `json:"recurring,omitempty"` + + // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. + Start string `json:"start"` + + // Timezone The timezone of the schedule. The default timezone is UTC. + Timezone *string `json:"timezone,omitempty"` + } `json:"custom"` + } `json:"schedule"` + Scope *struct { + Alerting struct { + Query struct { + // Kql A filter written in Kibana Query Language (KQL). + Kql string `json:"kql"` + } `json:"query"` + } `json:"alerting"` + } `json:"scope,omitempty"` + + // Status The current status of the maintenance window. + Status PatchMaintenanceWindowId200Status `json:"status"` + + // Title The name of the maintenance window. + Title string `json:"title"` + + // UpdatedAt The date and time when the maintenance window was last updated. + UpdatedAt string `json:"updated_at"` + + // UpdatedBy The identifier for the user that last updated this maintenance window. 
+ UpdatedBy *string `json:"updated_by"` + } if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -25517,256 +143297,332 @@ func ParsePutParameterResponse(rsp *http.Response) (*PutParameterResponse, error return response, nil } -// ParseDeleteActionsConnectorIdResponse parses an HTTP response from a DeleteActionsConnectorIdWithResponse call -func ParseDeleteActionsConnectorIdResponse(rsp *http.Response) (*DeleteActionsConnectorIdResponse, error) { +// ParseFindSlosOpResponse parses an HTTP response from a FindSlosOpWithResponse call +func ParseFindSlosOpResponse(rsp *http.Response) (*FindSlosOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &DeleteActionsConnectorIdResponse{ + response := &FindSlosOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } - return response, nil -} + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: + var dest SLOsFindSloResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest -// ParseGetActionsConnectorIdResponse parses an HTTP response from a GetActionsConnectorIdWithResponse call -func ParseGetActionsConnectorIdResponse(rsp *http.Response) (*GetActionsConnectorIdResponse, error) { - bodyBytes, err := io.ReadAll(rsp.Body) - defer func() { _ = rsp.Body.Close() }() - if err != nil { - return nil, err - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - response := &GetActionsConnectorIdResponse{ - Body: bodyBytes, - HTTPResponse: rsp, - } + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest - switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest ConnectorResponse + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SLOs404Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON200 = &dest + response.JSON404 = &dest } return response, nil } -// ParsePostActionsConnectorIdResponse parses an HTTP response from a PostActionsConnectorIdWithResponse call -func ParsePostActionsConnectorIdResponse(rsp *http.Response) (*PostActionsConnectorIdResponse, error) { +// ParseCreateSloOpResponse parses an HTTP response from a CreateSloOpWithResponse call +func ParseCreateSloOpResponse(rsp *http.Response) (*CreateSloOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostActionsConnectorIdResponse{ + response := &CreateSloOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Config *map[string]interface{} `json:"config,omitempty"` - - // ConnectorTypeId The connector type identifier. 
- ConnectorTypeId string `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector is deprecated. - IsDeprecated bool `json:"is_deprecated"` - - // IsMissingSecrets Indicates whether the connector is missing secrets. - IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` + var dest SLOsCreateSloResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured bool `json:"is_preconfigured"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // IsSystemAction Indicates whether the connector is used for system actions. - IsSystemAction bool `json:"is_system_action"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - // Name The name of the rule. - Name string `json:"name"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 409: + var dest SLOs409Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON200 = &dest + response.JSON409 = &dest } return response, nil } -// ParsePutActionsConnectorIdResponse parses an HTTP response from a PutActionsConnectorIdWithResponse call -func ParsePutActionsConnectorIdResponse(rsp *http.Response) (*PutActionsConnectorIdResponse, error) { +// ParseBulkDeleteOpResponse parses an HTTP response from a BulkDeleteOpWithResponse call +func ParseBulkDeleteOpResponse(rsp *http.Response) (*BulkDeleteOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PutActionsConnectorIdResponse{ + response := &BulkDeleteOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - Config *map[string]interface{} `json:"config,omitempty"` - - // ConnectorTypeId The connector type identifier. - ConnectorTypeId string `json:"connector_type_id"` - - // Id The identifier for the connector. - Id string `json:"id"` - - // IsDeprecated Indicates whether the connector is deprecated. - IsDeprecated bool `json:"is_deprecated"` - - // IsMissingSecrets Indicates whether the connector is missing secrets. - IsMissingSecrets *bool `json:"is_missing_secrets,omitempty"` - - // IsPreconfigured Indicates whether the connector is preconfigured. If true, the `config` and `is_missing_secrets` properties are omitted from the response. - IsPreconfigured bool `json:"is_preconfigured"` + var dest SLOsBulkDeleteResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // IsSystemAction Indicates whether the connector is used for system actions. 
- IsSystemAction bool `json:"is_system_action"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // Name The name of the rule. - Name string `json:"name"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON200 = &dest + response.JSON403 = &dest } return response, nil } -// ParseGetActionsConnectorsResponse parses an HTTP response from a GetActionsConnectorsWithResponse call -func ParseGetActionsConnectorsResponse(rsp *http.Response) (*GetActionsConnectorsResponse, error) { +// ParseBulkDeleteStatusOpResponse parses an HTTP response from a BulkDeleteStatusOpWithResponse call +func ParseBulkDeleteStatusOpResponse(rsp *http.Response) (*BulkDeleteStatusOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetActionsConnectorsResponse{ + response := &BulkDeleteStatusOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest []ConnectorResponse + var dest SLOsBulkDeleteStatusResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + } return response, nil } -// ParseGetAllDataViewsDefaultResponse parses an HTTP response from a GetAllDataViewsDefaultWithResponse call -func ParseGetAllDataViewsDefaultResponse(rsp *http.Response) (*GetAllDataViewsDefaultResponse, error) { +// ParseDeleteRollupDataOpResponse parses an HTTP response from a DeleteRollupDataOpWithResponse call +func ParseDeleteRollupDataOpResponse(rsp *http.Response) (*DeleteRollupDataOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetAllDataViewsDefaultResponse{ + response := &DeleteRollupDataOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - DataView *[]GetDataViewsResponseItem `json:"data_view,omitempty"` - } + var dest SLOsBulkPurgeRollupResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest DataViews400Response 
+ var dest SLOs400Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON400 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + } return response, nil } -// ParseCreateDataViewDefaultwResponse parses an HTTP response from a CreateDataViewDefaultwWithResponse call -func ParseCreateDataViewDefaultwResponse(rsp *http.Response) (*CreateDataViewDefaultwResponse, error) { +// ParseDeleteSloInstancesOpResponse parses an HTTP response from a DeleteSloInstancesOpWithResponse call +func ParseDeleteSloInstancesOpResponse(rsp *http.Response) (*DeleteSloInstancesOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &CreateDataViewDefaultwResponse{ + response := &DeleteSloInstancesOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest DataViewsDataViewResponseObject + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON200 = &dest + response.JSON400 = &dest - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest DataViews400Response + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON400 = &dest + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest } return response, nil } -// ParseDeleteDataViewDefaultResponse parses an HTTP response from a DeleteDataViewDefaultWithResponse call -func ParseDeleteDataViewDefaultResponse(rsp *http.Response) (*DeleteDataViewDefaultResponse, error) { +// ParseDeleteSloOpResponse parses an HTTP response from a DeleteSloOpWithResponse call +func ParseDeleteSloOpResponse(rsp *http.Response) (*DeleteSloOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &DeleteDataViewDefaultResponse{ + response := &DeleteSloOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err 
+ } + response.JSON403 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest DataViews404Response + var dest SLOs404Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -25777,29 +143633,50 @@ func ParseDeleteDataViewDefaultResponse(rsp *http.Response) (*DeleteDataViewDefa return response, nil } -// ParseGetDataViewDefaultResponse parses an HTTP response from a GetDataViewDefaultWithResponse call -func ParseGetDataViewDefaultResponse(rsp *http.Response) (*GetDataViewDefaultResponse, error) { +// ParseGetSloOpResponse parses an HTTP response from a GetSloOpWithResponse call +func ParseGetSloOpResponse(rsp *http.Response) (*GetSloOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetDataViewDefaultResponse{ + response := &GetSloOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest DataViewsDataViewResponseObject + var dest SLOsSloWithSummaryResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: - var dest DataViews404Response + var dest SLOs404Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } @@ -25810,316 +143687,249 @@ func ParseGetDataViewDefaultResponse(rsp *http.Response) (*GetDataViewDefaultRes return response, nil } -// ParseUpdateDataViewDefaultResponse parses an HTTP response from a UpdateDataViewDefaultWithResponse call -func ParseUpdateDataViewDefaultResponse(rsp *http.Response) (*UpdateDataViewDefaultResponse, error) { +// ParseUpdateSloOpResponse parses an HTTP response from a UpdateSloOpWithResponse call +func ParseUpdateSloOpResponse(rsp *http.Response) (*UpdateSloOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &UpdateDataViewDefaultResponse{ + response := &UpdateSloOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest DataViewsDataViewResponseObject + var dest SLOsSloDefinitionResponse if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON200 = &dest case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: - var dest DataViews400Response + var dest SLOs400Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } response.JSON400 = &dest + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err 
:= json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SLOs404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + } return response, nil } -// ParsePostMaintenanceWindowResponse parses an HTTP response from a PostMaintenanceWindowWithResponse call -func ParsePostMaintenanceWindowResponse(rsp *http.Response) (*PostMaintenanceWindowResponse, error) { +// ParseResetSloOpResponse parses an HTTP response from a ResetSloOpWithResponse call +func ParseResetSloOpResponse(rsp *http.Response) (*ResetSloOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PostMaintenanceWindowResponse{ + response := &ResetSloOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - // CreatedAt The date and time when the maintenance window was created. - CreatedAt string `json:"created_at"` - - // CreatedBy The identifier for the user that created the maintenance window. - CreatedBy *string `json:"created_by"` - - // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. - Enabled bool `json:"enabled"` - - // Id The identifier for the maintenance window. - Id string `json:"id"` - Schedule struct { - Custom struct { - // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. - Duration string `json:"duration"` - Recurring *struct { - // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. - End *string `json:"end,omitempty"` - - // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. - Every *string `json:"every,omitempty"` - - // Occurrences The total number of recurrences of the schedule. - Occurrences *float32 `json:"occurrences,omitempty"` - - // OnMonth The specific months for a recurring schedule. Valid values are 1-12. - OnMonth *[]float32 `json:"onMonth,omitempty"` - - // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. - OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` - - // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. - OnWeekDay *[]string `json:"onWeekDay,omitempty"` - } `json:"recurring,omitempty"` - - // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. - Start string `json:"start"` - - // Timezone The timezone of the schedule. The default timezone is UTC. 
- Timezone *string `json:"timezone,omitempty"` - } `json:"custom"` - } `json:"schedule"` - Scope *struct { - Alerting struct { - Query struct { - // Kql A filter written in Kibana Query Language (KQL). - Kql string `json:"kql"` - } `json:"query"` - } `json:"alerting"` - } `json:"scope,omitempty"` - - // Status The current status of the maintenance window. - Status PostMaintenanceWindow200Status `json:"status"` + var dest SLOsSloDefinitionResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // Title The name of the maintenance window. - Title string `json:"title"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // UpdatedAt The date and time when the maintenance window was last updated. - UpdatedAt string `json:"updated_at"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - // UpdatedBy The identifier for the user that last updated this maintenance window. - UpdatedBy *string `json:"updated_by"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SLOs404Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON200 = &dest + response.JSON404 = &dest } return response, nil } -// ParseDeleteMaintenanceWindowIdResponse parses an HTTP response from a DeleteMaintenanceWindowIdWithResponse call -func ParseDeleteMaintenanceWindowIdResponse(rsp *http.Response) (*DeleteMaintenanceWindowIdResponse, error) { +// ParseDisableSloOpResponse parses an HTTP response from a DisableSloOpWithResponse call +func ParseDisableSloOpResponse(rsp *http.Response) (*DisableSloOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &DeleteMaintenanceWindowIdResponse{ + response := &DisableSloOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } + switch { + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SLOs404Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON404 = &dest + + } + return response, nil } -// ParseGetMaintenanceWindowIdResponse parses an HTTP response from a GetMaintenanceWindowIdWithResponse call -func 
ParseGetMaintenanceWindowIdResponse(rsp *http.Response) (*GetMaintenanceWindowIdResponse, error) { +// ParseEnableSloOpResponse parses an HTTP response from a EnableSloOpWithResponse call +func ParseEnableSloOpResponse(rsp *http.Response) (*EnableSloOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &GetMaintenanceWindowIdResponse{ + response := &EnableSloOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { - case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - // CreatedAt The date and time when the maintenance window was created. - CreatedAt string `json:"created_at"` - - // CreatedBy The identifier for the user that created the maintenance window. - CreatedBy *string `json:"created_by"` - - // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. - Enabled bool `json:"enabled"` - - // Id The identifier for the maintenance window. - Id string `json:"id"` - Schedule struct { - Custom struct { - // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. - Duration string `json:"duration"` - Recurring *struct { - // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. - End *string `json:"end,omitempty"` - - // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. - Every *string `json:"every,omitempty"` - - // Occurrences The total number of recurrences of the schedule. - Occurrences *float32 `json:"occurrences,omitempty"` - - // OnMonth The specific months for a recurring schedule. Valid values are 1-12. - OnMonth *[]float32 `json:"onMonth,omitempty"` - - // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. - OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` - - // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. - OnWeekDay *[]string `json:"onWeekDay,omitempty"` - } `json:"recurring,omitempty"` - - // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. - Start string `json:"start"` - - // Timezone The timezone of the schedule. The default timezone is UTC. - Timezone *string `json:"timezone,omitempty"` - } `json:"custom"` - } `json:"schedule"` - Scope *struct { - Alerting struct { - Query struct { - // Kql A filter written in Kibana Query Language (KQL). - Kql string `json:"kql"` - } `json:"query"` - } `json:"alerting"` - } `json:"scope,omitempty"` - - // Status The current status of the maintenance window. - Status GetMaintenanceWindowId200Status `json:"status"` - - // Title The name of the maintenance window. - Title string `json:"title"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // UpdatedAt The date and time when the maintenance window was last updated. 
- UpdatedAt string `json:"updated_at"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON401 = &dest - // UpdatedBy The identifier for the user that last updated this maintenance window. - UpdatedBy *string `json:"updated_by"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON403 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 404: + var dest SLOs404Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON200 = &dest + response.JSON404 = &dest } return response, nil } -// ParsePatchMaintenanceWindowIdResponse parses an HTTP response from a PatchMaintenanceWindowIdWithResponse call -func ParsePatchMaintenanceWindowIdResponse(rsp *http.Response) (*PatchMaintenanceWindowIdResponse, error) { +// ParseGetDefinitionsOpResponse parses an HTTP response from a GetDefinitionsOpWithResponse call +func ParseGetDefinitionsOpResponse(rsp *http.Response) (*GetDefinitionsOpResponse, error) { bodyBytes, err := io.ReadAll(rsp.Body) defer func() { _ = rsp.Body.Close() }() if err != nil { return nil, err } - response := &PatchMaintenanceWindowIdResponse{ + response := &GetDefinitionsOpResponse{ Body: bodyBytes, HTTPResponse: rsp, } switch { case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 200: - var dest struct { - // CreatedAt The date and time when the maintenance window was created. - CreatedAt string `json:"created_at"` - - // CreatedBy The identifier for the user that created the maintenance window. - CreatedBy *string `json:"created_by"` - - // Enabled Whether the current maintenance window is enabled. Disabled maintenance windows do not suppress notifications. - Enabled bool `json:"enabled"` - - // Id The identifier for the maintenance window. - Id string `json:"id"` - Schedule struct { - Custom struct { - // Duration The duration of the schedule. It allows values in `` format. `` is one of `d`, `h`, `m`, or `s` for hours, minutes, seconds. For example: `1d`, `5h`, `30m`, `5000s`. - Duration string `json:"duration"` - Recurring *struct { - // End The end date of a recurring schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-04-01T00:00:00.000Z`. - End *string `json:"end,omitempty"` - - // Every The interval and frequency of a recurring schedule. It allows values in `` format. `` is one of `d`, `w`, `M`, or `y` for days, weeks, months, years. For example: `15d`, `2w`, `3m`, `1y`. - Every *string `json:"every,omitempty"` - - // Occurrences The total number of recurrences of the schedule. - Occurrences *float32 `json:"occurrences,omitempty"` - - // OnMonth The specific months for a recurring schedule. Valid values are 1-12. - OnMonth *[]float32 `json:"onMonth,omitempty"` - - // OnMonthDay The specific days of the month for a recurring schedule. Valid values are 1-31. - OnMonthDay *[]float32 `json:"onMonthDay,omitempty"` - - // OnWeekDay The specific days of the week (`[MO,TU,WE,TH,FR,SA,SU]`) or nth day of month (`[+1MO, -3FR, +2WE, -4SA, -5SU]`) for a recurring schedule. 
- OnWeekDay *[]string `json:"onWeekDay,omitempty"` - } `json:"recurring,omitempty"` - - // Start The start date and time of the schedule, provided in ISO 8601 format and set to the UTC timezone. For example: `2025-03-12T12:00:00.000Z`. - Start string `json:"start"` - - // Timezone The timezone of the schedule. The default timezone is UTC. - Timezone *string `json:"timezone,omitempty"` - } `json:"custom"` - } `json:"schedule"` - Scope *struct { - Alerting struct { - Query struct { - // Kql A filter written in Kibana Query Language (KQL). - Kql string `json:"kql"` - } `json:"query"` - } `json:"alerting"` - } `json:"scope,omitempty"` - - // Status The current status of the maintenance window. - Status PatchMaintenanceWindowId200Status `json:"status"` - - // Title The name of the maintenance window. - Title string `json:"title"` + var dest SLOsFindSloDefinitionsResponse + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON200 = &dest - // UpdatedAt The date and time when the maintenance window was last updated. - UpdatedAt string `json:"updated_at"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 400: + var dest SLOs400Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err + } + response.JSON400 = &dest - // UpdatedBy The identifier for the user that last updated this maintenance window. - UpdatedBy *string `json:"updated_by"` + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 401: + var dest SLOs401Response + if err := json.Unmarshal(bodyBytes, &dest); err != nil { + return nil, err } + response.JSON401 = &dest + + case strings.Contains(rsp.Header.Get("Content-Type"), "json") && rsp.StatusCode == 403: + var dest SLOs403Response if err := json.Unmarshal(bodyBytes, &dest); err != nil { return nil, err } - response.JSON200 = &dest + response.JSON403 = &dest } diff --git a/generated/kbapi/transform_schema.go b/generated/kbapi/transform_schema.go index 1e6af5f1d..20e5ea8e2 100644 --- a/generated/kbapi/transform_schema.go +++ b/generated/kbapi/transform_schema.go @@ -116,19 +116,29 @@ type Path struct { func (p Path) Endpoints(yield func(key string, endpoint Map) bool) { if p.Get != nil { - yield("get", p.Get) + if !yield("get", p.Get) { + return + } } if p.Post != nil { - yield("post", p.Post) + if !yield("post", p.Post) { + return + } } if p.Put != nil { - yield("put", p.Put) + if !yield("put", p.Put) { + return + } } if p.Patch != nil { - yield("patch", p.Patch) + if !yield("patch", p.Patch) { + return + } } if p.Delete != nil { - yield("delete", p.Delete) + if !yield("delete", p.Delete) { + return + } } } @@ -546,76 +556,21 @@ func (s Slice) atoi(key string) int { type TransformFunc func(schema *Schema) var transformers = []TransformFunc{ - transformFilterPaths, transformRemoveKbnXsrf, transformRemoveApiVersionParam, transformSimplifyContentType, transformAddMisingDescriptions, transformKibanaPaths, transformFleetPaths, + removeBrokenDiscriminator, + fixPutSecurityRoleName, + fixGetSpacesParams, + fixGetSyntheticsMonitorsParams, transformRemoveExamples, transformRemoveUnusedComponents, transformOmitEmptyNullable, } -// transformFilterPaths filters the paths in a schema down to a specified list -// of endpoints and methods. 
-func transformFilterPaths(schema *Schema) { - var includePaths = map[string][]string{ - "/api/data_views": {"get"}, - "/api/data_views/data_view": {"post"}, - "/api/data_views/data_view/{viewId}": {"get", "post", "delete"}, - "/api/fleet/agent_policies": {"get", "post"}, - "/api/fleet/agent_policies/delete": {"post"}, - "/api/fleet/agent_policies/{agentPolicyId}": {"get", "put"}, - "/api/fleet/enrollment_api_keys": {"get"}, - "/api/fleet/epm/packages": {"get", "post"}, - "/api/fleet/epm/packages/{pkgName}/{pkgVersion}": {"get", "post", "delete"}, - "/api/fleet/fleet_server_hosts": {"get", "post"}, - "/api/fleet/fleet_server_hosts/{itemId}": {"get", "put", "delete"}, - "/api/fleet/outputs": {"get", "post"}, - "/api/fleet/outputs/{outputId}": {"get", "put", "delete"}, - "/api/fleet/package_policies": {"get", "post"}, - "/api/fleet/package_policies/{packagePolicyId}": {"get", "put", "delete"}, - "/api/synthetics/params": {"post"}, - "/api/synthetics/params/{id}": {"get", "put", "delete"}, - "/api/apm/settings/agent-configuration": {"get", "put", "delete"}, - "/api/maintenance_window": {"post"}, - "/api/maintenance_window/{id}": {"delete", "get", "patch"}, - "/api/actions/connector/{id}": {"get", "put", "post", "delete"}, - "/api/actions/connectors": {"get"}, - } - - for path, pathInfo := range schema.Paths { - if allowedMethods, ok := includePaths[path]; ok { - // Filter out endpoints not if filter list - for method := range pathInfo.Endpoints { - if !slices.Contains(allowedMethods, method) { - pathInfo.SetEndpoint(method, nil) - } - } - } else { - // Remove paths not in filter list. - delete(schema.Paths, path) - } - } - - // Go through again, verify each entry exists - for path, methods := range includePaths { - pathInfo := schema.GetPath(path) - if pathInfo == nil { - log.Panicf("Missing path %q", path) - } - - for _, method := range methods { - endpoint := pathInfo.GetEndpoint(method) - if endpoint == nil { - log.Panicf("Missing method %q of %q", method, path) - } - } - } -} - // transformRemoveKbnXsrf removes the kbn-xsrf header as it is already applied // in the client. 
func transformRemoveKbnXsrf(schema *Schema) { @@ -710,7 +665,7 @@ func transformAddMisingDescriptions(schema *Schema) { for _, endpoint := range pathInfo.Endpoints { responses, ok := endpoint.GetMap("responses") if !ok { - return + continue } for code := range responses { @@ -856,6 +811,55 @@ func transformKibanaPaths(schema *Schema) { schema.Components.CreateRef(schema, "Data_views_create_data_view_request_object_inner", "schemas.Data_views_create_data_view_request_object.properties.data_view") schema.Components.CreateRef(schema, "Data_views_update_data_view_request_object_inner", "schemas.Data_views_update_data_view_request_object.properties.data_view") + +} + +func removeBrokenDiscriminator(schema *Schema) { + brokenDiscriminatorPaths := map[string]string{ + "/api/detection_engine/rules/preview": "post", + "/api/synthetics/monitors": "post", + "/api/synthetics/monitors/{id}": "put", + } + + brokenDiscriminatorComponents := []string{ + "Security_AI_Assistant_API_KnowledgeBaseEntryCreateProps", + "Security_AI_Assistant_API_KnowledgeBaseEntryResponse", + "Security_AI_Assistant_API_KnowledgeBaseEntryUpdateProps", + "Security_AI_Assistant_API_KnowledgeBaseEntryUpdateRouteProps", + "Security_Detections_API_RuleCreateProps", + "Security_Detections_API_RuleResponse", + "Security_Detections_API_RuleSource", + "Security_Detections_API_RuleUpdateProps", + "Security_Endpoint_Exceptions_API_ExceptionListItemEntry", + "Security_Exceptions_API_ExceptionListItemEntry", + } + + for _, component := range brokenDiscriminatorComponents { + schema.Components.Delete(fmt.Sprintf("schemas.%s.discriminator", component)) + } + + for path, method := range brokenDiscriminatorPaths { + schema.MustGetPath(path).MustGetEndpoint(method).Delete("requestBody.content.application/json.schema.discriminator") + } +} + +func fixPutSecurityRoleName(schema *Schema) { + putEndpoint := schema.MustGetPath("/api/security/role/{name}").MustGetEndpoint("put") + putEndpoint.Delete("requestBody.content.application/json.schema.properties.kibana.items.properties.base.anyOf") + putEndpoint.Move("requestBody.content.application/json.schema.properties.kibana.items.properties.spaces.anyOf.1", "requestBody.content.application/json.schema.properties.kibana.items.properties.spaces") + + postEndpoint := schema.MustGetPath("/api/security/roles").MustGetEndpoint("post") + postEndpoint.Move("requestBody.content.application/json.schema.properties.roles.additionalProperties", "requestBody.content.application/json.schema.properties.roles") + postEndpoint.Delete("requestBody.content.application/json.schema.properties.roles.properties.kibana.items.properties.base.anyOf") + postEndpoint.Move("requestBody.content.application/json.schema.properties.roles.properties.kibana.items.properties.spaces.anyOf.1", "requestBody.content.application/json.schema.properties.roles.properties.kibana.items.properties.spaces") +} + +func fixGetSpacesParams(schema *Schema) { + schema.MustGetPath("/api/spaces/space").MustGetEndpoint("get").Delete("parameters.1.schema.anyOf") +} + +func fixGetSyntheticsMonitorsParams(schema *Schema) { + schema.MustGetPath("/api/synthetics/monitors").MustGetEndpoint("get").Move("parameters.12.schema.oneOf.1", "parameters.12.schema") } // transformFleetPaths fixes the fleet paths. 
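The `Path.Endpoints` change in the transform_schema.go hunk above matters because of Go 1.23 range-over-func semantics: a push iterator must stop calling `yield` once it returns false, otherwise an early `break` in the calling loop panics at runtime. The following is a minimal, self-contained sketch of that pattern (requires Go 1.23+); `endpointSet` and its fields are hypothetical stand-ins for illustration, not the provider's real `Path` type.

package main

import "fmt"

// endpointSet mimics a struct holding a fixed set of optional endpoints,
// exposed through a push-style iterator (hypothetical simplified type).
type endpointSet struct {
	get, post, del map[string]any
}

// Endpoints yields each non-nil endpoint. Checking yield's return value is
// the point of the patch above: without it, a caller that breaks out of the
// range loop early would trigger a runtime panic under range-over-func.
func (e endpointSet) Endpoints(yield func(method string, endpoint map[string]any) bool) {
	if e.get != nil {
		if !yield("get", e.get) {
			return
		}
	}
	if e.post != nil {
		if !yield("post", e.post) {
			return
		}
	}
	if e.del != nil {
		if !yield("delete", e.del) {
			return
		}
	}
}

func main() {
	set := endpointSet{
		get:  map[string]any{"operationId": "getThing"},
		post: map[string]any{"operationId": "createThing"},
	}
	for method := range set.Endpoints {
		fmt.Println(method)
		break // safe only because Endpoints stops once yield returns false
	}
}
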
diff --git a/internal/clients/fleet/fleet.go b/internal/clients/fleet/fleet.go index a9e46c94d..aea02011d 100644 --- a/internal/clients/fleet/fleet.go +++ b/internal/clients/fleet/fleet.go @@ -292,7 +292,7 @@ func CreatePackagePolicy(ctx context.Context, client *Client, req kbapi.PackageP // UpdatePackagePolicy updates an existing package policy. func UpdatePackagePolicy(ctx context.Context, client *Client, id string, req kbapi.PackagePolicyRequest) (*kbapi.PackagePolicy, diag.Diagnostics) { params := kbapi.PutFleetPackagePoliciesPackagepolicyidParams{ - Format: utils.Pointer(kbapi.Simplified), + Format: utils.Pointer(kbapi.PutFleetPackagePoliciesPackagepolicyidParamsFormatSimplified), } resp, err := client.API.PutFleetPackagePoliciesPackagepolicyidWithResponse(ctx, id, ¶ms, req) diff --git a/internal/fleet/agent_policy/models.go b/internal/fleet/agent_policy/models.go index 1fddf95cd..717466e14 100644 --- a/internal/fleet/agent_policy/models.go +++ b/internal/fleet/agent_policy/models.go @@ -197,10 +197,10 @@ func (model *agentPolicyModel) toAPICreateModel(ctx context.Context, feat featur func (model *agentPolicyModel) toAPIUpdateModel(ctx context.Context, feat features) (kbapi.PutFleetAgentPoliciesAgentpolicyidJSONRequestBody, diag.Diagnostics) { monitoring := make([]kbapi.PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabled, 0, 2) if model.MonitorLogs.ValueBool() { - monitoring = append(monitoring, kbapi.Logs) + monitoring = append(monitoring, kbapi.PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabledLogs) } if model.MonitorMetrics.ValueBool() { - monitoring = append(monitoring, kbapi.Metrics) + monitoring = append(monitoring, kbapi.PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabledMetrics) } if utils.IsKnown(model.SupportsAgentless) && !feat.SupportsSupportsAgentless { From 0da7e29e355fed295a6881fb9aa65fe30e5ab11b Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Wed, 10 Sep 2025 20:41:04 +1000 Subject: [PATCH 48/66] Add required environment variables for acceptance tests to run (#1294) --- .github/copilot-instructions.md | 15 +++++++++++++++ Makefile | 2 +- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md index 27611aa2f..7e340a86c 100644 --- a/.github/copilot-instructions.md +++ b/.github/copilot-instructions.md @@ -59,6 +59,21 @@ Carefully read the issue and think hard about a plan to solve it before coding. - Write additional tests if needed to capture important behaviors or edge cases. - Ensure all tests pass before finalizing. +### 6.1 Acceptance Testing Requirements +When running acceptance tests (`make testacc`), ensure the following: + + +- **Environment Variables** - The following environment variables are required for acceptance tests: + - `ELASTICSEARCH_ENDPOINTS` (default: http://localhost:9200) + - `ELASTICSEARCH_USERNAME` (default: elastic) + - `ELASTICSEARCH_PASSWORD` (default: password) + - `KIBANA_ENDPOINT` (default: http://localhost:5601) + - `TF_ACC` (must be set to "1" to enable acceptance tests) +- **Ensure a valid environment if using `go test`** - Check if the required environment variables are set, if not use the defaults specified above. +- **Always finish with `make testacc`** - This will run all tests. Make sure all tests pass before considering a task complete. +- **Pre-set Environment Variables** - Default environment variables are configured in the Makefile. 
If these defaults are suitable for your testing environment, `make testacc` will work directly without additional setup +- **Docker Environment** - For isolated testing with guaranteed environment setup, use `make docker-testacc` which starts Elasticsearch and Kibana containers automatically + ## 7. Final Verification - Confirm the root cause is fixed. - Review your solution for logic correctness and robustness. diff --git a/Makefile b/Makefile index 770e0f548..9a534f1e5 100644 --- a/Makefile +++ b/Makefile @@ -225,7 +225,7 @@ docker-clean: ## Try to remove provisioned nodes and assigned network .PHONY: docs-generate docs-generate: tools ## Generate documentation for the provider - @ go tool github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs + @ go tool github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-name elasticstack .PHONY: gen From f5a7da30aef4aed0135fafb1d817b00325c99c15 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Thu, 11 Sep 2025 06:51:17 +1000 Subject: [PATCH 49/66] Migrate elasticstack_kibana_action_connector resource to the plugin framework (#1269) * Migrate elasticstack_kibana_action_connector resource to the plugin framework * CHANGELOG.md * Support slack_api * Update internal/kibana/connectors/read.go Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> * Add acceptance test verifying the SDK -> PF migration does not break state * SDK -> terraform-plugin-testing --------- Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com> --- CHANGELOG.md | 2 + docs/resources/kibana_action_connector.md | 15 +- go.mod | 2 +- internal/clients/kibana_oapi/connector.go | 115 +- .../clients/kibana_oapi/connector_test.go | 6 +- internal/kibana/connector.go | 298 --- internal/kibana/connector_data_source.go | 30 + internal/kibana/connector_test.go | 1609 ----------------- internal/kibana/connectors/acc_test.go | 331 ++++ internal/kibana/connectors/config_type.go | 86 + .../kibana/connectors/config_type_test.go | 176 ++ internal/kibana/connectors/config_value.go | 180 ++ .../kibana/connectors/config_value_test.go | 427 +++++ internal/kibana/connectors/create.go | 81 + internal/kibana/connectors/delete.go | 40 + internal/kibana/connectors/models.go | 73 + internal/kibana/connectors/read.go | 69 + internal/kibana/connectors/resource.go | 37 + internal/kibana/connectors/schema.go | 90 + internal/kibana/connectors/update.go | 73 + internal/utils/diag.go | 26 + internal/utils/validators/is_uuid.go | 41 + internal/utils/validators/is_uuid_test.go | 156 ++ provider/plugin_framework.go | 2 + provider/provider.go | 9 +- 25 files changed, 1989 insertions(+), 1985 deletions(-) delete mode 100644 internal/kibana/connector.go delete mode 100644 internal/kibana/connector_test.go create mode 100644 internal/kibana/connectors/acc_test.go create mode 100644 internal/kibana/connectors/config_type.go create mode 100644 internal/kibana/connectors/config_type_test.go create mode 100644 internal/kibana/connectors/config_value.go create mode 100644 internal/kibana/connectors/config_value_test.go create mode 100644 internal/kibana/connectors/create.go create mode 100644 internal/kibana/connectors/delete.go create mode 100644 internal/kibana/connectors/models.go create mode 100644 internal/kibana/connectors/read.go create mode 100644 internal/kibana/connectors/resource.go create mode 100644 internal/kibana/connectors/schema.go create mode 100644 internal/kibana/connectors/update.go create mode 100644 internal/utils/validators/is_uuid.go create mode 
100644 internal/utils/validators/is_uuid_test.go diff --git a/CHANGELOG.md b/CHANGELOG.md index bc8a1b402..d58890df0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,6 +9,8 @@ - Add support for managing cross_cluster API keys in `elasticstack_elasticsearch_security_api_key` ([#1252](https://github.com/elastic/terraform-provider-elasticstack/pull/1252)) - Allow version changes without a destroy/create cycle with `elasticstack_fleet_integration` ([#1255](https://github.com/elastic/terraform-provider-elasticstack/pull/1255)). This fixes an issue where it was impossible to upgrade integrations which are used by an integration policy. - Add `namespace` attribute to `elasticstack_kibana_synthetics_monitor` resource to support setting data stream namespace independently from `space_id` ([#1247](https://github.com/elastic/terraform-provider-elasticstack/pull/1247)) +- Support setting an explicit `connector_id` in `elasticstack_kibana_action_connector`. This attribute already existed, but was being ignored by the provider. Setting the attribute will return an error in Elastic Stack v8.8 and lower since creating a connector with an explicit ID is not supported. ([#1260](https://github.com/elastic/terraform-provider-elasticstack/pull/1260)) +- Migrate `elasticstack_kibana_action_connector` to the Terraform plugin framework ([#1269](https://github.com/elastic/terraform-provider-elasticstack/pull/1269)) - Migrate `elasticstack_elasticsearch_security_role_mapping` resource and data source to Terraform Plugin Framework ([#1279](https://github.com/elastic/terraform-provider-elasticstack/pull/1279)) ## [0.11.17] - 2025-07-21 diff --git a/docs/resources/kibana_action_connector.md b/docs/resources/kibana_action_connector.md index a15f906d0..5eab4c353 100644 --- a/docs/resources/kibana_action_connector.md +++ b/docs/resources/kibana_action_connector.md @@ -66,16 +66,29 @@ resource "elasticstack_kibana_action_connector" "slack-api-connector" { - `config` (String) The configuration for the connector. Configuration properties vary depending on the connector type. - `connector_id` (String) A UUID v1 or v4 to use instead of a randomly generated ID. +- `kibana_connection` (Block List) Kibana connection configuration block. (see [below for nested schema](#nestedblock--kibana_connection)) - `secrets` (String, Sensitive) The secrets configuration for the connector. Secrets configuration properties vary depending on the connector type. - `space_id` (String) An identifier for the space. If space_id is not provided, the default space is used. ### Read-Only -- `id` (String) The ID of this resource. +- `id` (String) Internal identifier of the resource. - `is_deprecated` (Boolean) Indicates whether the connector type is deprecated. - `is_missing_secrets` (Boolean) Indicates whether secrets are missing for the connector. - `is_preconfigured` (Boolean) Indicates whether it is a preconfigured connector. + +### Nested Schema for `kibana_connection` + +Optional: + +- `api_key` (String, Sensitive) API Key to use for authentication to Kibana +- `ca_certs` (List of String) A list of paths to CA certificates to validate the certificate presented by the Kibana server. +- `endpoints` (List of String, Sensitive) A comma-separated list of endpoints where the terraform provider will point to, this must include the http(s) schema and port number. +- `insecure` (Boolean) Disable TLS certificate validation +- `password` (String, Sensitive) Password to use for API authentication to Kibana.
+- `username` (String) Username to use for API authentication to Kibana. + ## Import Import is supported using the following syntax: diff --git a/go.mod b/go.mod index 8321067e0..a553bf000 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/google/gofuzz v1.2.0 github.com/google/uuid v1.6.0 github.com/hashicorp/go-cty v1.5.0 + github.com/hashicorp/go-uuid v1.0.3 github.com/hashicorp/go-version v1.7.0 github.com/hashicorp/terraform-plugin-framework v1.15.1 github.com/hashicorp/terraform-plugin-framework-jsontypes v0.2.0 @@ -217,7 +218,6 @@ require ( github.com/hashicorp/go-multierror v1.1.1 // indirect github.com/hashicorp/go-plugin v1.6.3 // indirect github.com/hashicorp/go-retryablehttp v0.7.7 // indirect - github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/golang-lru v1.0.2 // indirect github.com/hashicorp/hc-install v0.9.2 // indirect github.com/hashicorp/hcl/v2 v2.23.0 // indirect diff --git a/internal/clients/kibana_oapi/connector.go b/internal/clients/kibana_oapi/connector.go index af6177773..4b6b47f63 100644 --- a/internal/clients/kibana_oapi/connector.go +++ b/internal/clients/kibana_oapi/connector.go @@ -11,14 +11,15 @@ import ( "github.com/elastic/terraform-provider-elasticstack/generated/kbapi" "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" + fwdiag "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-log/tflog" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + sdkdiag "github.com/hashicorp/terraform-plugin-sdk/v2/diag" ) -func CreateConnector(ctx context.Context, client *Client, connectorOld models.KibanaActionConnector) (string, diag.Diagnostics) { +func CreateConnector(ctx context.Context, client *Client, connectorOld models.KibanaActionConnector) (string, fwdiag.Diagnostics) { body, err := createConnectorRequestBody(connectorOld) if err != nil { - return "", diag.FromErr(err) + return "", fwdiag.Diagnostics{fwdiag.NewErrorDiagnostic("Failed to create connector request body", err.Error())} } resp, err := client.API.PostActionsConnectorIdWithResponse( @@ -35,40 +36,40 @@ func CreateConnector(ctx context.Context, client *Client, connectorOld models.Ki }, ) if err != nil { - return "", diag.FromErr(err) + return "", fwdiag.Diagnostics{fwdiag.NewErrorDiagnostic("HTTP request failed", err.Error())} } switch resp.StatusCode() { case http.StatusOK: return resp.JSON200.Id, nil default: - return "", reportUnknownErrorSDK(resp.StatusCode(), resp.Body) + return "", reportUnknownError(resp.StatusCode(), resp.Body) } } -func UpdateConnector(ctx context.Context, client *Client, connectorOld models.KibanaActionConnector) (string, diag.Diagnostics) { +func UpdateConnector(ctx context.Context, client *Client, connectorOld models.KibanaActionConnector) (string, fwdiag.Diagnostics) { body, err := updateConnectorRequestBody(connectorOld) if err != nil { - return "", diag.FromErr(err) + return "", fwdiag.Diagnostics{fwdiag.NewErrorDiagnostic("Failed to create update request body", err.Error())} } resp, err := client.API.PutActionsConnectorIdWithResponse(ctx, connectorOld.SpaceID, connectorOld.ConnectorID, body) if err != nil { - return "", diag.Errorf("unable to update connector: [%v]", err) + return "", fwdiag.Diagnostics{fwdiag.NewErrorDiagnostic("Unable to update connector", err.Error())} } switch resp.StatusCode() { case http.StatusOK: return resp.JSON200.Id, nil default: - return "", 
reportUnknownErrorSDK(resp.StatusCode(), resp.Body) + return "", reportUnknownError(resp.StatusCode(), resp.Body) } } -func GetConnector(ctx context.Context, client *Client, connectorID, spaceID string) (*models.KibanaActionConnector, diag.Diagnostics) { +func GetConnector(ctx context.Context, client *Client, connectorID, spaceID string) (*models.KibanaActionConnector, fwdiag.Diagnostics) { resp, err := client.API.GetActionsConnectorIdWithResponse(ctx, spaceID, connectorID) if err != nil { - return nil, diag.Errorf("unable to get connector: [%v]", err) + return nil, fwdiag.Diagnostics{fwdiag.NewErrorDiagnostic("Unable to get connector", err.Error())} } switch resp.StatusCode() { @@ -77,14 +78,14 @@ func GetConnector(ctx context.Context, client *Client, connectorID, spaceID stri case http.StatusNotFound: return nil, nil default: - return nil, reportUnknownErrorSDK(resp.StatusCode(), resp.Body) + return nil, reportUnknownError(resp.StatusCode(), resp.Body) } } -func SearchConnectors(ctx context.Context, client *Client, connectorName, spaceID, connectorTypeID string) ([]*models.KibanaActionConnector, diag.Diagnostics) { +func SearchConnectors(ctx context.Context, client *Client, connectorName, spaceID, connectorTypeID string) ([]*models.KibanaActionConnector, sdkdiag.Diagnostics) { resp, err := client.API.GetActionsConnectorsWithResponse(ctx, spaceID) if err != nil { - return nil, diag.Errorf("unable to get connectors: [%v]", err) + return nil, sdkdiag.Errorf("unable to get connectors: [%v]", err) } if resp.StatusCode() != http.StatusOK { @@ -101,9 +102,9 @@ func SearchConnectors(ctx context.Context, client *Client, connectorName, spaceI continue } - c, diags := ConnectorResponseToModel(spaceID, &connector) - if diags.HasError() { - return nil, diags + c, fwDiags := ConnectorResponseToModel(spaceID, &connector) + if fwDiags.HasError() { + return nil, utils.SDKDiagsFromFramework(fwDiags) } foundConnectors = append(foundConnectors, c) @@ -115,9 +116,9 @@ func SearchConnectors(ctx context.Context, client *Client, connectorName, spaceI return foundConnectors, nil } -func ConnectorResponseToModel(spaceID string, connector *kbapi.ConnectorResponse) (*models.KibanaActionConnector, diag.Diagnostics) { +func ConnectorResponseToModel(spaceID string, connector *kbapi.ConnectorResponse) (*models.KibanaActionConnector, fwdiag.Diagnostics) { if connector == nil { - return nil, diag.Errorf("connector response is nil") + return nil, fwdiag.Diagnostics{fwdiag.NewErrorDiagnostic("Invalid connector response", "connector response is nil")} } var configJSON []byte @@ -132,7 +133,7 @@ func ConnectorResponseToModel(spaceID string, connector *kbapi.ConnectorResponse var err error configJSON, err = json.Marshal(configMap) if err != nil { - return nil, diag.Errorf("unable to marshal config: %v", err) + return nil, fwdiag.Diagnostics{fwdiag.NewErrorDiagnostic("Unable to marshal config", err.Error())} } // If we have a specific config type, marshal into and out of that to @@ -141,7 +142,7 @@ func ConnectorResponseToModel(spaceID string, connector *kbapi.ConnectorResponse if ok { configJSONString, err := handler.remarshalConfig(string(configJSON)) if err != nil { - return nil, diag.Errorf("failed to remarshal config: %v", err) + return nil, fwdiag.Diagnostics{fwdiag.NewErrorDiagnostic("Failed to remarshal config", err.Error())} } configJSON = []byte(configJSONString) @@ -165,21 +166,21 @@ func ConnectorResponseToModel(spaceID string, connector *kbapi.ConnectorResponse return model, nil } -func DeleteConnector(ctx 
context.Context, client *Client, connectorID string, spaceID string) diag.Diagnostics { +func DeleteConnector(ctx context.Context, client *Client, connectorID string, spaceID string) fwdiag.Diagnostics { resp, err := client.API.DeleteActionsConnectorIdWithResponse(ctx, spaceID, connectorID) if err != nil { - return diag.Errorf("unable to delete connector: [%v]", err) + return fwdiag.Diagnostics{fwdiag.NewErrorDiagnostic("Unable to delete connector", err.Error())} } if resp.StatusCode() != http.StatusOK && resp.StatusCode() != http.StatusNoContent { - return reportUnknownErrorSDK(resp.StatusCode(), resp.Body) + return reportUnknownError(resp.StatusCode(), resp.Body) } return nil } type connectorConfigHandler struct { - defaults func(plan, backend string) (string, error) + defaults func(plan string) (string, error) remarshalConfig func(config string) (string, error) } @@ -193,7 +194,6 @@ var connectorConfigHandlers = map[string]connectorConfigHandler{ remarshalConfig: remarshalConfig[kbapi.EmailConfig], }, ".gemini": { - defaults: connectorConfigWithDefaultsGemini, remarshalConfig: remarshalConfig[kbapi.GeminiConfig], }, ".index": { @@ -205,7 +205,6 @@ var connectorConfigHandlers = map[string]connectorConfigHandler{ remarshalConfig: remarshalConfig[kbapi.JiraConfig], }, ".opsgenie": { - defaults: connectorConfigWithDefaultsOpsgenie, remarshalConfig: remarshalConfig[kbapi.OpsgenieConfig], }, ".pagerduty": { @@ -213,7 +212,6 @@ var connectorConfigHandlers = map[string]connectorConfigHandler{ remarshalConfig: remarshalConfig[kbapi.PagerdutyConfig], }, ".resilient": { - defaults: connectorConfigWithDefaultsResilient, remarshalConfig: remarshalConfig[kbapi.ResilientConfig], }, ".servicenow": { @@ -228,16 +226,17 @@ var connectorConfigHandlers = map[string]connectorConfigHandler{ defaults: connectorConfigWithDefaultsServicenowSir, remarshalConfig: remarshalConfig[kbapi.ServicenowConfig], }, + ".slack_api": { + remarshalConfig: remarshalConfig[kbapi.SlackApiConfig], + }, ".swimlane": { defaults: connectorConfigWithDefaultsSwimlane, remarshalConfig: remarshalConfig[kbapi.SwimlaneConfig], }, ".tines": { - defaults: connectorConfigWithDefaultsTines, remarshalConfig: remarshalConfig[kbapi.TinesConfig], }, ".webhook": { - defaults: connectorConfigWithDefaultsWebhook, remarshalConfig: remarshalConfig[kbapi.WebhookConfig], }, ".xmatters": { @@ -246,13 +245,17 @@ var connectorConfigHandlers = map[string]connectorConfigHandler{ }, } -func ConnectorConfigWithDefaults(connectorTypeID, plan, backend, state string) (string, error) { +func ConnectorConfigWithDefaults(connectorTypeID, plan string) (string, error) { handler, ok := connectorConfigHandlers[connectorTypeID] if !ok { return plan, errors.New("unknown connector type ID: " + connectorTypeID) } - return handler.defaults(plan, backend) + if handler.defaults == nil { + return plan, nil + } + + return handler.defaults(plan) } // User can omit optonal fields in config JSON. 
@@ -271,7 +274,7 @@ func remarshalConfig[T any](plan string) (string, error) { return string(customJSON), nil } -func connectorConfigWithDefaultsCasesWebhook(plan, _ string) (string, error) { +func connectorConfigWithDefaultsCasesWebhook(plan string) (string, error) { var custom kbapi.CasesWebhookConfig if err := json.Unmarshal([]byte(plan), &custom); err != nil { return "", err @@ -292,7 +295,7 @@ func connectorConfigWithDefaultsCasesWebhook(plan, _ string) (string, error) { return string(customJSON), nil } -func connectorConfigWithDefaultsEmail(plan, _ string) (string, error) { +func connectorConfigWithDefaultsEmail(plan string) (string, error) { var custom kbapi.EmailConfig if err := json.Unmarshal([]byte(plan), &custom); err != nil { return "", err @@ -310,11 +313,7 @@ func connectorConfigWithDefaultsEmail(plan, _ string) (string, error) { return string(customJSON), nil } -func connectorConfigWithDefaultsGemini(plan, _ string) (string, error) { - return plan, nil -} - -func connectorConfigWithDefaultsIndex(plan, _ string) (string, error) { +func connectorConfigWithDefaultsIndex(plan string) (string, error) { var custom kbapi.IndexConfig if err := json.Unmarshal([]byte(plan), &custom); err != nil { return "", err @@ -329,32 +328,20 @@ func connectorConfigWithDefaultsIndex(plan, _ string) (string, error) { return string(customJSON), nil } -func connectorConfigWithDefaultsJira(plan, _ string) (string, error) { +func connectorConfigWithDefaultsJira(plan string) (string, error) { return remarshalConfig[kbapi.JiraConfig](plan) } -func connectorConfigWithDefaultsOpsgenie(plan, _ string) (string, error) { - return plan, nil -} - -func connectorConfigWithDefaultsPagerduty(plan, _ string) (string, error) { +func connectorConfigWithDefaultsPagerduty(plan string) (string, error) { return remarshalConfig[kbapi.PagerdutyConfig](plan) } -func connectorConfigWithDefaultsResilient(plan, _ string) (string, error) { - return plan, nil -} - -func connectorConfigWithDefaultsServicenow(plan, backend string) (string, error) { +func connectorConfigWithDefaultsServicenow(plan string) (string, error) { var planConfig kbapi.ServicenowConfig if err := json.Unmarshal([]byte(plan), &planConfig); err != nil { return "", err } - var backendConfig kbapi.ServicenowConfig - if err := json.Unmarshal([]byte(backend), &backendConfig); err != nil { - return "", err - } - if planConfig.IsOAuth == nil && backendConfig.IsOAuth != nil && !*backendConfig.IsOAuth { + if planConfig.IsOAuth == nil { planConfig.IsOAuth = utils.Pointer(false) } if planConfig.UsesTableApi == nil { @@ -367,7 +354,7 @@ func connectorConfigWithDefaultsServicenow(plan, backend string) (string, error) return string(customJSON), nil } -func connectorConfigWithDefaultsServicenowItom(plan, _ string) (string, error) { +func connectorConfigWithDefaultsServicenowItom(plan string) (string, error) { var custom kbapi.ServicenowItomConfig if err := json.Unmarshal([]byte(plan), &custom); err != nil { return "", err @@ -382,11 +369,11 @@ func connectorConfigWithDefaultsServicenowItom(plan, _ string) (string, error) { return string(customJSON), nil } -func connectorConfigWithDefaultsServicenowSir(plan, backend string) (string, error) { - return connectorConfigWithDefaultsServicenow(plan, backend) +func connectorConfigWithDefaultsServicenowSir(plan string) (string, error) { + return connectorConfigWithDefaultsServicenow(plan) } -func connectorConfigWithDefaultsSwimlane(plan, _ string) (string, error) { +func connectorConfigWithDefaultsSwimlane(plan string) (string, 
error) { var custom kbapi.SwimlaneConfig if err := json.Unmarshal([]byte(plan), &custom); err != nil { return "", err @@ -444,15 +431,7 @@ func connectorConfigWithDefaultsSwimlane(plan, _ string) (string, error) { return string(customJSON), nil } -func connectorConfigWithDefaultsTines(plan, _ string) (string, error) { - return plan, nil -} - -func connectorConfigWithDefaultsWebhook(plan, _ string) (string, error) { - return plan, nil -} - -func connectorConfigWithDefaultsXmatters(plan, _ string) (string, error) { +func connectorConfigWithDefaultsXmatters(plan string) (string, error) { var custom kbapi.XmattersConfig if err := json.Unmarshal([]byte(plan), &custom); err != nil { return "", err diff --git a/internal/clients/kibana_oapi/connector_test.go b/internal/clients/kibana_oapi/connector_test.go index af45f0a27..d922d1f6a 100644 --- a/internal/clients/kibana_oapi/connector_test.go +++ b/internal/clients/kibana_oapi/connector_test.go @@ -11,7 +11,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" "github.com/elastic/terraform-provider-elasticstack/internal/models" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + fwdiag "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/stretchr/testify/require" ) @@ -21,7 +21,7 @@ func Test_connectorResponseToModel(t *testing.T) { spaceId string response *kbapi.ConnectorResponse expectedModel *models.KibanaActionConnector - expectedError diag.Diagnostics + expectedError fwdiag.Diagnostics } tests := []testCase{ { @@ -29,7 +29,7 @@ func Test_connectorResponseToModel(t *testing.T) { spaceId: "default", response: nil, expectedModel: nil, - expectedError: diag.Errorf("connector response is nil"), + expectedError: fwdiag.Diagnostics{fwdiag.NewErrorDiagnostic("Invalid connector response", "connector response is nil")}, }, { name: "should map valid connector response to model", diff --git a/internal/kibana/connector.go b/internal/kibana/connector.go deleted file mode 100644 index 419e1500d..000000000 --- a/internal/kibana/connector.go +++ /dev/null @@ -1,298 +0,0 @@ -package kibana - -import ( - "context" - - "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" - "github.com/elastic/terraform-provider-elasticstack/internal/models" - "github.com/elastic/terraform-provider-elasticstack/internal/utils" - "github.com/hashicorp/go-version" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" -) - -var MinVersionSupportingPreconfiguredIDs = version.Must(version.NewVersion("8.8.0")) - -func ResourceActionConnector() *schema.Resource { - var connectorSchema = map[string]*schema.Schema{ - "connector_id": { - Description: "A UUID v1 or v4 to use instead of a randomly generated ID.", - Type: schema.TypeString, - Computed: true, - Optional: true, - ForceNew: true, - ValidateFunc: validation.IsUUID, - }, - "space_id": { - Description: "An identifier for the space. If space_id is not provided, the default space is used.", - Type: schema.TypeString, - Optional: true, - Default: "default", - ForceNew: true, - }, - "name": { - Description: "The name of the connector. 
While this name does not have to be unique, a distinctive name can help you identify a connector.", - Type: schema.TypeString, - Required: true, - }, - "connector_type_id": { - Description: "The ID of the connector type, e.g. `.index`.", - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, - "config": { - Description: "The configuration for the connector. Configuration properties vary depending on the connector type.", - Type: schema.TypeString, - Optional: true, - Computed: true, - ValidateFunc: validation.StringIsJSON, - }, - "secrets": { - Description: "The secrets configuration for the connector. Secrets configuration properties vary depending on the connector type.", - Type: schema.TypeString, - Optional: true, - DiffSuppressFunc: utils.DiffJsonSuppress, - ValidateFunc: validation.StringIsJSON, - Sensitive: true, - }, - "is_deprecated": { - Description: "Indicates whether the connector type is deprecated.", - Type: schema.TypeBool, - Computed: true, - }, - "is_missing_secrets": { - Description: "Indicates whether secrets are missing for the connector.", - Type: schema.TypeBool, - Computed: true, - }, - "is_preconfigured": { - Description: "Indicates whether it is a preconfigured connector.", - Type: schema.TypeBool, - Computed: true, - }, - } - return &schema.Resource{ - Description: "Creates a Kibana action connector. See https://www.elastic.co/guide/en/kibana/current/action-types.html", - - CreateContext: resourceConnectorCreate, - UpdateContext: resourceConnectorUpdate, - ReadContext: resourceConnectorRead, - DeleteContext: resourceConnectorDelete, - CustomizeDiff: connectorCustomizeDiff, - - Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, - }, - - Schema: connectorSchema, - } -} - -func connectorCustomizeDiff(ctx context.Context, rd *schema.ResourceDiff, in interface{}) error { - if !rd.HasChange("config") { - return nil - } - oldVal, newVal := rd.GetChange("config") - oldJSON := oldVal.(string) - newJSON := newVal.(string) - if oldJSON == newJSON { - return nil - } - oldVal, newVal = rd.GetChange("connector_type_id") - oldTypeID := oldVal.(string) - newTypeID := newVal.(string) - if oldTypeID != newTypeID { - return nil - } - - rawState := rd.GetRawState() - if !rawState.IsKnown() || rawState.IsNull() { - return nil - } - - state := rawState.GetAttr("config") - if !state.IsKnown() || state.IsNull() { - return nil - } - - stateJSON := state.AsString() - - customJSON, err := kibana_oapi.ConnectorConfigWithDefaults(oldTypeID, newJSON, oldJSON, stateJSON) - if err != nil { - return err - } - return rd.SetNew("config", string(customJSON)) -} - -func resourceConnectorCreate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - - oapiClient, err := client.GetKibanaOapiClient() - if err != nil { - return diag.FromErr(err) - } - - connectorOld, diags := expandActionConnector(d) - if diags.HasError() { - return diags - } - - version, diags := client.ServerVersion(ctx) - if diags.HasError() { - return diags - } - - if connectorOld.ConnectorID != "" && version.LessThan(MinVersionSupportingPreconfiguredIDs) { - return diag.Errorf("Preconfigured connector IDs are only supported for Elastic Stack v%s and above. 
Either remove the `connector_id` attribute or upgrade your target cluster to supported version", MinVersionSupportingPreconfiguredIDs) - } - - connectorID, diags := kibana_oapi.CreateConnector(ctx, oapiClient, connectorOld) - - if diags.HasError() { - return diags - } - - compositeID := &clients.CompositeId{ClusterId: connectorOld.SpaceID, ResourceId: connectorID} - d.SetId(compositeID.String()) - - return resourceConnectorRead(ctx, d, meta) -} - -func resourceConnectorUpdate(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - - oapiClient, err := client.GetKibanaOapiClient() - if err != nil { - return diag.FromErr(err) - } - - connectorOld, diags := expandActionConnector(d) - if diags.HasError() { - return diags - } - - compositeIDold, diags := clients.CompositeIdFromStr(d.Id()) - if diags.HasError() { - return diags - } - connectorOld.ConnectorID = compositeIDold.ResourceId - - connectorID, diags := kibana_oapi.UpdateConnector(ctx, oapiClient, connectorOld) - - if diags.HasError() { - return diags - } - - compositeIDnew := &clients.CompositeId{ClusterId: connectorOld.SpaceID, ResourceId: connectorID} - d.SetId(compositeIDnew.String()) - - return resourceConnectorRead(ctx, d, meta) -} - -func resourceConnectorRead(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - - oapiClient, err := client.GetKibanaOapiClient() - if err != nil { - return diag.FromErr(err) - } - - compositeID, diags := clients.CompositeIdFromStr(d.Id()) - if diags.HasError() { - return diags - } - - connector, diags := kibana_oapi.GetConnector(ctx, oapiClient, compositeID.ResourceId, compositeID.ClusterId) - if connector == nil && diags == nil { - d.SetId("") - return diags - } - if diags.HasError() { - return diags - } - - return flattenActionConnector(connector, d) -} - -func resourceConnectorDelete(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client, diags := clients.NewApiClientFromSDKResource(d, meta) - if diags.HasError() { - return diags - } - - oapiClient, err := client.GetKibanaOapiClient() - if err != nil { - return diag.FromErr(err) - } - - compositeID, diags := clients.CompositeIdFromStr(d.Id()) - if diags.HasError() { - return diags - } - - spaceId := d.Get("space_id").(string) - - if diags := kibana_oapi.DeleteConnector(ctx, oapiClient, compositeID.ResourceId, spaceId); diags.HasError() { - return diags - } - - d.SetId("") - return nil -} - -func expandActionConnector(d *schema.ResourceData) (models.KibanaActionConnector, diag.Diagnostics) { - var diags diag.Diagnostics - - connector := models.KibanaActionConnector{ - ConnectorID: d.Get("connector_id").(string), - SpaceID: d.Get("space_id").(string), - Name: d.Get("name").(string), - ConnectorTypeID: d.Get("connector_type_id").(string), - } - - connector.ConfigJSON = d.Get("config").(string) - connector.SecretsJSON = d.Get("secrets").(string) - - return connector, diags -} - -func flattenActionConnector(connector *models.KibanaActionConnector, d *schema.ResourceData) diag.Diagnostics { - if err := d.Set("connector_id", connector.ConnectorID); err != nil { - return diag.FromErr(err) - } - if err := d.Set("space_id", connector.SpaceID); err != nil { - return diag.FromErr(err) - } - if err := d.Set("name", connector.Name); err != nil { - return 
diag.FromErr(err) - } - if err := d.Set("connector_type_id", connector.ConnectorTypeID); err != nil { - return diag.FromErr(err) - } - if err := d.Set("config", connector.ConfigJSON); err != nil { - return diag.FromErr(err) - } - if err := d.Set("is_deprecated", connector.IsDeprecated); err != nil { - return diag.FromErr(err) - } - if err := d.Set("is_missing_secrets", connector.IsMissingSecrets); err != nil { - return diag.FromErr(err) - } - if err := d.Set("is_preconfigured", connector.IsPreconfigured); err != nil { - return diag.FromErr(err) - } - - return nil -} diff --git a/internal/kibana/connector_data_source.go b/internal/kibana/connector_data_source.go index 07ec1a249..f0aae554f 100644 --- a/internal/kibana/connector_data_source.go +++ b/internal/kibana/connector_data_source.go @@ -5,6 +5,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) @@ -92,3 +93,32 @@ func datasourceConnectorRead(ctx context.Context, d *schema.ResourceData, meta i return flattenActionConnector(foundConnectors[0], d) } + +func flattenActionConnector(connector *models.KibanaActionConnector, d *schema.ResourceData) diag.Diagnostics { + if err := d.Set("connector_id", connector.ConnectorID); err != nil { + return diag.FromErr(err) + } + if err := d.Set("space_id", connector.SpaceID); err != nil { + return diag.FromErr(err) + } + if err := d.Set("name", connector.Name); err != nil { + return diag.FromErr(err) + } + if err := d.Set("connector_type_id", connector.ConnectorTypeID); err != nil { + return diag.FromErr(err) + } + if err := d.Set("config", connector.ConfigJSON); err != nil { + return diag.FromErr(err) + } + if err := d.Set("is_deprecated", connector.IsDeprecated); err != nil { + return diag.FromErr(err) + } + if err := d.Set("is_missing_secrets", connector.IsMissingSecrets); err != nil { + return diag.FromErr(err) + } + if err := d.Set("is_preconfigured", connector.IsPreconfigured); err != nil { + return diag.FromErr(err) + } + + return nil +} diff --git a/internal/kibana/connector_test.go b/internal/kibana/connector_test.go deleted file mode 100644 index 9005a125d..000000000 --- a/internal/kibana/connector_test.go +++ /dev/null @@ -1,1609 +0,0 @@ -package kibana_test - -import ( - "context" - "fmt" - "regexp" - "testing" - - "github.com/elastic/terraform-provider-elasticstack/internal/acctest" - "github.com/elastic/terraform-provider-elasticstack/internal/clients" - "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" - "github.com/elastic/terraform-provider-elasticstack/internal/kibana" - "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" - "github.com/google/uuid" - "github.com/hashicorp/go-version" - sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/terraform" -) - -func TestAccResourceKibanaConnectorCasesWebhook(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("8.4.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name, id string) string { - idAttribute := "" - if id != "" { - idAttribute = fmt.Sprintf(`connector_id = "%s"`, id) - } 
- return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - %s - config = jsonencode({ - createIncidentJson = "{}" - createIncidentResponseKey = "key" - createIncidentUrl = "https://www.elastic.co/" - getIncidentResponseExternalTitleKey = "title" - getIncidentUrl = "https://www.elastic.co/" - updateIncidentJson = "{}" - updateIncidentUrl = "https://www.elastic.co/" - viewIncidentUrl = "https://www.elastic.co/" - }) - secrets = jsonencode({ - user = "user1" - password = "password1" - }) - connector_type_id = ".cases-webhook" - }`, - name, idAttribute) - } - - update := func(name, id string) string { - idAttribute := "" - if id != "" { - idAttribute = fmt.Sprintf(`connector_id = "%s"`, id) - } - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - %s - config = jsonencode({ - createIncidentJson = "{}" - createIncidentResponseKey = "key" - createIncidentUrl = "https://www.elastic.co/" - getIncidentResponseExternalTitleKey = "title" - getIncidentUrl = "https://www.elastic.co/" - updateIncidentJson = "{}" - updateIncidentUrl = "https://elasticsearch.com/" - viewIncidentUrl = "https://www.elastic.co/" - createIncidentMethod = "put" - }) - secrets = jsonencode({ - user = "user2" - password = "password2" - }) - connector_type_id = ".cases-webhook" - }`, - name, idAttribute) - } - - for _, connectorID := range []string{"", uuid.NewString()} { - t.Run(fmt.Sprintf("with connector ID '%s'", connectorID), func(t *testing.T) { - minVersion := minSupportedVersion - if connectorID != "" { - minVersion = kibana.MinVersionSupportingPreconfiguredIDs - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minVersion), - Config: create(connectorName, connectorID), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".cases-webhook"), - - resource.TestCheckResourceAttrWith("elasticstack_kibana_action_connector.test", "connector_id", func(value string) error { - if connectorID == "" { - if _, err := uuid.Parse(value); err != nil { - return fmt.Errorf("expected connector_id to be a uuid: %w", err) - } - - return nil - } - - if connectorID != value { - return fmt.Errorf("expected connector_id to match pre-defined id. 
'%s' != %s", connectorID, value) - } - - return nil - }), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentJson\":\"{}\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentResponseKey\":\"key\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentUrl\":\"https://www\.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentResponseExternalTitleKey\":\"title\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentUrl\":\"https://www\.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentJson\":\"{}\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentUrl\":\"https://www.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"viewIncidentUrl\":\"https://www\.elastic\.co/\"`)), - // `post` is the default value that is returned by backend - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`"createIncidentMethod\":\"post\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"user\":\"user1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minVersion), - Config: update(connectorName, connectorID), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".cases-webhook"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentJson\":\"{}\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentResponseKey\":\"key\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentUrl\":\"https://www\.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentResponseExternalTitleKey\":\"title\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentUrl\":\"https://www\.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentJson\":\"{}\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentUrl\":\"https://elasticsearch\.com/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"viewIncidentUrl\":\"https://www\.elastic\.co/\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`createIncidentMethod\":\"put\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", 
regexp.MustCompile(`\"user\":\"user2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password2\"`)), - ), - }, - }, - }) - }) - } -} - -func TestAccResourceKibanaConnectorEmail(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - from = "test@elastic.co" - port = 111 - host = "localhost" - }) - secrets = jsonencode({}) - connector_type_id = ".email" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - from = "test2@elastic.co" - port = 222 - host = "localhost" - }) - secrets = jsonencode({ - user = "user1" - password = "password1" - }) - connector_type_id = ".email" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".email"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"from\":\"test@elastic\.co\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"port\":111`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"host\":\"localhost\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".email"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"from\":\"test2@elastic\.co\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"port\":222`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"host\":\"localhost\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"user\":\"user1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password1\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorGemini(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("8.15.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - apiUrl = "https://elastic.co", - gcpRegion = "us-central1", - gcpProjectID = "project1", - defaultModel = "gemini-1.5-pro-001" - }) - 
secrets = jsonencode({ - credentialsJson = "secret1" - }) - connector_type_id = ".gemini" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - apiUrl = "https://elasticsearch.com", - gcpRegion = "us-east4", - gcpProjectID = "project2", - defaultModel = "gemini-1.5-pro-001" - }) - secrets = jsonencode({ - credentialsJson = "secret2" - }) - connector_type_id = ".gemini" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".gemini"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elastic\.co\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"gcpRegion\":\"us-central1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"gcpProjectID\":\"project1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"defaultModel\":\"gemini-1.5-pro-001\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"credentialsJson\":\"secret1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".gemini"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elasticsearch\.com\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"gcpRegion\":\"us-east4\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"gcpProjectID\":\"project2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"defaultModel\":\"gemini-1.5-pro-001\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"credentialsJson\":\"secret2\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorIndex(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - index = ".kibana" - refresh = true - }) - connector_type_id = ".index" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - index = ".kibana" - refresh = false - }) - 
connector_type_id = ".index" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".index"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"index\":\"\.kibana\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"refresh\":true`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".index"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"index\":\"\.kibana\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"refresh\":false`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorJira(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - apiUrl = "url1" - projectKey = "project1" - }) - secrets = jsonencode({ - apiToken = "secret1" - email = "email1" - }) - connector_type_id = ".jira" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - apiUrl = "url2" - projectKey = "project2" - }) - secrets = jsonencode({ - apiToken = "secret2" - email = "email2" - }) - connector_type_id = ".jira" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".jira"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"url1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"projectKey\":\"project1\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"apiToken\":\"secret1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"email\":\"email1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".jira"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", 
regexp.MustCompile(`\"apiUrl\":\"url2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"projectKey\":\"project2\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"apiToken\":\"secret2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"email\":\"email2\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorOpsgenie(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("8.6.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - apiUrl = "https://elastic.co" - }) - secrets = jsonencode({ - apiKey = "key1" - }) - connector_type_id = ".opsgenie" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - apiUrl = "https://elasticsearch.com" - }) - secrets = jsonencode({ - apiKey = "key2" - }) - connector_type_id = ".opsgenie" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".opsgenie"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elastic\.co\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"apiKey\":\"key1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".opsgenie"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elasticsearch\.com\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"apiKey\":\"key2\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorPagerduty(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - apiUrl = "https://elastic.co" - }) - secrets = jsonencode({ - routingKey = "test1" - }) - connector_type_id = ".pagerduty" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - apiUrl = "https://elasticsearch.com" - 
}) - secrets = jsonencode({ - routingKey = "test2" - }) - connector_type_id = ".pagerduty" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".pagerduty"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elastic\.co\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"routingKey\":\"test1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".pagerduty"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elasticsearch\.com\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"routingKey\":\"test2\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorResilient(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - apiUrl = "https://elastic.co" - orgId = "id1" - }) - secrets = jsonencode({ - apiKeyId = "key1" - apiKeySecret = "secret1" - }) - connector_type_id = ".resilient" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - apiUrl = "https://elasticsearch.com" - orgId = "id2" - }) - secrets = jsonencode({ - apiKeyId = "key2" - apiKeySecret = "secret2" - }) - connector_type_id = ".resilient" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".resilient"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elastic\.co\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"orgId\":\"id1\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"apiKeyId\":\"key1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"apiKeySecret\":\"secret1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: 
resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".resilient"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elasticsearch\.com\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"orgId\":\"id2\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"apiKeyId\":\"key2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"apiKeySecret\":\"secret2\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorServerLog(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - connector_type_id = ".server-log" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - connector_type_id = ".server-log" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".server-log"), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".server-log"), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorServicenow(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - apiUrl = "https://elastic.co" - }) - secrets = jsonencode({ - username = "user1" - password = "password1" - }) - connector_type_id = ".servicenow" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - apiUrl = "https://elasticsearch.com" - - }) - secrets = jsonencode({ - username = "user2" - password = "password2" - }) - connector_type_id = ".servicenow" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - 
resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "name", connectorName), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "connector_type_id", ".servicenow"), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_missing_secrets", "false"), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_preconfigured", "false"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elastic\.co\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"username\":\"user1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "name", fmt.Sprintf("Updated %s", connectorName)), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "connector_type_id", ".servicenow"), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_missing_secrets", "false"), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_preconfigured", "false"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elasticsearch\.com\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"username\":\"user2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password2\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorServicenowItom(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("8.3.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - apiUrl = "https://elastic.co" - }) - secrets = jsonencode({ - username = "user1" - password = "password1" - }) - connector_type_id = ".servicenow-itom" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - apiUrl = "https://elasticsearch.com" - }) - secrets = jsonencode({ - username = "user2" - password = "password2" - }) - connector_type_id = ".servicenow-itom" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".servicenow-itom"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elastic\.co\"`)), - - 
resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"username\":\"user1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".servicenow-itom"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elasticsearch\.com\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"username\":\"user2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password2\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorServicenowSir(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - apiUrl = "https://elastic.co" - }) - secrets = jsonencode({ - username = "user1" - password = "password1" - }) - connector_type_id = ".servicenow-sir" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - apiUrl = "https://elasticsearch.com" - }) - secrets = jsonencode({ - username = "user2" - password = "password2" - }) - connector_type_id = ".servicenow-sir" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "name", connectorName), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "connector_type_id", ".servicenow-sir"), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_missing_secrets", "false"), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_preconfigured", "false"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elastic\.co\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"username\":\"user1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "name", fmt.Sprintf("Updated %s", connectorName)), - 
resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "connector_type_id", ".servicenow-sir"), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_missing_secrets", "false"), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_preconfigured", "false"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elasticsearch\.com\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"username\":\"user2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password2\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorSlack(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - secrets = jsonencode({ - webhookUrl = "https://elastic.co" - }) - connector_type_id = ".slack" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - secrets = jsonencode({ - webhookUrl = "https://elasticsearch.com" - }) - connector_type_id = ".slack" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".slack"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"webhookUrl\":\"https://elastic\.co\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".slack"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"webhookUrl\":\"https://elasticsearch\.com\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorSlackApi(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("8.8.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - secrets = jsonencode({ - token = "my-token" - }) - connector_type_id = ".slack_api" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - secrets = jsonencode({ - token = "my-updated-token" - }) - connector_type_id = ".slack_api" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { 
acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".slack_api"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"token\":\"my-token\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".slack_api"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"token\":\"my-updated-token\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorSwimlane(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - apiUrl = "https://elastic.co" - appId = "test1" - connectorType = "all" - mappings = { - alertIdConfig = { - fieldType = "type1" - id = "id1" - key = "key1" - name = "name1" - } - } - }) - secrets = jsonencode({ - apiToken = "token1" - }) - connector_type_id = ".swimlane" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - apiUrl = "https://elasticsearch.com" - appId = "test2" - connectorType = "all" - mappings = { - alertIdConfig = { - fieldType = "type2" - id = "id2" - key = "key2" - name = "name2" - } - } - }) - secrets = jsonencode({ - apiToken = "token2" - }) - connector_type_id = ".swimlane" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".swimlane"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elastic\.co\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"appId\":\"test1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"connectorType\":\"all\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"fieldType\":\"type1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"id\":\"id1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"key\":\"key1\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"name\":\"name1\"`)), - - 
resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"apiToken\":\"token1\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".swimlane"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"apiUrl\":\"https://elasticsearch\.com\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"appId\":\"test2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"connectorType\":\"all\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"fieldType\":\"type2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"id\":\"id2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"key\":\"key2\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"name\":\"name2\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"apiToken\":\"token2\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorTeams(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - secrets = jsonencode({ - webhookUrl = "https://elastic.co" - }) - connector_type_id = ".teams" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - secrets = jsonencode({ - webhookUrl = "https://elasticsearch.com" - }) - connector_type_id = ".teams" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".teams"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"webhookUrl\":\"https://elastic\.co\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".teams"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"webhookUrl\":\"https://elasticsearch\.com\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorTines(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("8.6.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - 
- create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - url = "https://elastic.co" - }) - secrets = jsonencode({ - email = "test@elastic.co" - token = "token1" - }) - connector_type_id = ".tines" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - url = "https://elasticsearch.com" - }) - secrets = jsonencode({ - email = "test@elasticsearch.com" - token = "token2" - }) - connector_type_id = ".tines" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".tines"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"url\":\"https://elastic\.co\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"email\":\"test@elastic\.co\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"token\":\"token1"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".tines"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"url\":\"https://elasticsearch\.com\"`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"email\":\"test@elasticsearch\.com\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"token\":\"token2"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorWebhook(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("7.14.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - url = "https://elastic.co" - hasAuth = true - method = "post" - }) - secrets = jsonencode({}) - connector_type_id = ".webhook" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - url = "https://elasticsearch.com" - hasAuth = true - method = "post" - }) - secrets = jsonencode({}) - connector_type_id = ".webhook" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: 
versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".webhook"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"url\":\"https://elastic\.co\"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".webhook"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"url\":\"https://elasticsearch\.com\"`)), - ), - }, - }, - }) -} - -func TestAccResourceKibanaConnectorXmatters(t *testing.T) { - minSupportedVersion := version.Must(version.NewSemver("8.2.0")) - - connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - - create := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "%s" - config = jsonencode({ - configUrl = "https://elastic.co" - usesBasic = true - }) - secrets = jsonencode({ - user = "user1" - password = "password1" - }) - connector_type_id = ".xmatters" - }`, - name) - } - - update := func(name string) string { - return fmt.Sprintf(` - provider "elasticstack" { - elasticsearch {} - kibana {} - } - - resource "elasticstack_kibana_action_connector" "test" { - name = "Updated %s" - config = jsonencode({ - usesBasic = false - }) - secrets = jsonencode({ - secretsUrl = "https://elasticsearch.com" - }) - connector_type_id = ".xmatters" - }`, - name) - } - - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceKibanaConnectorDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: create(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(connectorName, ".xmatters"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"configUrl\":\"https://elastic\.co\"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"usesBasic\":true`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"user\":\"user1"`)), - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password1"`)), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), - Config: update(connectorName), - Check: resource.ComposeTestCheckFunc( - testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".xmatters"), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"usesBasic\":false`)), - - resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"secretsUrl\":\"https://elasticsearch\.com\"`)), - ), - }, - }, - }) -} - -func testCommonAttributes(connectorName, connectorTypeID string) resource.TestCheckFunc { - return resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "name", connectorName), - 
resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "connector_type_id", connectorTypeID), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_deprecated", "false"), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_missing_secrets", "false"), - resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_preconfigured", "false"), - ) -} - -func checkResourceKibanaConnectorDestroy(s *terraform.State) error { - client, err := clients.NewAcceptanceTestingClient() - if err != nil { - return err - } - - oapiClient, err := client.GetKibanaOapiClient() - if err != nil { - return err - } - - for _, rs := range s.RootModule().Resources { - if rs.Type != "elasticstack_kibana_action_connector" { - continue - } - compId, _ := clients.CompositeIdFromStr(rs.Primary.ID) - - connector, diags := kibana_oapi.GetConnector(context.Background(), oapiClient, compId.ResourceId, compId.ClusterId) - if diags.HasError() { - return fmt.Errorf("Failed to get connector: %v", diags) - } - - if connector != nil { - return fmt.Errorf("Action connector (%s) still exists", compId.ResourceId) - } - } - return nil -} diff --git a/internal/kibana/connectors/acc_test.go b/internal/kibana/connectors/acc_test.go new file mode 100644 index 000000000..9b362a18f --- /dev/null +++ b/internal/kibana/connectors/acc_test.go @@ -0,0 +1,331 @@ +package connectors_test + +import ( + "context" + "fmt" + "regexp" + "testing" + + "github.com/elastic/terraform-provider-elasticstack/internal/acctest" + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/elastic/terraform-provider-elasticstack/internal/kibana/connectors" + "github.com/elastic/terraform-provider-elasticstack/internal/versionutils" + "github.com/google/uuid" + "github.com/hashicorp/go-version" + sdkacctest "github.com/hashicorp/terraform-plugin-testing/helper/acctest" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/terraform" +) + +func TestAccResourceKibanaConnectorCasesWebhook(t *testing.T) { + minSupportedVersion := version.Must(version.NewSemver("8.4.0")) + + connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) + + create := func(name, id string) string { + idAttribute := "" + if id != "" { + idAttribute = fmt.Sprintf(`connector_id = "%s"`, id) + } + return fmt.Sprintf(` + provider "elasticstack" { + elasticsearch {} + kibana {} + } + + resource "elasticstack_kibana_action_connector" "test" { + name = "%s" + %s + config = jsonencode({ + createIncidentJson = "{}" + createIncidentResponseKey = "key" + createIncidentUrl = "https://www.elastic.co/" + getIncidentResponseExternalTitleKey = "title" + getIncidentUrl = "https://www.elastic.co/" + updateIncidentJson = "{}" + updateIncidentUrl = "https://www.elastic.co/" + viewIncidentUrl = "https://www.elastic.co/" + }) + secrets = jsonencode({ + user = "user1" + password = "password1" + }) + connector_type_id = ".cases-webhook" + }`, + name, idAttribute) + } + + update := func(name, id string) string { + idAttribute := "" + if id != "" { + idAttribute = fmt.Sprintf(`connector_id = "%s"`, id) + } + return fmt.Sprintf(` + provider "elasticstack" { + elasticsearch {} + kibana {} + } + + resource "elasticstack_kibana_action_connector" "test" { + name = "Updated %s" + %s + config = jsonencode({ + createIncidentJson = "{}" + 
createIncidentResponseKey = "key" + createIncidentUrl = "https://www.elastic.co/" + getIncidentResponseExternalTitleKey = "title" + getIncidentUrl = "https://www.elastic.co/" + updateIncidentJson = "{}" + updateIncidentUrl = "https://elasticsearch.com/" + viewIncidentUrl = "https://www.elastic.co/" + createIncidentMethod = "put" + }) + secrets = jsonencode({ + user = "user2" + password = "password2" + }) + connector_type_id = ".cases-webhook" + }`, + name, idAttribute) + } + + for _, connectorID := range []string{"", uuid.NewString()} { + t.Run(fmt.Sprintf("with connector ID '%s'", connectorID), func(t *testing.T) { + minVersion := minSupportedVersion + if connectorID != "" { + minVersion = connectors.MinVersionSupportingPreconfiguredIDs + } + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + CheckDestroy: checkResourceKibanaConnectorDestroy, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minVersion), + Config: create(connectorName, connectorID), + Check: resource.ComposeTestCheckFunc( + testCommonAttributes(connectorName, ".cases-webhook"), + + resource.TestCheckResourceAttrWith("elasticstack_kibana_action_connector.test", "connector_id", func(value string) error { + if connectorID == "" { + if _, err := uuid.Parse(value); err != nil { + return fmt.Errorf("expected connector_id to be a uuid: %w", err) + } + + return nil + } + + if connectorID != value { + return fmt.Errorf("expected connector_id to match pre-defined id. '%s' != %s", connectorID, value) + } + + return nil + }), + + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentJson\":\"{}\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentResponseKey\":\"key\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentUrl\":\"https://www\.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentResponseExternalTitleKey\":\"title\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentUrl\":\"https://www\.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentJson\":\"{}\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentUrl\":\"https://www.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"viewIncidentUrl\":\"https://www\.elastic\.co/\"`)), + + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"user\":\"user1\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password1\"`)), + ), + }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minVersion), + Config: update(connectorName, connectorID), + Check: resource.ComposeTestCheckFunc( + testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".cases-webhook"), + + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentJson\":\"{}\"`)), + 
resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentResponseKey\":\"key\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentUrl\":\"https://www\.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentResponseExternalTitleKey\":\"title\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"getIncidentUrl\":\"https://www\.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentJson\":\"{}\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"updateIncidentUrl\":\"https://elasticsearch\.com/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"viewIncidentUrl\":\"https://www\.elastic\.co/\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"createIncidentMethod\":\"put\"`)), + + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"user\":\"user2\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "secrets", regexp.MustCompile(`\"password\":\"password2\"`)), + ), + }, + }, + }) + }) + } +} + +func testCommonAttributes(connectorName, connectorTypeID string) resource.TestCheckFunc { + return resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "name", connectorName), + resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "connector_type_id", connectorTypeID), + resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_deprecated", "false"), + resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_missing_secrets", "false"), + resource.TestCheckResourceAttr("elasticstack_kibana_action_connector.test", "is_preconfigured", "false"), + ) +} + +func checkResourceKibanaConnectorDestroy(s *terraform.State) error { + client, err := clients.NewAcceptanceTestingClient() + if err != nil { + return err + } + + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + return err + } + + for _, rs := range s.RootModule().Resources { + if rs.Type != "elasticstack_kibana_action_connector" { + continue + } + compId, _ := clients.CompositeIdFromStr(rs.Primary.ID) + + connector, diags := kibana_oapi.GetConnector(context.Background(), oapiClient, compId.ResourceId, compId.ClusterId) + if diags.HasError() { + return fmt.Errorf("Failed to get connector: %v", diags) + } + + if connector != nil { + return fmt.Errorf("Action connector (%s) still exists", compId.ResourceId) + } + } + return nil +} + +func TestAccResourceKibanaConnectorIndex(t *testing.T) { + minSupportedVersion := version.Must(version.NewSemver("7.14.0")) + + connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) + + create := func(name string) string { + return fmt.Sprintf(` + provider "elasticstack" { + elasticsearch {} + kibana {} + } + + resource "elasticstack_kibana_action_connector" "test" { + name = "%s" + config = jsonencode({ + index = ".kibana" + refresh = true + }) + connector_type_id = ".index" + }`, + name) + } + + update := func(name 
string) string { + return fmt.Sprintf(` + provider "elasticstack" { + elasticsearch {} + kibana {} + } + + resource "elasticstack_kibana_action_connector" "test" { + name = "Updated %s" + config = jsonencode({ + index = ".kibana" + refresh = false + }) + connector_type_id = ".index" + }`, + name) + } + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + CheckDestroy: checkResourceKibanaConnectorDestroy, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), + Config: create(connectorName), + Check: resource.ComposeTestCheckFunc( + testCommonAttributes(connectorName, ".index"), + + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"index\":\"\.kibana\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"refresh\":true`)), + ), + }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), + Config: update(connectorName), + Check: resource.ComposeTestCheckFunc( + testCommonAttributes(fmt.Sprintf("Updated %s", connectorName), ".index"), + + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"index\":\"\.kibana\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"refresh\":false`)), + ), + }, + }, + }) +} + +func TestAccResourceKibanaConnectorFromSDK(t *testing.T) { + minSupportedVersion := version.Must(version.NewSemver("7.14.0")) + + connectorName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) + + create := func(name string) string { + return fmt.Sprintf(` + provider "elasticstack" { + elasticsearch {} + kibana {} + } + + resource "elasticstack_kibana_action_connector" "test" { + name = "%s" + config = jsonencode({ + index = ".kibana" + refresh = true + }) + connector_type_id = ".index" + }`, + name) + } + + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + CheckDestroy: checkResourceKibanaConnectorDestroy, + Steps: []resource.TestStep{ + { + // Create the connector with the last provider version where the connector resource was built on the SDK + ExternalProviders: map[string]resource.ExternalProvider{ + "elasticstack": { + Source: "elastic/elasticstack", + VersionConstraint: "0.11.17", + }, + }, + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), + Config: create(connectorName), + Check: resource.ComposeTestCheckFunc( + testCommonAttributes(connectorName, ".index"), + + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"index\":\"\.kibana\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"refresh\":true`)), + ), + }, + { + ProtoV6ProviderFactories: acctest.Providers, + SkipFunc: versionutils.CheckIfVersionIsUnsupported(minSupportedVersion), + Config: create(connectorName), + Check: resource.ComposeTestCheckFunc( + testCommonAttributes(connectorName, ".index"), + + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"index\":\"\.kibana\"`)), + resource.TestMatchResourceAttr("elasticstack_kibana_action_connector.test", "config", regexp.MustCompile(`\"refresh\":true`)), + ), + }, + }, + }) +} diff --git a/internal/kibana/connectors/config_type.go 
b/internal/kibana/connectors/config_type.go new file mode 100644 index 000000000..9bbbfa104 --- /dev/null +++ b/internal/kibana/connectors/config_type.go @@ -0,0 +1,86 @@ +package connectors + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" +) + +var ( + _ basetypes.StringTypable = (*ConfigType)(nil) +) + +type ConfigType struct { + jsontypes.NormalizedType +} + +// String returns a human readable string of the type name. +func (t ConfigType) String() string { + return "connectors.ConfigType" +} + +// ValueType returns the Value type. +func (t ConfigType) ValueType(ctx context.Context) attr.Value { + return ConfigValue{} +} + +// Equal returns true if the given type is equivalent. +func (t ConfigType) Equal(o attr.Type) bool { + other, ok := o.(ConfigType) + + if !ok { + return false + } + + return t.StringType.Equal(other.StringType) +} + +// ValueFromString returns a StringValuable type given a StringValue. +func (t ConfigType) ValueFromString(ctx context.Context, in basetypes.StringValue) (basetypes.StringValuable, diag.Diagnostics) { + var connectorTypeID string + if utils.IsKnown(in) { + var configMap map[string]interface{} + if err := json.Unmarshal([]byte(in.ValueString()), &configMap); err != nil { + return nil, diag.Diagnostics{ + diag.NewErrorDiagnostic("Failed to unmarshal config value", err.Error()), + } + } + + connectorTypeID, _ = configMap[connectorTypeIDKey].(string) + } + + return ConfigValue{ + Normalized: jsontypes.Normalized{ + StringValue: in, + }, + connectorTypeID: connectorTypeID, + }, nil +} + +// ValueFromTerraform returns a Value given a tftypes.Value. This is meant to convert the tftypes.Value into a more convenient Go type +// for the provider to consume the data with. 
+func (t ConfigType) ValueFromTerraform(ctx context.Context, in tftypes.Value) (attr.Value, error) { + attrValue, err := t.StringType.ValueFromTerraform(ctx, in) + if err != nil { + return nil, err + } + + stringValue, ok := attrValue.(basetypes.StringValue) + if !ok { + return nil, fmt.Errorf("unexpected value type of %T", attrValue) + } + + stringValuable, diags := t.ValueFromString(ctx, stringValue) + if diags.HasError() { + return nil, fmt.Errorf("unexpected error converting StringValue to StringValuable: %v", diags) + } + + return stringValuable, nil +} diff --git a/internal/kibana/connectors/config_type_test.go b/internal/kibana/connectors/config_type_test.go new file mode 100644 index 000000000..13f61c78b --- /dev/null +++ b/internal/kibana/connectors/config_type_test.go @@ -0,0 +1,176 @@ +package connectors + +import ( + "context" + "testing" + + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/hashicorp/terraform-plugin-go/tftypes" + "github.com/stretchr/testify/require" +) + +func TestConfigType_ValueFromString(t *testing.T) { + tests := []struct { + name string + input basetypes.StringValue + expectedConnectorID string + expectError bool + }{ + { + name: "valid JSON config with connector type ID", + input: basetypes.NewStringValue(`{"key": "value", "__tf_provider_connector_type_id": "my-connector"}`), + expectedConnectorID: "my-connector", + expectError: false, + }, + { + name: "valid JSON config without connector type ID", + input: basetypes.NewStringValue(`{"key": "value"}`), + expectedConnectorID: "", + expectError: false, + }, + { + name: "empty JSON config", + input: basetypes.NewStringValue(`{}`), + expectedConnectorID: "", + expectError: false, + }, + { + name: "invalid JSON config", + input: basetypes.NewStringValue(`{invalid json`), + expectError: true, + }, + { + name: "null string value", + input: basetypes.NewStringNull(), + expectedConnectorID: "", + expectError: false, + }, + { + name: "unknown string value", + input: basetypes.NewStringUnknown(), + expectedConnectorID: "", + expectError: false, + }, + { + name: "JSON with non-string connector type ID", + input: basetypes.NewStringValue(`{"key": "value", "__tf_provider_connector_type_id": 123}`), + expectedConnectorID: "", + expectError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + configType := ConfigType{} + result, diags := configType.ValueFromString(context.Background(), tt.input) + + if tt.expectError { + require.True(t, diags.HasError(), "Expected an error but got none") + return + } + + require.False(t, diags.HasError(), "Unexpected error: %v", diags) + require.NotNil(t, result, "Result should not be nil") + + configValue, ok := result.(ConfigValue) + require.True(t, ok, "Result should be of type ConfigValue") + + require.Equal(t, tt.expectedConnectorID, configValue.connectorTypeID, "Connector type ID mismatch") + require.Equal(t, tt.input, configValue.StringValue, "String value should be preserved") + }) + } +} + +func TestConfigType_ValueFromTerraform(t *testing.T) { + tests := []struct { + name string + tfValue tftypes.Value + expectedValue attr.Value + expectedError string + }{ + { + name: "valid string value with JSON config", + tfValue: tftypes.NewValue(tftypes.String, `{"key": "value", "__tf_provider_connector_type_id": "test-connector"}`), + expectedValue: ConfigValue{ + Normalized: func() 
jsontypes.Normalized { + return jsontypes.NewNormalizedValue(`{"key": "value", "__tf_provider_connector_type_id": "test-connector"}`) + }(), + connectorTypeID: "test-connector", + }, + }, + { + name: "valid string value with empty JSON", + tfValue: tftypes.NewValue(tftypes.String, `{}`), + expectedValue: ConfigValue{ + Normalized: func() jsontypes.Normalized { + n, _ := jsontypes.NewNormalizedValue(`{}`).ToStringValue(context.Background()) + return jsontypes.Normalized{StringValue: n} + }(), + connectorTypeID: "", + }, + }, + { + name: "null string value", + tfValue: tftypes.NewValue(tftypes.String, nil), + expectedValue: ConfigValue{ + Normalized: func() jsontypes.Normalized { + return jsontypes.Normalized{StringValue: basetypes.NewStringNull()} + }(), + connectorTypeID: "", + }, + }, + { + name: "unknown string value", + tfValue: tftypes.NewValue(tftypes.String, tftypes.UnknownValue), + expectedValue: ConfigValue{ + Normalized: func() jsontypes.Normalized { + return jsontypes.Normalized{StringValue: basetypes.NewStringUnknown()} + }(), + connectorTypeID: "", + }, + }, + { + name: "non-string terraform value", + tfValue: tftypes.NewValue(tftypes.Bool, true), + expectedValue: nil, + expectedError: "expected string", + }, + { + name: "invalid JSON in string value", + tfValue: tftypes.NewValue(tftypes.String, `{invalid json`), + expectedValue: nil, + expectedError: "unexpected error converting StringValue to StringValuable", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + configType := ConfigType{} + result, err := configType.ValueFromTerraform(context.Background(), tt.tfValue) + + if tt.expectedError != "" { + require.Error(t, err, "Expected an error but got none") + require.Contains(t, err.Error(), tt.expectedError, "Error message should contain expected text") + require.Nil(t, result, "Result should be nil when there's an error") + return + } + + require.NoError(t, err, "Unexpected error: %v", err) + require.NotNil(t, result, "Result should not be nil") + + configValue, ok := result.(ConfigValue) + require.True(t, ok, "Result should be of type ConfigValue") + + expectedConfigValue, ok := tt.expectedValue.(ConfigValue) + require.True(t, ok, "Expected value should be of type ConfigValue") + + // Compare the connector type ID + require.Equal(t, expectedConfigValue.connectorTypeID, configValue.connectorTypeID, "Connector type ID mismatch") + + // Compare the underlying string values + require.Equal(t, expectedConfigValue.StringValue.Equal(configValue.StringValue), true, "String values should be equal") + }) + } +} diff --git a/internal/kibana/connectors/config_value.go b/internal/kibana/connectors/config_value.go new file mode 100644 index 000000000..baafc3fa1 --- /dev/null +++ b/internal/kibana/connectors/config_value.go @@ -0,0 +1,180 @@ +package connectors + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/attr/xattr" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +var ( + _ basetypes.StringValuable = (*ConfigValue)(nil) + _ basetypes.StringValuableWithSemanticEquals = (*ConfigValue)(nil) + _ xattr.ValidateableAttribute = (*ConfigValue)(nil) +) + +type ConfigValue struct { + jsontypes.Normalized + connectorTypeID 
string +} + +// Type returns a ConfigType. +func (v ConfigValue) Type(_ context.Context) attr.Type { + return ConfigType{} +} + +// Equal returns true if the given value is equivalent. +func (v ConfigValue) Equal(o attr.Value) bool { + other, ok := o.(ConfigValue) + + if !ok { + return false + } + + return v.StringValue.Equal(other.StringValue) +} + +func (t ConfigValue) ValidateAttribute(ctx context.Context, req xattr.ValidateAttributeRequest, resp *xattr.ValidateAttributeResponse) { + if t.IsNull() || t.IsUnknown() { + return + } + + t.Normalized.ValidateAttribute(ctx, req, resp) +} + +func (v ConfigValue) SanitizedValue() (string, diag.Diagnostics) { + var diags diag.Diagnostics + + if v.IsNull() { + return "", diags + } + + if v.IsUnknown() { + return "", diags + } + + var unsanitizedMap map[string]interface{} + err := json.Unmarshal([]byte(v.ValueString()), &unsanitizedMap) + if err != nil { + diags.AddError("Failed to unmarshal config value", err.Error()) + return "", diags + } + + delete(unsanitizedMap, connectorTypeIDKey) + sanitizedValue, err := json.Marshal(unsanitizedMap) + if err != nil { + diags.AddError("Failed to marshal sanitized config value", err.Error()) + return "", diags + } + + return string(sanitizedValue), diags +} + +// StringSemanticEquals returns true if the given config object value is semantically equal to the current config object value. +// The comparison will ignore any default values present in one value, but unset in the other. +func (v ConfigValue) StringSemanticEquals(ctx context.Context, newValuable basetypes.StringValuable) (bool, diag.Diagnostics) { + var diags diag.Diagnostics + + newValue, ok := newValuable.(ConfigValue) + if !ok { + diags.AddError( + "Semantic Equality Check Error", + "An unexpected value type was received while performing semantic equality checks. "+ + "Please report this to the provider developers.\n\n"+ + "Expected Value Type: "+fmt.Sprintf("%T", v)+"\n"+ + "Got Value Type: "+fmt.Sprintf("%T", newValuable), + ) + + return false, diags + } + + if v.IsNull() { + return newValue.IsNull(), diags + } + + if v.IsUnknown() { + return newValue.IsUnknown(), diags + } + + connectorTypeID := v.connectorTypeID + if connectorTypeID == "" { + connectorTypeID = newValue.connectorTypeID + } + + if connectorTypeID == "" { + // We cannot manage default values without a connector type ID. + return v.Normalized.StringSemanticEquals(ctx, newValue.Normalized) + } + + thisString, diags := v.SanitizedValue() + if diags.HasError() { + return false, diags + } + thatString, diags := newValue.SanitizedValue() + if diags.HasError() { + return false, diags + } + + thisWithDefaults, err := kibana_oapi.ConnectorConfigWithDefaults(connectorTypeID, thisString) + if err != nil { + diags.AddError("Failed to get connector config with defaults", err.Error()) + } + thatWithDefaults, err := kibana_oapi.ConnectorConfigWithDefaults(connectorTypeID, thatString) + if err != nil { + diags.AddError("Failed to get connector config with defaults", err.Error()) + } + + normalizedWithDefaults := jsontypes.NewNormalizedValue(thisWithDefaults) + normalizedThatWithDefaults := jsontypes.NewNormalizedValue(thatWithDefaults) + return normalizedWithDefaults.StringSemanticEquals(ctx, normalizedThatWithDefaults) +} + +// NewConfigNull creates a ConfigValue with a null value. Determine whether the value is null via IsNull method. 
+func NewConfigNull() ConfigValue { + return ConfigValue{ + Normalized: jsontypes.NewNormalizedNull(), + } +} + +// NewConfigUnknown creates a ConfigValue with an unknown value. Determine whether the value is unknown via IsUnknown method. +func NewConfigUnknown() ConfigValue { + return ConfigValue{ + Normalized: jsontypes.NewNormalizedUnknown(), + } +} + +const connectorTypeIDKey = "__tf_provider_connector_type_id" + +// NewConfigValueWithConnectorID creates a ConfigValue with a known value and a connector type ID. Access the value via ValueString method. +func NewConfigValueWithConnectorID(value string, connectorTypeID string) (ConfigValue, diag.Diagnostics) { + if value == "" { + return NewConfigNull(), nil + } + + var configMap map[string]interface{} + err := json.Unmarshal([]byte(value), &configMap) + if err != nil { + return ConfigValue{}, diag.Diagnostics{ + diag.NewErrorDiagnostic("Failed to unmarshal config", err.Error()), + } + } + + configMap[connectorTypeIDKey] = connectorTypeID + jsonBytes, err := json.Marshal(configMap) + if err != nil { + return ConfigValue{}, diag.Diagnostics{ + diag.NewErrorDiagnostic("Failed to marshal config", err.Error()), + } + } + + return ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(string(jsonBytes)), + connectorTypeID: connectorTypeID, + }, nil +} diff --git a/internal/kibana/connectors/config_value_test.go b/internal/kibana/connectors/config_value_test.go new file mode 100644 index 000000000..b651ad2bf --- /dev/null +++ b/internal/kibana/connectors/config_value_test.go @@ -0,0 +1,427 @@ +package connectors + +import ( + "context" + "encoding/json" + "strings" + "testing" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/attr/xattr" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" + "github.com/stretchr/testify/require" +) + +func TestConfigValue_ValidateAttribute(t *testing.T) { + tests := []struct { + name string + configValue ConfigValue + expectError bool + errorContains string + }{ + { + name: "null value should not validate", + configValue: NewConfigNull(), + expectError: false, + }, + { + name: "unknown value should not validate", + configValue: NewConfigUnknown(), + expectError: false, + }, + { + name: "valid JSON value should validate successfully", + configValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{"key": "value"}`), + }, + expectError: false, + }, + { + name: "invalid JSON value should produce validation error", + configValue: ConfigValue{ + Normalized: func() jsontypes.Normalized { + // Create an invalid JSON by directly setting StringValue + return jsontypes.Normalized{StringValue: basetypes.NewStringValue(`{invalid json`)} + }(), + }, + expectError: true, + errorContains: "Invalid JSON String Value", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + req := xattr.ValidateAttributeRequest{ + Path: path.Root("config"), + } + resp := &xattr.ValidateAttributeResponse{} + + tt.configValue.ValidateAttribute(context.Background(), req, resp) + + if tt.expectError { + require.True(t, resp.Diagnostics.HasError(), "Expected validation error but got none") + if tt.errorContains != "" { + require.Contains(t, resp.Diagnostics.Errors()[0].Summary(), tt.errorContains) + } + } else { + require.False(t, resp.Diagnostics.HasError(), "Unexpected validation error: 
%v", resp.Diagnostics) + } + }) + } +} + +func TestConfigValue_SanitizedValue(t *testing.T) { + tests := []struct { + name string + configValue ConfigValue + expectedResult string + expectError bool + errorContains string + }{ + { + name: "null value returns empty string", + configValue: NewConfigNull(), + expectedResult: "", + expectError: false, + }, + { + name: "unknown value returns empty string", + configValue: NewConfigUnknown(), + expectedResult: "", + expectError: false, + }, + { + name: "JSON without connector type ID remains unchanged", + configValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{"key": "value", "another": "field"}`), + }, + expectedResult: `{"another":"field","key":"value"}`, + expectError: false, + }, + { + name: "JSON with connector type ID gets sanitized", + configValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{"key": "value", "__tf_provider_connector_type_id": "test-connector", "another": "field"}`), + connectorTypeID: "test-connector", + }, + expectedResult: `{"another":"field","key":"value"}`, + expectError: false, + }, + { + name: "empty JSON object", + configValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{}`), + }, + expectedResult: `{}`, + expectError: false, + }, + { + name: "invalid JSON should return error", + configValue: ConfigValue{ + Normalized: jsontypes.Normalized{StringValue: basetypes.NewStringValue(`{invalid json`)}, + }, + expectError: true, + errorContains: "Failed to unmarshal config value", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, diags := tt.configValue.SanitizedValue() + + if tt.expectError { + require.True(t, diags.HasError(), "Expected error but got none") + if tt.errorContains != "" { + require.Contains(t, diags.Errors()[0].Summary(), tt.errorContains) + } + } else { + require.False(t, diags.HasError(), "Unexpected error: %v", diags) + require.Equal(t, tt.expectedResult, result) + } + }) + } +} + +func TestConfigValue_StringSemanticEquals(t *testing.T) { + emailConnectorID := ".email" + emailConnectorConfig := `{"key": "value"}` + emailConnectorConfigWithDefaults, err := kibana_oapi.ConnectorConfigWithDefaults(emailConnectorID, emailConnectorConfig) + require.NoError(t, err) + + tests := []struct { + name string + configValue ConfigValue + otherValue basetypes.StringValuable + expectEqual bool + expectError bool + errorContains string + }{ + { + name: "null values are equal", + configValue: NewConfigNull(), + otherValue: NewConfigNull(), + expectEqual: true, + expectError: false, + }, + { + name: "unknown values are equal", + configValue: NewConfigUnknown(), + otherValue: NewConfigUnknown(), + expectEqual: true, + expectError: false, + }, + { + name: "null vs unknown should not be equal", + configValue: NewConfigNull(), + otherValue: NewConfigUnknown(), + expectEqual: false, + expectError: false, + }, + { + name: "wrong type should produce error", + configValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{"key": "value"}`), + }, + otherValue: basetypes.NewStringValue(`{"key": "value"}`), + expectEqual: false, + expectError: true, + errorContains: "Semantic Equality Check Error", + }, + { + name: "values without connector type ID should use normalized comparison", + configValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{"key": "value"}`), + connectorTypeID: "", + }, + otherValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{"key": "value"}`), + connectorTypeID: "", + }, + expectEqual: true, + 
expectError: false, + }, + { + name: "different values without connector type ID should not be equal", + configValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{"key": "value1"}`), + connectorTypeID: "", + }, + otherValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{"key": "value2"}`), + connectorTypeID: "", + }, + expectEqual: false, + expectError: false, + }, + { + name: "values with same connector type ID from first value", + configValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(emailConnectorConfig), + connectorTypeID: emailConnectorID, + }, + otherValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(emailConnectorConfigWithDefaults), + connectorTypeID: "", + }, + expectEqual: true, // Would be true if connector config with defaults works + expectError: false, + }, + { + name: "values with same connector type ID from second value", + configValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(emailConnectorConfigWithDefaults), + connectorTypeID: "", + }, + otherValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(emailConnectorConfig), + connectorTypeID: emailConnectorID, + }, + expectEqual: true, // Would be true if connector config with defaults works + expectError: false, + }, + { + name: "invalid JSON in first value should cause error", + configValue: ConfigValue{ + Normalized: jsontypes.Normalized{StringValue: basetypes.NewStringValue(`{invalid`)}, + connectorTypeID: "test-connector", + }, + otherValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{"key": "value"}`), + connectorTypeID: "test-connector", + }, + expectEqual: false, + expectError: true, + errorContains: "Failed to unmarshal config value", + }, + { + name: "invalid JSON in second value should cause error", + configValue: ConfigValue{ + Normalized: jsontypes.NewNormalizedValue(`{"key": "value"}`), + connectorTypeID: "test-connector", + }, + otherValue: ConfigValue{ + Normalized: jsontypes.Normalized{StringValue: basetypes.NewStringValue(`{invalid`)}, + connectorTypeID: "test-connector", + }, + expectEqual: false, + expectError: true, + errorContains: "Failed to unmarshal config value", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, diags := tt.configValue.StringSemanticEquals(context.Background(), tt.otherValue) + + if tt.expectError { + require.True(t, diags.HasError(), "Expected error but got none") + if tt.errorContains != "" { + errorFound := false + for _, err := range diags.Errors() { + if strings.Contains(err.Summary(), tt.errorContains) || strings.Contains(err.Detail(), tt.errorContains) { + errorFound = true + break + } + } + require.True(t, errorFound, "Expected error containing '%s' but got: %v", tt.errorContains, diags) + } + } else { + if diags.HasError() { + // For connector config with defaults errors, we might expect them in real scenarios + // but for unit tests, we'll be more lenient + hasConnectorError := false + for _, err := range diags.Errors() { + if strings.Contains(err.Summary(), "Failed to get connector config with defaults") { + hasConnectorError = true + break + } + } + if !hasConnectorError { + require.False(t, diags.HasError(), "Unexpected error: %v", diags) + } + } + require.Equal(t, tt.expectEqual, result) + } + }) + } +} + +func TestNewConfigValueWithConnectorID(t *testing.T) { + tests := []struct { + name string + value string + connectorTypeID string + expectError bool + errorContains string + validateResult func(t *testing.T, result ConfigValue) + }{ + { + 
name: "empty value returns null config", + value: "", + connectorTypeID: "test-connector", + expectError: false, + validateResult: func(t *testing.T, result ConfigValue) { + require.True(t, result.IsNull()) + require.Equal(t, "", result.connectorTypeID) + }, + }, + { + name: "valid JSON with connector type ID", + value: `{"key": "value"}`, + connectorTypeID: "test-connector", + expectError: false, + validateResult: func(t *testing.T, result ConfigValue) { + require.False(t, result.IsNull()) + require.Equal(t, "test-connector", result.connectorTypeID) + + // Check that the connector type ID was added to the JSON + var resultMap map[string]interface{} + err := json.Unmarshal([]byte(result.ValueString()), &resultMap) + require.NoError(t, err) + require.Equal(t, "test-connector", resultMap["__tf_provider_connector_type_id"]) + require.Equal(t, "value", resultMap["key"]) + }, + }, + { + name: "valid empty JSON object", + value: `{}`, + connectorTypeID: "test-connector", + expectError: false, + validateResult: func(t *testing.T, result ConfigValue) { + require.False(t, result.IsNull()) + require.Equal(t, "test-connector", result.connectorTypeID) + + var resultMap map[string]interface{} + err := json.Unmarshal([]byte(result.ValueString()), &resultMap) + require.NoError(t, err) + require.Equal(t, "test-connector", resultMap["__tf_provider_connector_type_id"]) + }, + }, + { + name: "complex JSON object", + value: `{"config": {"nested": "value"}, "array": [1, 2, 3]}`, + connectorTypeID: "complex-connector", + expectError: false, + validateResult: func(t *testing.T, result ConfigValue) { + require.False(t, result.IsNull()) + require.Equal(t, "complex-connector", result.connectorTypeID) + + var resultMap map[string]interface{} + err := json.Unmarshal([]byte(result.ValueString()), &resultMap) + require.NoError(t, err) + require.Equal(t, "complex-connector", resultMap["__tf_provider_connector_type_id"]) + + config, ok := resultMap["config"].(map[string]interface{}) + require.True(t, ok) + require.Equal(t, "value", config["nested"]) + + array, ok := resultMap["array"].([]interface{}) + require.True(t, ok) + require.Len(t, array, 3) + }, + }, + { + name: "invalid JSON should return error", + value: `{invalid json`, + connectorTypeID: "test-connector", + expectError: true, + errorContains: "Failed to unmarshal config", + }, + { + name: "empty connector type ID", + value: `{"key": "value"}`, + connectorTypeID: "", + expectError: false, + validateResult: func(t *testing.T, result ConfigValue) { + require.False(t, result.IsNull()) + require.Equal(t, "", result.connectorTypeID) + + var resultMap map[string]interface{} + err := json.Unmarshal([]byte(result.ValueString()), &resultMap) + require.NoError(t, err) + require.Equal(t, "", resultMap["__tf_provider_connector_type_id"]) + require.Equal(t, "value", resultMap["key"]) + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, diags := NewConfigValueWithConnectorID(tt.value, tt.connectorTypeID) + + if tt.expectError { + require.True(t, diags.HasError(), "Expected error but got none") + if tt.errorContains != "" { + require.Contains(t, diags.Errors()[0].Summary(), tt.errorContains) + } + } else { + require.False(t, diags.HasError(), "Unexpected error: %v", diags) + if tt.validateResult != nil { + tt.validateResult(t, result) + } + } + }) + } +} diff --git a/internal/kibana/connectors/create.go b/internal/kibana/connectors/create.go new file mode 100644 index 000000000..143bb9fb5 --- /dev/null +++ 
b/internal/kibana/connectors/create.go @@ -0,0 +1,81 @@ +package connectors + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func (r *Resource) Create(ctx context.Context, request resource.CreateRequest, response *resource.CreateResponse) { + var plan tfModel + + response.Diagnostics.Append(request.Plan.Get(ctx, &plan)...) + if response.Diagnostics.HasError() { + return + } + + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, plan.KibanaConnection, r.client) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + response.Diagnostics.AddError("Failed to get Kibana client", err.Error()) + return + } + + apiModel, diags := plan.toAPIModel() + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + version, sdkDiags := client.ServerVersion(ctx) + response.Diagnostics.Append(utils.FrameworkDiagsFromSDK(sdkDiags)...) + if response.Diagnostics.HasError() { + return + } + + if apiModel.ConnectorID != "" && version.LessThan(MinVersionSupportingPreconfiguredIDs) { + response.Diagnostics.AddError( + "Unsupported Elastic Stack version", + "Preconfigured connector IDs are only supported for Elastic Stack v"+MinVersionSupportingPreconfiguredIDs.String()+" and above. Either remove the `connector_id` attribute or upgrade your target cluster to supported version", + ) + return + } + + connectorID, diags := kibana_oapi.CreateConnector(ctx, oapiClient, apiModel) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + compositeID := &clients.CompositeId{ClusterId: apiModel.SpaceID, ResourceId: connectorID} + plan.ID = types.StringValue(compositeID.String()) + + // Read the connector back to populate all computed fields + client, diags = clients.MaybeNewApiClientFromFrameworkResource(ctx, plan.KibanaConnection, r.client) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + exists, diags := r.readConnectorFromAPI(ctx, client, &plan) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + if !exists { + response.Diagnostics.AddError("Connector not found after creation", "The connector was created but could not be found afterward") + return + } + + response.Diagnostics.Append(response.State.Set(ctx, plan)...) +} diff --git a/internal/kibana/connectors/delete.go b/internal/kibana/connectors/delete.go new file mode 100644 index 000000000..9c498eb66 --- /dev/null +++ b/internal/kibana/connectors/delete.go @@ -0,0 +1,40 @@ +package connectors + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +func (r *Resource) Delete(ctx context.Context, request resource.DeleteRequest, response *resource.DeleteResponse) { + var state tfModel + + response.Diagnostics.Append(request.State.Get(ctx, &state)...) 
+ if response.Diagnostics.HasError() { + return + } + + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, state.KibanaConnection, r.client) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + response.Diagnostics.AddError("Failed to get Kibana client", err.Error()) + return + } + + compositeID, diags := state.GetID() + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + spaceId := state.SpaceID.ValueString() + + response.Diagnostics.Append(kibana_oapi.DeleteConnector(ctx, oapiClient, compositeID.ResourceId, spaceId)...) +} diff --git a/internal/kibana/connectors/models.go b/internal/kibana/connectors/models.go new file mode 100644 index 000000000..7adf834d7 --- /dev/null +++ b/internal/kibana/connectors/models.go @@ -0,0 +1,73 @@ +package connectors + +import ( + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/models" + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +type tfModel struct { + ID types.String `tfsdk:"id"` + KibanaConnection types.List `tfsdk:"kibana_connection"` + ConnectorID types.String `tfsdk:"connector_id"` + SpaceID types.String `tfsdk:"space_id"` + Name types.String `tfsdk:"name"` + ConnectorTypeID types.String `tfsdk:"connector_type_id"` + Config ConfigValue `tfsdk:"config"` + Secrets jsontypes.Normalized `tfsdk:"secrets"` + IsDeprecated types.Bool `tfsdk:"is_deprecated"` + IsMissingSecrets types.Bool `tfsdk:"is_missing_secrets"` + IsPreconfigured types.Bool `tfsdk:"is_preconfigured"` +} + +func (model tfModel) GetID() (*clients.CompositeId, diag.Diagnostics) { + compId, sdkDiags := clients.CompositeIdFromStr(model.ID.ValueString()) + if sdkDiags.HasError() { + return nil, utils.FrameworkDiagsFromSDK(sdkDiags) + } + + return compId, nil +} + +func (model tfModel) toAPIModel() (models.KibanaActionConnector, diag.Diagnostics) { + apiModel := models.KibanaActionConnector{ + ConnectorID: model.ConnectorID.ValueString(), + SpaceID: model.SpaceID.ValueString(), + Name: model.Name.ValueString(), + ConnectorTypeID: model.ConnectorTypeID.ValueString(), + } + + if utils.IsKnown(model.Config) { + apiModel.ConfigJSON = model.Config.ValueString() + } + + if utils.IsKnown(model.Secrets) { + apiModel.SecretsJSON = model.Secrets.ValueString() + } + + return apiModel, nil +} + +func (model *tfModel) populateFromAPI(apiModel *models.KibanaActionConnector, compositeID *clients.CompositeId) diag.Diagnostics { + model.ID = types.StringValue(compositeID.String()) + model.ConnectorID = types.StringValue(apiModel.ConnectorID) + model.SpaceID = types.StringValue(apiModel.SpaceID) + model.Name = types.StringValue(apiModel.Name) + model.ConnectorTypeID = types.StringValue(apiModel.ConnectorTypeID) + model.IsDeprecated = types.BoolValue(apiModel.IsDeprecated) + model.IsMissingSecrets = types.BoolValue(apiModel.IsMissingSecrets) + model.IsPreconfigured = types.BoolValue(apiModel.IsPreconfigured) + + if apiModel.ConfigJSON != "" { + var diags diag.Diagnostics + model.Config, diags = NewConfigValueWithConnectorID(apiModel.ConfigJSON, apiModel.ConnectorTypeID) + if diags.HasError() { + return diags + } + } + + return nil +} diff --git 
a/internal/kibana/connectors/read.go b/internal/kibana/connectors/read.go new file mode 100644 index 000000000..8c805fadb --- /dev/null +++ b/internal/kibana/connectors/read.go @@ -0,0 +1,69 @@ +package connectors + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/hashicorp/terraform-plugin-framework/diag" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +// readConnectorFromAPI fetches a connector from the API and populates the given model +// Returns true if the connector was found, false if it doesn't exist +func (r *Resource) readConnectorFromAPI(ctx context.Context, client *clients.ApiClient, model *tfModel) (bool, diag.Diagnostics) { + var diags diag.Diagnostics + + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + diags.AddError("Failed to get Kibana client", err.Error()) + return false, diags + } + + compositeID, diagsTemp := model.GetID() + diags.Append(diagsTemp...) + if diags.HasError() { + return false, diags + } + + connector, diagsTemp := kibana_oapi.GetConnector(ctx, oapiClient, compositeID.ResourceId, compositeID.ClusterId) + if connector == nil && diagsTemp == nil { + // Resource not found + return false, diags + } + diags.Append(diagsTemp...) + if diags.HasError() { + return false, diags + } + + diags.Append(model.populateFromAPI(connector, compositeID)...) + return true, diags +} + +func (r *Resource) Read(ctx context.Context, request resource.ReadRequest, response *resource.ReadResponse) { + var state tfModel + + response.Diagnostics.Append(request.State.Get(ctx, &state)...) + if response.Diagnostics.HasError() { + return + } + + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, state.KibanaConnection, r.client) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + exists, diags := r.readConnectorFromAPI(ctx, client, &state) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + if !exists { + response.State.RemoveResource(ctx) + return + } + + response.Diagnostics.Append(response.State.Set(ctx, &state)...) +} diff --git a/internal/kibana/connectors/resource.go b/internal/kibana/connectors/resource.go new file mode 100644 index 000000000..becb1f0d7 --- /dev/null +++ b/internal/kibana/connectors/resource.go @@ -0,0 +1,37 @@ +package connectors + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/hashicorp/go-version" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" +) + +// Ensure provider defined types fully satisfy framework interfaces +var _ resource.Resource = &Resource{} +var _ resource.ResourceWithConfigure = &Resource{} +var _ resource.ResourceWithImportState = &Resource{} + +var ( + MinVersionSupportingPreconfiguredIDs = version.Must(version.NewVersion("8.8.0")) +) + +type Resource struct { + client *clients.ApiClient +} + +func (r *Resource) Configure(ctx context.Context, request resource.ConfigureRequest, response *resource.ConfigureResponse) { + client, diags := clients.ConvertProviderData(request.ProviderData) + response.Diagnostics.Append(diags...) 
+ r.client = client +} + +func (r *Resource) Metadata(ctx context.Context, request resource.MetadataRequest, response *resource.MetadataResponse) { + response.TypeName = request.ProviderTypeName + "_kibana_action_connector" +} + +func (r *Resource) ImportState(ctx context.Context, request resource.ImportStateRequest, response *resource.ImportStateResponse) { + response.Diagnostics.Append(response.State.SetAttribute(ctx, path.Root("id"), request.ID)...) +} diff --git a/internal/kibana/connectors/schema.go b/internal/kibana/connectors/schema.go new file mode 100644 index 000000000..2fd185b77 --- /dev/null +++ b/internal/kibana/connectors/schema.go @@ -0,0 +1,90 @@ +package connectors + +import ( + "context" + + "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringdefault" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + + providerschema "github.com/elastic/terraform-provider-elasticstack/internal/schema" + "github.com/elastic/terraform-provider-elasticstack/internal/utils/validators" +) + +func (r *Resource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + Description: "Creates a Kibana action connector. See https://www.elastic.co/guide/en/kibana/current/action-types.html", + Blocks: map[string]schema.Block{ + "kibana_connection": providerschema.GetKbFWConnectionBlock(), + }, + Attributes: map[string]schema.Attribute{ + "id": schema.StringAttribute{ + Description: "Internal identifier of the resource.", + Computed: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.UseStateForUnknown(), + }, + }, + "connector_id": schema.StringAttribute{ + Description: "A UUID v1 or v4 to use instead of a randomly generated ID.", + Computed: true, + Optional: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + stringplanmodifier.UseStateForUnknown(), + }, + Validators: []validator.String{ + validators.IsUUID(), + }, + }, + "space_id": schema.StringAttribute{ + Description: "An identifier for the space. If space_id is not provided, the default space is used.", + Optional: true, + Computed: true, + Default: stringdefault.StaticString("default"), + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "name": schema.StringAttribute{ + Description: "The name of the connector. While this name does not have to be unique, a distinctive name can help you identify a connector.", + Required: true, + }, + "connector_type_id": schema.StringAttribute{ + Description: "The ID of the connector type, e.g. `.index`.", + Required: true, + PlanModifiers: []planmodifier.String{ + stringplanmodifier.RequiresReplace(), + }, + }, + "config": schema.StringAttribute{ + CustomType: ConfigType{}, + Description: "The configuration for the connector. Configuration properties vary depending on the connector type.", + Optional: true, + Computed: true, + }, + "secrets": schema.StringAttribute{ + CustomType: jsontypes.NormalizedType{}, + Description: "The secrets configuration for the connector. 
Secrets configuration properties vary depending on the connector type.", + Optional: true, + Sensitive: true, + }, + "is_deprecated": schema.BoolAttribute{ + Description: "Indicates whether the connector type is deprecated.", + Computed: true, + }, + "is_missing_secrets": schema.BoolAttribute{ + Description: "Indicates whether secrets are missing for the connector.", + Computed: true, + }, + "is_preconfigured": schema.BoolAttribute{ + Description: "Indicates whether it is a preconfigured connector.", + Computed: true, + }, + }, + } +} diff --git a/internal/kibana/connectors/update.go b/internal/kibana/connectors/update.go new file mode 100644 index 000000000..07e3b689f --- /dev/null +++ b/internal/kibana/connectors/update.go @@ -0,0 +1,73 @@ +package connectors + +import ( + "context" + + "github.com/elastic/terraform-provider-elasticstack/internal/clients" + "github.com/elastic/terraform-provider-elasticstack/internal/clients/kibana_oapi" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func (r *Resource) Update(ctx context.Context, request resource.UpdateRequest, response *resource.UpdateResponse) { + var plan tfModel + + response.Diagnostics.Append(request.Plan.Get(ctx, &plan)...) + if response.Diagnostics.HasError() { + return + } + + client, diags := clients.MaybeNewApiClientFromFrameworkResource(ctx, plan.KibanaConnection, r.client) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + oapiClient, err := client.GetKibanaOapiClient() + if err != nil { + response.Diagnostics.AddError("Failed to get Kibana client", err.Error()) + return + } + + apiModel, diags := plan.toAPIModel() + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + compositeID, diags := plan.GetID() + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + apiModel.ConnectorID = compositeID.ResourceId + + connectorID, diags := kibana_oapi.UpdateConnector(ctx, oapiClient, apiModel) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + newCompositeID := &clients.CompositeId{ClusterId: apiModel.SpaceID, ResourceId: connectorID} + plan.ID = types.StringValue(newCompositeID.String()) + + // Read the connector back to populate all computed fields + client, diags = clients.MaybeNewApiClientFromFrameworkResource(ctx, plan.KibanaConnection, r.client) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + exists, diags := r.readConnectorFromAPI(ctx, client, &plan) + response.Diagnostics.Append(diags...) + if response.Diagnostics.HasError() { + return + } + + if !exists { + response.Diagnostics.AddError("Connector not found after update", "The connector was updated but could not be found afterward") + return + } + + response.Diagnostics.Append(response.State.Set(ctx, plan)...) 
+} diff --git a/internal/utils/diag.go b/internal/utils/diag.go index 47d48914c..1a8093747 100644 --- a/internal/utils/diag.go +++ b/internal/utils/diag.go @@ -94,6 +94,32 @@ func FrameworkDiagsFromSDK(sdkDiags sdkdiag.Diagnostics) fwdiag.Diagnostics { return diags } +func SDKDiagsFromFramework(fwDiags fwdiag.Diagnostics) sdkdiag.Diagnostics { + var diags sdkdiag.Diagnostics + + for _, fwDiag := range fwDiags { + var sdkDiag sdkdiag.Diagnostic + + if fwDiag.Severity() == fwdiag.SeverityError { + sdkDiag = sdkdiag.Diagnostic{ + Severity: sdkdiag.Error, + Summary: fwDiag.Summary(), + Detail: fwDiag.Detail(), + } + } else { + sdkDiag = sdkdiag.Diagnostic{ + Severity: sdkdiag.Warning, + Summary: fwDiag.Summary(), + Detail: fwDiag.Detail(), + } + } + + diags = append(diags, sdkDiag) + } + + return diags +} + func FrameworkDiagFromError(err error) fwdiag.Diagnostics { if err == nil { return nil diff --git a/internal/utils/validators/is_uuid.go b/internal/utils/validators/is_uuid.go new file mode 100644 index 000000000..cb1fcd473 --- /dev/null +++ b/internal/utils/validators/is_uuid.go @@ -0,0 +1,41 @@ +package validators + +import ( + "context" + "fmt" + + "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/hashicorp/go-uuid" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" +) + +type uuidValidator struct{} + +func IsUUID() validator.String { + return uuidValidator{} +} + +func (v uuidValidator) ValidateString(_ context.Context, req validator.StringRequest, resp *validator.StringResponse) { + if !utils.IsKnown(req.ConfigValue) { + return + } + + _, err := uuid.ParseUUID(req.ConfigValue.ValueString()) + if err == nil { + return + } + + resp.Diagnostics.AddAttributeError( + req.Path, + "Invalid UUID", + fmt.Sprintf("Expected a valid UUID, got %s. 
Parsing error: %v", req.ConfigValue.ValueString(), err), + ) +} + +func (v uuidValidator) Description(_ context.Context) string { + return "value must be a valid UUID in RFC 4122 format" +} + +func (v uuidValidator) MarkdownDescription(ctx context.Context) string { + return v.Description(ctx) +} diff --git a/internal/utils/validators/is_uuid_test.go b/internal/utils/validators/is_uuid_test.go new file mode 100644 index 000000000..1a282ab30 --- /dev/null +++ b/internal/utils/validators/is_uuid_test.go @@ -0,0 +1,156 @@ +package validators + +import ( + "context" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stretchr/testify/require" +) + +func TestIsUUID_ValidateString(t *testing.T) { + tests := []struct { + name string + value types.String + expectError bool + errorText string + }{ + { + name: "null value should not validate", + value: types.StringNull(), + expectError: false, + }, + { + name: "unknown value should not validate", + value: types.StringUnknown(), + expectError: false, + }, + { + name: "valid UUID v4 should pass", + value: types.StringValue("550e8400-e29b-41d4-a716-446655440000"), + expectError: false, + }, + { + name: "valid UUID v1 should pass", + value: types.StringValue("6ba7b810-9dad-11d1-80b4-00c04fd430c8"), + expectError: false, + }, + { + name: "valid UUID v4 with uppercase should pass", + value: types.StringValue("550E8400-E29B-41D4-A716-446655440000"), + expectError: false, + }, + { + name: "valid UUID v4 with mixed case should pass", + value: types.StringValue("550e8400-E29B-41d4-A716-446655440000"), + expectError: false, + }, + { + name: "nil UUID should pass", + value: types.StringValue("00000000-0000-0000-0000-000000000000"), + expectError: false, + }, + { + name: "empty string should fail", + value: types.StringValue(""), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "string without hyphens should fail", + value: types.StringValue("550e8400e29b41d4a716446655440000"), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "UUID with wrong number of hyphens should fail", + value: types.StringValue("550e8400-e29b-41d4-a716-44665544-0000"), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "UUID with too many characters should fail", + value: types.StringValue("550e8400-e29b-41d4-a716-4466554400001"), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "UUID with too few characters should fail", + value: types.StringValue("550e8400-e29b-41d4-a716-44665544000"), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "UUID with invalid characters should fail", + value: types.StringValue("550e8400-e29b-41d4-a716-44665544000g"), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "UUID with spaces should fail", + value: types.StringValue("550e8400-e29b-41d4-a716-446655440000 "), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "UUID with leading spaces should fail", + value: types.StringValue(" 550e8400-e29b-41d4-a716-446655440000"), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "completely invalid string should fail", + value: types.StringValue("not-a-uuid-at-all"), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "numeric string should fail", + value: types.StringValue("123456789012345678901234567890123456"), + expectError: true, + errorText: "Invalid UUID", 
+ }, + { + name: "UUID with wrong hyphen positions should fail", + value: types.StringValue("550e84-00e2-9b41d4-a716-446655440000"), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "UUID with missing segments should fail", + value: types.StringValue("550e8400--41d4-a716-446655440000"), + expectError: true, + errorText: "Invalid UUID", + }, + { + name: "valid UUID with curly braces should fail (RFC format required)", + value: types.StringValue("{550e8400-e29b-41d4-a716-446655440000}"), + expectError: true, + errorText: "Invalid UUID", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + v := IsUUID() + req := validator.StringRequest{ + Path: path.Root("test"), + PathExpression: path.MatchRoot("test"), + ConfigValue: tt.value, + } + resp := &validator.StringResponse{} + + v.ValidateString(context.Background(), req, resp) + + if tt.expectError { + require.True(t, resp.Diagnostics.HasError(), "Expected validation error but got none") + require.Contains(t, resp.Diagnostics.Errors()[0].Summary(), tt.errorText) + require.Contains(t, resp.Diagnostics.Errors()[0].Detail(), tt.value.ValueString()) + } else { + require.False(t, resp.Diagnostics.HasError(), "Unexpected validation error: %v", resp.Diagnostics) + } + }) + } +} diff --git a/provider/plugin_framework.go b/provider/plugin_framework.go index 4da1e743d..bba736241 100644 --- a/provider/plugin_framework.go +++ b/provider/plugin_framework.go @@ -20,6 +20,7 @@ import ( "github.com/elastic/terraform-provider-elasticstack/internal/fleet/integration_policy" "github.com/elastic/terraform-provider-elasticstack/internal/fleet/output" "github.com/elastic/terraform-provider-elasticstack/internal/fleet/server_host" + "github.com/elastic/terraform-provider-elasticstack/internal/kibana/connectors" "github.com/elastic/terraform-provider-elasticstack/internal/kibana/data_view" "github.com/elastic/terraform-provider-elasticstack/internal/kibana/import_saved_objects" "github.com/elastic/terraform-provider-elasticstack/internal/kibana/maintenance_window" @@ -105,6 +106,7 @@ func (p *Provider) Resources(ctx context.Context) []func() resource.Resource { func() resource.Resource { return &synthetics.Resource{} }, func() resource.Resource { return &api_key.Resource{} }, func() resource.Resource { return &data_stream_lifecycle.Resource{} }, + func() resource.Resource { return &connectors.Resource{} }, agent_policy.NewResource, integration.NewResource, integration_policy.NewResource, diff --git a/provider/provider.go b/provider/provider.go index 10291a2c9..3e78eee50 100644 --- a/provider/provider.go +++ b/provider/provider.go @@ -96,11 +96,10 @@ func New(version string) *schema.Provider { "elasticstack_elasticsearch_transform": transform.ResourceTransform(), "elasticstack_elasticsearch_watch": watcher.ResourceWatch(), - "elasticstack_kibana_alerting_rule": kibana.ResourceAlertingRule(), - "elasticstack_kibana_space": kibana.ResourceSpace(), - "elasticstack_kibana_action_connector": kibana.ResourceActionConnector(), - "elasticstack_kibana_security_role": kibana.ResourceRole(), - "elasticstack_kibana_slo": kibana.ResourceSlo(), + "elasticstack_kibana_alerting_rule": kibana.ResourceAlertingRule(), + "elasticstack_kibana_space": kibana.ResourceSpace(), + "elasticstack_kibana_security_role": kibana.ResourceRole(), + "elasticstack_kibana_slo": kibana.ResourceSlo(), }, } From 8d86de65ad36d5dc51d67e9adcef9e42c5033f01 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Thu, 11 Sep 2025 
08:03:45 +1000 Subject: [PATCH 50/66] Add inactivity_timeout support to Fleet agent policy resource with duration string type and version validation (#1285) * Initial plan * Add inactivity_timeout support to Fleet agent policy resource Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Update changelog and generate documentation for inactivity_timeout feature Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Add version check for inactivity_timeout field in Fleet agent policy - Added MinVersionInactivityTimeout constant for 8.7.0 - Added SupportsInactivityTimeout field to features struct - Updated buildFeatures to check for inactivity timeout support - Added version validation in toAPICreateModel and toAPIUpdateModel - Updated test to use proper version check for inactivity_timeout Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Add comprehensive tests for inactivity_timeout version validation - Added unit tests to verify MinVersionInactivityTimeout constant - Added comprehensive validation tests for version checking logic - Verified version validation works for both create and update operations - Ensured null/unset inactivity_timeout values don't trigger validation errors Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Fix inactivity_timeout and supports_agentless field handling in Fleet agent policy Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Convert inactivity_timeout to string duration type with seconds conversion Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Update documentation for inactivity_timeout duration string support Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Apply code review suggestions: improve duration conversion and simplify test helpers Co-authored-by: tobio <444668+tobio@users.noreply.github.com> --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: tobio <444668+tobio@users.noreply.github.com> --- CHANGELOG.md | 1 + docs/resources/fleet_agent_policy.md | 1 + internal/fleet/agent_policy/models.go | 125 +++++++++++++------ internal/fleet/agent_policy/resource.go | 7 ++ internal/fleet/agent_policy/resource_test.go | 37 ++++++ internal/fleet/agent_policy/schema.go | 7 ++ internal/fleet/agent_policy/version_test.go | 106 ++++++++++++++++ 7 files changed, 247 insertions(+), 37 deletions(-) create mode 100644 internal/fleet/agent_policy/version_test.go diff --git a/CHANGELOG.md b/CHANGELOG.md index d58890df0..9bfc460cd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,6 +12,7 @@ - Support setting an explit `connector_id` in `elasticstack_kibana_action_connector`. This attribute already existed, but was being ignored by the provider. Setting the attribute will return an error in Elastic Stack v8.8 and lower since creating a connector with an explicit ID is not supported. 
([1260](https://github.com/elastic/terraform-provider-elasticstack/pull/1260)) - Migrate `elasticstack_kibana_action_connector` to the Terraform plugin framework ([#1269](https://github.com/elastic/terraform-provider-elasticstack/pull/1269)) - Migrate `elasticstack_elasticsearch_security_role_mapping` resource and data source to Terraform Plugin Framework ([#1279](https://github.com/elastic/terraform-provider-elasticstack/pull/1279)) +- Add support for `inactivity_timeout` in `elasticstack_fleet_agent_policy` ([#641](https://github.com/elastic/terraform-provider-elasticstack/issues/641)) ## [0.11.17] - 2025-07-21 diff --git a/docs/resources/fleet_agent_policy.md b/docs/resources/fleet_agent_policy.md index 9eb4d4ea7..d8252b5b2 100644 --- a/docs/resources/fleet_agent_policy.md +++ b/docs/resources/fleet_agent_policy.md @@ -51,6 +51,7 @@ resource "elasticstack_fleet_agent_policy" "test_policy" { - `download_source_id` (String) The identifier for the Elastic Agent binary download server. - `fleet_server_host_id` (String) The identifier for the Fleet server host. - `global_data_tags` (Attributes Map) User-defined data tags to apply to all inputs. Values can be strings (string_value) or numbers (number_value) but not both. Example -- key1 = {string_value = value1}, key2 = {number_value = 42} (see [below for nested schema](#nestedatt--global_data_tags)) +- `inactivity_timeout` (String) The inactivity timeout for the agent policy. If an agent does not report within this time period, it will be considered inactive. Supports duration strings (e.g., '30s', '2m', '1h'). - `monitor_logs` (Boolean) Enable collection of agent logs. - `monitor_metrics` (Boolean) Enable collection of agent metrics. - `monitoring_output_id` (String) The identifier for monitoring output. 
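A minimal usage sketch of the new attribute, assuming the duration-string format documented above; the policy name and timeout value are illustrative:

resource "elasticstack_fleet_agent_policy" "example" {
  name      = "Example Agent Policy"
  namespace = "default"

  # Illustrative value; duration strings such as "30s", "2m", or "1h" are accepted per the attribute docs above.
  inactivity_timeout = "2m"
}

As the models.go changes below show, the provider parses the duration and sends it to the Fleet API as a number of seconds, so "2m" and "120s" describe the same timeout.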
diff --git a/internal/fleet/agent_policy/models.go b/internal/fleet/agent_policy/models.go index 717466e14..4997d08ba 100644 --- a/internal/fleet/agent_policy/models.go +++ b/internal/fleet/agent_policy/models.go @@ -4,9 +4,11 @@ import ( "context" "fmt" "slices" + "time" "github.com/elastic/terraform-provider-elasticstack/generated/kbapi" "github.com/elastic/terraform-provider-elasticstack/internal/utils" + "github.com/elastic/terraform-provider-elasticstack/internal/utils/customtypes" "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/diag" @@ -17,6 +19,7 @@ import ( type features struct { SupportsGlobalDataTags bool SupportsSupportsAgentless bool + SupportsInactivityTimeout bool } type globalDataTagsItemModel struct { @@ -25,21 +28,22 @@ type globalDataTagsItemModel struct { } type agentPolicyModel struct { - ID types.String `tfsdk:"id"` - PolicyID types.String `tfsdk:"policy_id"` - Name types.String `tfsdk:"name"` - Namespace types.String `tfsdk:"namespace"` - Description types.String `tfsdk:"description"` - DataOutputId types.String `tfsdk:"data_output_id"` - MonitoringOutputId types.String `tfsdk:"monitoring_output_id"` - FleetServerHostId types.String `tfsdk:"fleet_server_host_id"` - DownloadSourceId types.String `tfsdk:"download_source_id"` - MonitorLogs types.Bool `tfsdk:"monitor_logs"` - MonitorMetrics types.Bool `tfsdk:"monitor_metrics"` - SysMonitoring types.Bool `tfsdk:"sys_monitoring"` - SkipDestroy types.Bool `tfsdk:"skip_destroy"` - SupportsAgentless types.Bool `tfsdk:"supports_agentless"` - GlobalDataTags types.Map `tfsdk:"global_data_tags"` //> globalDataTagsModel + ID types.String `tfsdk:"id"` + PolicyID types.String `tfsdk:"policy_id"` + Name types.String `tfsdk:"name"` + Namespace types.String `tfsdk:"namespace"` + Description types.String `tfsdk:"description"` + DataOutputId types.String `tfsdk:"data_output_id"` + MonitoringOutputId types.String `tfsdk:"monitoring_output_id"` + FleetServerHostId types.String `tfsdk:"fleet_server_host_id"` + DownloadSourceId types.String `tfsdk:"download_source_id"` + MonitorLogs types.Bool `tfsdk:"monitor_logs"` + MonitorMetrics types.Bool `tfsdk:"monitor_metrics"` + SysMonitoring types.Bool `tfsdk:"sys_monitoring"` + SkipDestroy types.Bool `tfsdk:"skip_destroy"` + SupportsAgentless types.Bool `tfsdk:"supports_agentless"` + InactivityTimeout customtypes.Duration `tfsdk:"inactivity_timeout"` + GlobalDataTags types.Map `tfsdk:"global_data_tags"` //> globalDataTagsModel } func (model *agentPolicyModel) populateFromAPI(ctx context.Context, data *kbapi.AgentPolicy) diag.Diagnostics { @@ -73,6 +77,13 @@ func (model *agentPolicyModel) populateFromAPI(ctx context.Context, data *kbapi. 
model.Name = types.StringValue(data.Name) model.Namespace = types.StringValue(data.Namespace) model.SupportsAgentless = types.BoolPointerValue(data.SupportsAgentless) + if data.InactivityTimeout != nil { + // Convert seconds to duration string + d := time.Duration(*data.InactivityTimeout * float32(time.Second)).Truncate(time.Second) + model.InactivityTimeout = customtypes.NewDurationValue(d.String()) + } else { + model.InactivityTimeout = customtypes.NewDurationNull() + } if utils.Deref(data.GlobalDataTags) != nil { diags := diag.Diagnostics{} var map0 = make(map[string]globalDataTagsItemModel) @@ -162,16 +173,6 @@ func (model *agentPolicyModel) toAPICreateModel(ctx context.Context, feat featur monitoring = append(monitoring, kbapi.PostFleetAgentPoliciesJSONBodyMonitoringEnabledMetrics) } - if utils.IsKnown(model.SupportsAgentless) && !feat.SupportsSupportsAgentless { - return kbapi.PostFleetAgentPoliciesJSONRequestBody{}, diag.Diagnostics{ - diag.NewAttributeErrorDiagnostic( - path.Root("supports_agentless"), - "Unsupported Elasticsearch version", - fmt.Sprintf("Supports agentless is only supported in Elastic Stack %s and above", MinSupportsAgentlessVersion), - ), - } - } - body := kbapi.PostFleetAgentPoliciesJSONRequestBody{ DataOutputId: model.DataOutputId.ValueStringPointer(), Description: model.Description.ValueStringPointer(), @@ -182,7 +183,37 @@ func (model *agentPolicyModel) toAPICreateModel(ctx context.Context, feat featur MonitoringOutputId: model.MonitoringOutputId.ValueStringPointer(), Name: model.Name.ValueString(), Namespace: model.Namespace.ValueString(), - SupportsAgentless: model.SupportsAgentless.ValueBoolPointer(), + } + + if utils.IsKnown(model.SupportsAgentless) { + if !feat.SupportsSupportsAgentless { + return kbapi.PostFleetAgentPoliciesJSONRequestBody{}, diag.Diagnostics{ + diag.NewAttributeErrorDiagnostic( + path.Root("supports_agentless"), + "Unsupported Elasticsearch version", + fmt.Sprintf("Supports agentless is only supported in Elastic Stack %s and above", MinSupportsAgentlessVersion), + ), + } + } + body.SupportsAgentless = model.SupportsAgentless.ValueBoolPointer() + } + + if utils.IsKnown(model.InactivityTimeout) { + if !feat.SupportsInactivityTimeout { + return kbapi.PostFleetAgentPoliciesJSONRequestBody{}, diag.Diagnostics{ + diag.NewAttributeErrorDiagnostic( + path.Root("inactivity_timeout"), + "Unsupported Elasticsearch version", + fmt.Sprintf("Inactivity timeout is only supported in Elastic Stack %s and above", MinVersionInactivityTimeout), + ), + } + } + duration, diags := model.InactivityTimeout.Parse() + if diags.HasError() { + return kbapi.PostFleetAgentPoliciesJSONRequestBody{}, diags + } + seconds := float32(duration.Seconds()) + body.InactivityTimeout = &seconds } tags, diags := model.convertGlobalDataTags(ctx, feat) @@ -203,16 +234,6 @@ func (model *agentPolicyModel) toAPIUpdateModel(ctx context.Context, feat featur monitoring = append(monitoring, kbapi.PutFleetAgentPoliciesAgentpolicyidJSONBodyMonitoringEnabledMetrics) } - if utils.IsKnown(model.SupportsAgentless) && !feat.SupportsSupportsAgentless { - return kbapi.PutFleetAgentPoliciesAgentpolicyidJSONRequestBody{}, diag.Diagnostics{ - diag.NewAttributeErrorDiagnostic( - path.Root("supports_agentless"), - "Unsupported Elasticsearch version", - fmt.Sprintf("Supports agentless is only supported in Elastic Stack %s and above", MinSupportsAgentlessVersion), - ), - } - } - body := kbapi.PutFleetAgentPoliciesAgentpolicyidJSONRequestBody{ DataOutputId: model.DataOutputId.ValueStringPointer(), 
Description: model.Description.ValueStringPointer(), @@ -222,7 +243,37 @@ func (model *agentPolicyModel) toAPIUpdateModel(ctx context.Context, feat featur MonitoringOutputId: model.MonitoringOutputId.ValueStringPointer(), Name: model.Name.ValueString(), Namespace: model.Namespace.ValueString(), - SupportsAgentless: model.SupportsAgentless.ValueBoolPointer(), + } + + if utils.IsKnown(model.SupportsAgentless) { + if !feat.SupportsSupportsAgentless { + return kbapi.PutFleetAgentPoliciesAgentpolicyidJSONRequestBody{}, diag.Diagnostics{ + diag.NewAttributeErrorDiagnostic( + path.Root("supports_agentless"), + "Unsupported Elasticsearch version", + fmt.Sprintf("Supports agentless is only supported in Elastic Stack %s and above", MinSupportsAgentlessVersion), + ), + } + } + body.SupportsAgentless = model.SupportsAgentless.ValueBoolPointer() + } + + if utils.IsKnown(model.InactivityTimeout) { + if !feat.SupportsInactivityTimeout { + return kbapi.PutFleetAgentPoliciesAgentpolicyidJSONRequestBody{}, diag.Diagnostics{ + diag.NewAttributeErrorDiagnostic( + path.Root("inactivity_timeout"), + "Unsupported Elasticsearch version", + fmt.Sprintf("Inactivity timeout is only supported in Elastic Stack %s and above", MinVersionInactivityTimeout), + ), + } + } + duration, diags := model.InactivityTimeout.Parse() + if diags.HasError() { + return kbapi.PutFleetAgentPoliciesAgentpolicyidJSONRequestBody{}, diags + } + seconds := float32(duration.Seconds()) + body.InactivityTimeout = &seconds } tags, diags := model.convertGlobalDataTags(ctx, feat) diff --git a/internal/fleet/agent_policy/resource.go b/internal/fleet/agent_policy/resource.go index 34e4a3cf4..fbeda8d50 100644 --- a/internal/fleet/agent_policy/resource.go +++ b/internal/fleet/agent_policy/resource.go @@ -21,6 +21,7 @@ var ( var ( MinVersionGlobalDataTags = version.Must(version.NewVersion("8.15.0")) MinSupportsAgentlessVersion = version.Must(version.NewVersion("8.15.0")) + MinVersionInactivityTimeout = version.Must(version.NewVersion("8.7.0")) ) // NewResource is a helper function to simplify the provider implementation. 
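Going the other way, `populateFromAPI` above converts the API's float32 seconds back into a duration string truncated to whole seconds, so the value stored in state stays stable across reads. The sketch below shows that reverse mapping in isolation; it uses float64 for clarity rather than the provider's exact float32 arithmetic, and the helper name is illustrative.

```go
package main

import (
	"fmt"
	"time"
)

// secondsToDurationString sketches the read-back direction: the Fleet API
// reports inactivity_timeout in seconds, and the provider stores a duration
// string truncated to whole seconds in state. Illustrative helper only.
func secondsToDurationString(secs float64) string {
	d := time.Duration(secs * float64(time.Second)).Truncate(time.Second)
	return d.String()
}

func main() {
	fmt.Println(secondsToDurationString(120))  // 2m0s
	fmt.Println(secondsToDurationString(3600)) // 1h0m0s
}
```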
@@ -57,8 +58,14 @@ func (r *agentPolicyResource) buildFeatures(ctx context.Context) (features, diag return features{}, utils.FrameworkDiagsFromSDK(diags) } + supportsInactivityTimeout, diags := r.client.EnforceMinVersion(ctx, MinVersionInactivityTimeout) + if diags.HasError() { + return features{}, utils.FrameworkDiagsFromSDK(diags) + } + return features{ SupportsGlobalDataTags: supportsGDT, SupportsSupportsAgentless: supportsSupportsAgentless, + SupportsInactivityTimeout: supportsInactivityTimeout, }, nil } diff --git a/internal/fleet/agent_policy/resource_test.go b/internal/fleet/agent_policy/resource_test.go index 8f90217f4..acd184210 100644 --- a/internal/fleet/agent_policy/resource_test.go +++ b/internal/fleet/agent_policy/resource_test.go @@ -143,6 +143,19 @@ func TestAccResourceAgentPolicy(t *testing.T) { ImportStateVerify: true, ImportStateVerifyIgnore: []string{"skip_destroy"}, }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(agent_policy.MinVersionInactivityTimeout), + Config: testAccResourceAgentPolicyCreateWithInactivityTimeout(policyName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "name", fmt.Sprintf("Policy %s", policyName)), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "namespace", "default"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "description", "Test Agent Policy with Inactivity Timeout"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "monitor_logs", "true"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "monitor_metrics", "false"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "skip_destroy", "false"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "inactivity_timeout", "2m"), + ), + }, { SkipFunc: versionutils.CheckIfVersionIsUnsupported(agent_policy.MinVersionGlobalDataTags), Config: testAccResourceAgentPolicyCreateWithGlobalDataTags(policyNameGlobalDataTags, false), @@ -295,6 +308,30 @@ data "elasticstack_fleet_enrollment_tokens" "test_policy" { `, fmt.Sprintf("Policy %s", id), skipDestroy) } +func testAccResourceAgentPolicyCreateWithInactivityTimeout(id string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +resource "elasticstack_fleet_agent_policy" "test_policy" { + name = "%s" + namespace = "default" + description = "Test Agent Policy with Inactivity Timeout" + monitor_logs = true + monitor_metrics = false + skip_destroy = false + inactivity_timeout = "2m" +} + +data "elasticstack_fleet_enrollment_tokens" "test_policy" { + policy_id = elasticstack_fleet_agent_policy.test_policy.policy_id +} + +`, fmt.Sprintf("Policy %s", id)) +} + func testAccResourceAgentPolicyCreateWithBadGlobalDataTags(id string, skipDestroy bool) string { return fmt.Sprintf(` provider "elasticstack" { diff --git a/internal/fleet/agent_policy/schema.go b/internal/fleet/agent_policy/schema.go index f1c531a29..be49763ef 100644 --- a/internal/fleet/agent_policy/schema.go +++ b/internal/fleet/agent_policy/schema.go @@ -3,6 +3,7 @@ package agent_policy import ( "context" + "github.com/elastic/terraform-provider-elasticstack/internal/utils/customtypes" "github.com/hashicorp/terraform-plugin-framework-validators/float32validator" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/attr" 
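The `buildFeatures` change above gates the new attribute on stack version 8.7.0 and newer via the client's `EnforceMinVersion` check, and `toAPICreateModel`/`toAPIUpdateModel` return an attribute-scoped diagnostic when `inactivity_timeout` is set against an older stack. A minimal sketch of the underlying version comparison, using hashicorp/go-version (already imported by the provider), is shown below; the standalone function and the sample stack versions are illustrative, not the provider's actual client call.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/go-version"
)

// minInactivityTimeoutVersion matches the MinVersionInactivityTimeout
// constant added in resource.go above.
var minInactivityTimeoutVersion = version.Must(version.NewVersion("8.7.0"))

// supportsInactivityTimeout is an illustrative stand-in for the
// EnforceMinVersion check in buildFeatures: it reports whether a given
// stack version may use the inactivity_timeout attribute.
func supportsInactivityTimeout(stack string) (bool, error) {
	v, err := version.NewVersion(stack)
	if err != nil {
		return false, err
	}
	return v.GreaterThanOrEqual(minInactivityTimeoutVersion), nil
}

func main() {
	for _, s := range []string{"8.6.2", "8.7.0", "9.0.0"} {
		ok, err := supportsInactivityTimeout(s)
		if err != nil {
			panic(err)
		}
		fmt.Printf("stack %s supports inactivity_timeout: %v\n", s, ok)
	}
}
```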
@@ -97,6 +98,12 @@ func getSchema() schema.Schema { boolplanmodifier.RequiresReplace(), }, }, + "inactivity_timeout": schema.StringAttribute{ + Description: "The inactivity timeout for the agent policy. If an agent does not report within this time period, it will be considered inactive. Supports duration strings (e.g., '30s', '2m', '1h').", + Computed: true, + Optional: true, + CustomType: customtypes.DurationType{}, + }, "global_data_tags": schema.MapNestedAttribute{ Description: "User-defined data tags to apply to all inputs. Values can be strings (string_value) or numbers (number_value) but not both. Example -- key1 = {string_value = value1}, key2 = {number_value = 42}", NestedObject: schema.NestedAttributeObject{ diff --git a/internal/fleet/agent_policy/version_test.go b/internal/fleet/agent_policy/version_test.go new file mode 100644 index 000000000..8cdc0d133 --- /dev/null +++ b/internal/fleet/agent_policy/version_test.go @@ -0,0 +1,106 @@ +package agent_policy + +import ( + "context" + "github.com/elastic/terraform-provider-elasticstack/internal/utils/customtypes" + "github.com/hashicorp/go-version" + "github.com/hashicorp/terraform-plugin-framework/types" + "testing" +) + +func TestMinVersionInactivityTimeout(t *testing.T) { + // Test that the MinVersionInactivityTimeout constant is set correctly + expected := "8.7.0" + actual := MinVersionInactivityTimeout.String() + if actual != expected { + t.Errorf("Expected MinVersionInactivityTimeout to be '%s', got '%s'", expected, actual) + } + + // Test version comparison - should be greater than 8.6.0 + olderVersion := version.Must(version.NewVersion("8.6.0")) + if MinVersionInactivityTimeout.LessThan(olderVersion) { + t.Errorf("MinVersionInactivityTimeout (%s) should be greater than %s", MinVersionInactivityTimeout.String(), olderVersion.String()) + } + + // Test version comparison - should be less than 8.8.0 + newerVersion := version.Must(version.NewVersion("8.8.0")) + if MinVersionInactivityTimeout.GreaterThan(newerVersion) { + t.Errorf("MinVersionInactivityTimeout (%s) should be less than %s", MinVersionInactivityTimeout.String(), newerVersion.String()) + } +} + +func TestInactivityTimeoutVersionValidation(t *testing.T) { + ctx := context.Background() + + // Test case where inactivity_timeout is not supported (older version) + model := &agentPolicyModel{ + Name: types.StringValue("test"), + Namespace: types.StringValue("default"), + InactivityTimeout: customtypes.NewDurationValue("2m"), + } + + // Create features with inactivity timeout NOT supported + feat := features{ + SupportsInactivityTimeout: false, + } + + // Test toAPICreateModel - should return error when inactivity_timeout is used but not supported + _, diags := model.toAPICreateModel(ctx, feat) + if !diags.HasError() { + t.Error("Expected error when using inactivity_timeout on unsupported version, but got none") + } + + // Check that the error message contains the expected text + found := false + for _, diag := range diags { + if diag.Summary() == "Unsupported Elasticsearch version" { + found = true + break + } + } + if !found { + t.Error("Expected 'Unsupported Elasticsearch version' error, but didn't find it") + } + + // Test toAPIUpdateModel - should return error when inactivity_timeout is used but not supported + _, diags = model.toAPIUpdateModel(ctx, feat) + if !diags.HasError() { + t.Error("Expected error when using inactivity_timeout on unsupported version in update, but got none") + } + + // Test case where inactivity_timeout IS supported (newer version) + 
featSupported := features{ + SupportsInactivityTimeout: true, + } + + // Test toAPICreateModel - should NOT return error when inactivity_timeout is supported + _, diags = model.toAPICreateModel(ctx, featSupported) + if diags.HasError() { + t.Errorf("Did not expect error when using inactivity_timeout on supported version: %v", diags) + } + + // Test toAPIUpdateModel - should NOT return error when inactivity_timeout is supported + _, diags = model.toAPIUpdateModel(ctx, featSupported) + if diags.HasError() { + t.Errorf("Did not expect error when using inactivity_timeout on supported version in update: %v", diags) + } + + // Test case where inactivity_timeout is not set (should not cause validation errors) + modelWithoutTimeout := &agentPolicyModel{ + Name: types.StringValue("test"), + Namespace: types.StringValue("default"), + // InactivityTimeout is not set (null/unknown) + } + + // Test toAPICreateModel - should NOT return error when inactivity_timeout is not set, even on unsupported version + _, diags = modelWithoutTimeout.toAPICreateModel(ctx, feat) + if diags.HasError() { + t.Errorf("Did not expect error when inactivity_timeout is not set: %v", diags) + } + + // Test toAPIUpdateModel - should NOT return error when inactivity_timeout is not set, even on unsupported version + _, diags = modelWithoutTimeout.toAPIUpdateModel(ctx, feat) + if diags.HasError() { + t.Errorf("Did not expect error when inactivity_timeout is not set in update: %v", diags) + } +} From d833b8fb3d3a487354cb538efd640f509a733247 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Mon, 15 Sep 2025 13:30:19 +1000 Subject: [PATCH 51/66] Update generated SLO client. (#1303) * Regenerate SLO client * Update SLO resource for spec updates --- Makefile | 2 +- generated/slo-spec.yml | 768 ++++++++++-- generated/slo/.openapi-generator/FILES | 56 +- generated/slo/README.md | 54 +- generated/slo/api/openapi.yaml | 994 +++++++++++++--- generated/slo/api_slo.go | 1051 +++++++++++++++-- generated/slo/client.go | 4 +- generated/slo/configuration.go | 25 +- ...SummaryRequest.md => BulkDeleteRequest.md} | 22 +- generated/slo/docs/BulkDeleteResponse.md | 56 + .../slo/docs/BulkDeleteStatusResponse.md | 108 ++ .../BulkDeleteStatusResponseResultsInner.md | 108 ++ generated/slo/docs/BulkPurgeRollupRequest.md | 72 ++ .../docs/BulkPurgeRollupRequestPurgePolicy.md | 108 ++ .../BulkPurgeRollupRequestPurgePolicyOneOf.md | 82 ++ ...BulkPurgeRollupRequestPurgePolicyOneOf1.md | 82 ++ generated/slo/docs/BulkPurgeRollupResponse.md | 56 + generated/slo/docs/CreateSloRequest.md | 8 +- generated/slo/docs/Filter.md | 82 ++ generated/slo/docs/FilterMeta.md | 352 ++++++ .../slo/docs/FindSloDefinitionsResponse.md | 186 +++ .../docs/FindSloDefinitionsResponseOneOf.md | 134 +++ .../docs/FindSloDefinitionsResponseOneOf1.md | 186 +++ generated/slo/docs/FindSloResponse.md | 60 +- .../{SloResponseGroupBy.md => GroupBy.md} | 14 +- .../docs/HistoricalSummaryResponseInner.md | 134 --- .../IndicatorPropertiesCustomKqlParams.md | 52 +- .../IndicatorPropertiesCustomMetricParams.md | 26 + ...rtiesCustomMetricParamsGoodMetricsInner.md | 2 +- ...icatorPropertiesCustomMetricParamsTotal.md | 10 +- ...tiesCustomMetricParamsTotalMetricsInner.md | 119 -- .../IndicatorPropertiesHistogramParams.md | 26 + ...ndicatorPropertiesTimesliceMetricParams.md | 26 + generated/slo/docs/KqlWithFilters.md | 82 ++ generated/slo/docs/KqlWithFiltersGood.md | 82 ++ generated/slo/docs/KqlWithFiltersOneOf.md | 82 ++ generated/slo/docs/KqlWithFiltersTotal.md | 82 ++ generated/slo/docs/Settings.md | 56 +- 
generated/slo/docs/SloApi.md | 400 ++++++- ...loResponse.md => SloDefinitionResponse.md} | 165 ++- generated/slo/docs/SloWithSummaryResponse.md | 387 ++++++ ....md => SloWithSummaryResponseIndicator.md} | 26 +- generated/slo/docs/UpdateSloRequest.md | 8 +- generated/slo/model_400_response.go | 2 +- generated/slo/model_401_response.go | 2 +- generated/slo/model_403_response.go | 2 +- generated/slo/model_404_response.go | 2 +- generated/slo/model_409_response.go | 2 +- generated/slo/model_budgeting_method.go | 2 +- generated/slo/model_bulk_delete_request.go | 116 ++ generated/slo/model_bulk_delete_response.go | 125 ++ .../slo/model_bulk_delete_status_response.go | 199 ++++ ...lk_delete_status_response_results_inner.go | 199 ++++ .../slo/model_bulk_purge_rollup_request.go | 143 +++ ..._bulk_purge_rollup_request_purge_policy.go | 145 +++ ...urge_rollup_request_purge_policy_one_of.go | 162 +++ ...ge_rollup_request_purge_policy_one_of_1.go | 162 +++ .../slo/model_bulk_purge_rollup_response.go | 125 ++ generated/slo/model_create_slo_request.go | 14 +- .../slo/model_create_slo_request_indicator.go | 2 +- generated/slo/model_create_slo_response.go | 2 +- .../slo/model_delete_slo_instances_request.go | 2 +- ...delete_slo_instances_request_list_inner.go | 2 +- generated/slo/model_error_budget.go | 2 +- generated/slo/model_filter.go | 160 +++ generated/slo/model_filter_meta.go | 531 +++++++++ .../model_find_slo_definitions_response.go | 145 +++ ...el_find_slo_definitions_response_one_of.go | 232 ++++ ..._find_slo_definitions_response_one_of_1.go | 311 +++++ generated/slo/model_find_slo_response.go | 93 +- ...response_group_by.go => model_group_by.go} | 52 +- .../slo/model_historical_summary_request.go | 116 -- ...model_historical_summary_response_inner.go | 232 ---- ...l_indicator_properties_apm_availability.go | 2 +- ...ator_properties_apm_availability_params.go | 2 +- .../model_indicator_properties_apm_latency.go | 2 +- ...indicator_properties_apm_latency_params.go | 2 +- .../model_indicator_properties_custom_kql.go | 4 +- ..._indicator_properties_custom_kql_params.go | 76 +- ...odel_indicator_properties_custom_metric.go | 2 +- ...dicator_properties_custom_metric_params.go | 39 +- ...or_properties_custom_metric_params_good.go | 2 +- ...custom_metric_params_good_metrics_inner.go | 4 +- ...r_properties_custom_metric_params_total.go | 14 +- ...ustom_metric_params_total_metrics_inner.go | 209 ---- .../model_indicator_properties_histogram.go | 2 +- ...l_indicator_properties_histogram_params.go | 39 +- ...icator_properties_histogram_params_good.go | 2 +- ...cator_properties_histogram_params_total.go | 2 +- ...l_indicator_properties_timeslice_metric.go | 2 +- ...ator_properties_timeslice_metric_params.go | 39 +- ...operties_timeslice_metric_params_metric.go | 2 +- ...lice_metric_params_metric_metrics_inner.go | 2 +- generated/slo/model_kql_with_filters.go | 145 +++ generated/slo/model_kql_with_filters_good.go | 145 +++ .../slo/model_kql_with_filters_one_of.go | 160 +++ generated/slo/model_kql_with_filters_total.go | 145 +++ generated/slo/model_objective.go | 2 +- generated/slo/model_settings.go | 92 +- ...se.go => model_slo_definition_response.go} | 233 ++-- .../slo/model_slo_with_summary_response.go | 557 +++++++++ ...el_slo_with_summary_response_indicator.go} | 92 +- generated/slo/model_summary.go | 2 +- generated/slo/model_summary_status.go | 2 +- generated/slo/model_time_window.go | 2 +- ...imeslice_metric_basic_metric_with_field.go | 2 +- ...model_timeslice_metric_doc_count_metric.go | 2 +- 
...odel_timeslice_metric_percentile_metric.go | 2 +- generated/slo/model_update_slo_request.go | 14 +- generated/slo/response.go | 2 +- generated/slo/utils.go | 2 +- internal/clients/kibana/slo.go | 16 +- internal/clients/kibana/slo_test.go | 14 +- internal/kibana/alerting.go | 4 +- internal/kibana/slo.go | 94 +- internal/models/slo.go | 2 +- 116 files changed, 10124 insertions(+), 1774 deletions(-) rename generated/slo/docs/{HistoricalSummaryRequest.md => BulkDeleteRequest.md} (57%) create mode 100644 generated/slo/docs/BulkDeleteResponse.md create mode 100644 generated/slo/docs/BulkDeleteStatusResponse.md create mode 100644 generated/slo/docs/BulkDeleteStatusResponseResultsInner.md create mode 100644 generated/slo/docs/BulkPurgeRollupRequest.md create mode 100644 generated/slo/docs/BulkPurgeRollupRequestPurgePolicy.md create mode 100644 generated/slo/docs/BulkPurgeRollupRequestPurgePolicyOneOf.md create mode 100644 generated/slo/docs/BulkPurgeRollupRequestPurgePolicyOneOf1.md create mode 100644 generated/slo/docs/BulkPurgeRollupResponse.md create mode 100644 generated/slo/docs/Filter.md create mode 100644 generated/slo/docs/FilterMeta.md create mode 100644 generated/slo/docs/FindSloDefinitionsResponse.md create mode 100644 generated/slo/docs/FindSloDefinitionsResponseOneOf.md create mode 100644 generated/slo/docs/FindSloDefinitionsResponseOneOf1.md rename generated/slo/docs/{SloResponseGroupBy.md => GroupBy.md} (65%) delete mode 100644 generated/slo/docs/HistoricalSummaryResponseInner.md delete mode 100644 generated/slo/docs/IndicatorPropertiesCustomMetricParamsTotalMetricsInner.md create mode 100644 generated/slo/docs/KqlWithFilters.md create mode 100644 generated/slo/docs/KqlWithFiltersGood.md create mode 100644 generated/slo/docs/KqlWithFiltersOneOf.md create mode 100644 generated/slo/docs/KqlWithFiltersTotal.md rename generated/slo/docs/{SloResponse.md => SloDefinitionResponse.md} (60%) create mode 100644 generated/slo/docs/SloWithSummaryResponse.md rename generated/slo/docs/{SloResponseIndicator.md => SloWithSummaryResponseIndicator.md} (57%) create mode 100644 generated/slo/model_bulk_delete_request.go create mode 100644 generated/slo/model_bulk_delete_response.go create mode 100644 generated/slo/model_bulk_delete_status_response.go create mode 100644 generated/slo/model_bulk_delete_status_response_results_inner.go create mode 100644 generated/slo/model_bulk_purge_rollup_request.go create mode 100644 generated/slo/model_bulk_purge_rollup_request_purge_policy.go create mode 100644 generated/slo/model_bulk_purge_rollup_request_purge_policy_one_of.go create mode 100644 generated/slo/model_bulk_purge_rollup_request_purge_policy_one_of_1.go create mode 100644 generated/slo/model_bulk_purge_rollup_response.go create mode 100644 generated/slo/model_filter.go create mode 100644 generated/slo/model_filter_meta.go create mode 100644 generated/slo/model_find_slo_definitions_response.go create mode 100644 generated/slo/model_find_slo_definitions_response_one_of.go create mode 100644 generated/slo/model_find_slo_definitions_response_one_of_1.go rename generated/slo/{model_slo_response_group_by.go => model_group_by.go} (53%) delete mode 100644 generated/slo/model_historical_summary_request.go delete mode 100644 generated/slo/model_historical_summary_response_inner.go delete mode 100644 generated/slo/model_indicator_properties_custom_metric_params_total_metrics_inner.go create mode 100644 generated/slo/model_kql_with_filters.go create mode 100644 generated/slo/model_kql_with_filters_good.go 
create mode 100644 generated/slo/model_kql_with_filters_one_of.go create mode 100644 generated/slo/model_kql_with_filters_total.go rename generated/slo/{model_slo_response.go => model_slo_definition_response.go} (58%) create mode 100644 generated/slo/model_slo_with_summary_response.go rename generated/slo/{model_slo_response_indicator.go => model_slo_with_summary_response_indicator.go} (62%) diff --git a/Makefile b/Makefile index 9a534f1e5..bac041a4c 100644 --- a/Makefile +++ b/Makefile @@ -350,7 +350,7 @@ generate-slo-client: tools ## generate Kibana slo client -o /local/generated/slo \ --type-mappings=float32=float64 @ rm -rf generated/slo/go.mod generated/slo/go.sum generated/slo/test - @ go fmt ./generated/... + @ go fmt ./generated/slo/... .PHONY: generate-clients generate-clients: generate-alerting-client generate-slo-client generate-connectors-client ## generate all clients diff --git a/generated/slo-spec.yml b/generated/slo-spec.yml index d42e6831c..94ec0e6de 100644 --- a/generated/slo-spec.yml +++ b/generated/slo-spec.yml @@ -2,15 +2,17 @@ openapi: 3.0.1 info: title: SLOs description: OpenAPI schema for SLOs endpoints - version: '1.0' + version: '1.1' contact: name: Actionable Observability Team license: name: Elastic License 2.0 url: https://www.elastic.co/licensing/elastic-license servers: - - url: http://localhost:5601 - description: local + - url: https://{kibana_url} + variables: + kibana_url: + default: localhost:5601 security: - basicAuth: [] - apiKeyAuth: [] @@ -20,7 +22,7 @@ tags: paths: /s/{spaceId}/api/observability/slos: post: - summary: Creates an SLO. + summary: Create an SLO operationId: createSloOp description: | You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -66,10 +68,8 @@ paths: application/json: schema: $ref: '#/components/schemas/409_response' - servers: - - url: https://localhost:5601 get: - summary: Retrieves a paginated list of SLOs + summary: Get a paginated list of SLOs operationId: findSlosOp description: | You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -84,16 +84,30 @@ paths: schema: type: string example: 'slo.name:latency* and slo.tags : "prod"' + - name: size + in: query + description: The page size to use for cursor-based pagination, must be greater or equal than 1 + schema: + type: integer + default: 1 + example: 1 + - name: searchAfter + in: query + description: The cursor to use for fetching the results from, when using a cursor-base pagination. 
+ schema: + type: array + items: + type: string - name: page in: query - description: The page number to return + description: The page to use for pagination, must be greater or equal than 1 schema: type: integer default: 1 example: 1 - name: perPage in: query - description: The number of SLOs to return per page + description: Number of SLOs returned by page schema: type: integer default: 25 @@ -121,6 +135,11 @@ paths: - desc default: asc example: asc + - name: hideStale + in: query + description: Hide stale SLOs from the list as defined by stale SLO threshold in SLO settings + schema: + type: boolean responses: '200': description: Successful request @@ -154,7 +173,7 @@ paths: $ref: '#/components/schemas/404_response' /s/{spaceId}/api/observability/slos/{sloId}: get: - summary: Retrieves a SLO + summary: Get an SLO operationId: getSloOp description: | You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -176,7 +195,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/slo_response' + $ref: '#/components/schemas/slo_with_summary_response' '400': description: Bad request content: @@ -202,7 +221,7 @@ paths: schema: $ref: '#/components/schemas/404_response' put: - summary: Updates an SLO + summary: Update an SLO operationId: updateSloOp description: | You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -224,7 +243,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/slo_response' + $ref: '#/components/schemas/slo_definition_response' '400': description: Bad request content: @@ -250,7 +269,7 @@ paths: schema: $ref: '#/components/schemas/404_response' delete: - summary: Deletes an SLO + summary: Delete an SLO operationId: deleteSloOp description: | You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -289,7 +308,7 @@ paths: $ref: '#/components/schemas/404_response' /s/{spaceId}/api/observability/slos/{sloId}/enable: post: - summary: Enables an SLO + summary: Enable an SLO operationId: enableSloOp description: | You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -328,8 +347,47 @@ paths: $ref: '#/components/schemas/404_response' /s/{spaceId}/api/observability/slos/{sloId}/disable: post: - summary: Disables an SLO + summary: Disable an SLO operationId: disableSloOp + description: | + You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. 
+ tags: + - slo + parameters: + - $ref: '#/components/parameters/kbn_xsrf' + - $ref: '#/components/parameters/space_id' + - $ref: '#/components/parameters/slo_id' + responses: + '204': + description: Successful request + '400': + description: Bad request + content: + application/json: + schema: + $ref: '#/components/schemas/400_response' + '401': + description: Unauthorized response + content: + application/json: + schema: + $ref: '#/components/schemas/401_response' + '403': + description: Unauthorized response + content: + application/json: + schema: + $ref: '#/components/schemas/403_response' + '404': + description: Not found response + content: + application/json: + schema: + $ref: '#/components/schemas/404_response' + /s/{spaceId}/api/observability/slos/{sloId}/_reset: + post: + summary: Reset an SLO + operationId: resetSloOp description: | You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. tags: @@ -341,6 +399,10 @@ paths: responses: '200': description: Successful request + content: + application/json: + schema: + $ref: '#/components/schemas/slo_definition_response' '400': description: Bad request content: @@ -365,12 +427,12 @@ paths: application/json: schema: $ref: '#/components/schemas/404_response' - /s/{spaceId}/internal/observability/slos/_historical_summary: + /s/{spaceId}/api/observability/slos/_bulk_purge_rollup: post: - summary: Retrieves the historical summary for a list of SLOs - operationId: historicalSummaryOp + summary: Batch delete rollup and summary data + operationId: deleteRollupDataOp description: | - You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + The deletion occurs for the specified list of `sloId`. You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. tags: - slo parameters: @@ -381,14 +443,81 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/historical_summary_request' + $ref: '#/components/schemas/bulk_purge_rollup_request' responses: '200': description: Successful request content: application/json: schema: - $ref: '#/components/schemas/historical_summary_response' + $ref: '#/components/schemas/bulk_purge_rollup_response' + '400': + description: Bad request + content: + application/json: + schema: + $ref: '#/components/schemas/400_response' + '401': + description: Unauthorized response + content: + application/json: + schema: + $ref: '#/components/schemas/401_response' + '403': + description: Unauthorized response + content: + application/json: + schema: + $ref: '#/components/schemas/403_response' + /s/{spaceId}/internal/observability/slos/_definitions: + get: + summary: Get the SLO definitions + operationId: getDefinitionsOp + description: | + You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. 
+ tags: + - slo + parameters: + - $ref: '#/components/parameters/kbn_xsrf' + - $ref: '#/components/parameters/space_id' + - name: includeOutdatedOnly + in: query + description: Indicates if the API returns only outdated SLO or all SLO definitions + schema: + type: boolean + example: true + - name: tags + in: query + description: Filters the SLOs by tag + schema: + type: string + - name: search + in: query + description: Filters the SLOs by name + schema: + type: string + example: my service availability + - name: page + in: query + description: The page to use for pagination, must be greater or equal than 1 + schema: + type: number + example: 1 + - name: perPage + in: query + description: Number of SLOs returned by page + schema: + type: integer + default: 100 + maximum: 1000 + example: 100 + responses: + '200': + description: Successful request + content: + application/json: + schema: + $ref: '#/components/schemas/find_slo_definitions_response' '400': description: Bad request content: @@ -409,10 +538,10 @@ paths: $ref: '#/components/schemas/403_response' /s/{spaceId}/api/observability/slos/_delete_instances: post: - summary: Batch delete rollup and summary data for the matching list of sloId and instanceId + summary: Batch delete rollup and summary data operationId: deleteSloInstancesOp description: | - You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + The deletion occurs for the specified list of `sloId` and `instanceId`. You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. tags: - slo parameters: @@ -445,8 +574,91 @@ paths: application/json: schema: $ref: '#/components/schemas/403_response' - servers: - - url: https://localhost:5601 + /s/{spaceId}/api/observability/slos/_bulk_delete: + post: + summary: Bulk delete SLO definitions and their associated summary and rollup data. + operationId: bulkDeleteOp + description: | + Bulk delete SLO definitions and their associated summary and rollup data. This endpoint initiates a bulk deletion operation for SLOs, which may take some time to complete. The status of the operation can be checked using the `GET /api/slo/_bulk_delete/{taskId}` endpoint. + tags: + - slo + parameters: + - $ref: '#/components/parameters/kbn_xsrf' + - $ref: '#/components/parameters/space_id' + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/bulk_delete_request' + responses: + '200': + description: Successful response + content: + application/json: + schema: + $ref: '#/components/schemas/bulk_delete_response' + '400': + description: Bad request + content: + application/json: + schema: + $ref: '#/components/schemas/400_response' + '401': + description: Unauthorized response + content: + application/json: + schema: + $ref: '#/components/schemas/401_response' + '403': + description: Unauthorized response + content: + application/json: + schema: + $ref: '#/components/schemas/403_response' + /s/{spaceId}/api/observability/slos/_bulk_delete/{taskId}: + get: + summary: Retrieve the status of the bulk deletion + operationId: bulkDeleteStatusOp + description: | + Retrieve the status of the bulk deletion operation for SLOs. This endpoint returns the status of the bulk deletion operation, including whether it is completed and the results of the operation. 
+ tags: + - slo + parameters: + - $ref: '#/components/parameters/kbn_xsrf' + - $ref: '#/components/parameters/space_id' + - name: taskId + in: path + description: The task id of the bulk delete operation + required: true + schema: + type: string + example: 8853df00-ae2e-11ed-90af-09bb6422b258 + responses: + '200': + description: Successful response + content: + application/json: + schema: + $ref: '#/components/schemas/bulk_delete_status_response' + '400': + description: Bad request + content: + application/json: + schema: + $ref: '#/components/schemas/400_response' + '401': + description: Unauthorized response + content: + application/json: + schema: + $ref: '#/components/schemas/401_response' + '403': + description: Unauthorized response + content: + application/json: + schema: + $ref: '#/components/schemas/403_response' components: securitySchemes: basicAuth: @@ -529,12 +741,96 @@ components: description: The type of indicator. type: string example: sli.apm.transactionDuration + filter_meta: + title: FilterMeta + description: Defines properties for a filter + type: object + properties: + alias: + type: string + nullable: true + disabled: + type: boolean + negate: + type: boolean + controlledBy: + type: string + group: + type: string + index: + type: string + isMultiIndex: + type: boolean + type: + type: string + key: + type: string + params: + type: object + value: + type: string + field: + type: string + filter: + title: Filter + description: Defines properties for a filter + type: object + properties: + query: + type: object + meta: + $ref: '#/components/schemas/filter_meta' + kql_with_filters: + title: KQL with filters + description: Defines properties for a filter + oneOf: + - description: the KQL query to filter the documents with. + type: string + example: 'field.environment : "production" and service.name : "my-service"' + - type: object + properties: + kqlQuery: + type: string + filters: + type: array + items: + $ref: '#/components/schemas/filter' + kql_with_filters_good: + title: KQL query for good events + description: The KQL query used to define the good events. + oneOf: + - description: the KQL query to filter the documents with. + type: string + example: 'request.latency <= 150 and request.status_code : "2xx"' + - type: object + properties: + kqlQuery: + type: string + filters: + type: array + items: + $ref: '#/components/schemas/filter' + kql_with_filters_total: + title: KQL query for all events + description: The KQL query used to define all events. + oneOf: + - description: the KQL query to filter the documents with. + type: string + example: 'field.environment : "production" and service.name : "my-service"' + - type: object + properties: + kqlQuery: + type: string + filters: + type: array + items: + $ref: '#/components/schemas/filter' indicator_properties_custom_kql: - title: Custom KQL + title: Custom Query required: - type - params - description: Defines properties for a custom KQL indicator type + description: Defines properties for a custom query indicator type type: object properties: params: @@ -551,18 +847,16 @@ components: description: The index or index pattern to use type: string example: my-service-* - filter: - description: the KQL query to filter the documents with. + dataViewId: + description: The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. 
type: string - example: 'field.environment : "production" and service.name : "my-service"' + example: 03b80ab3-003d-498b-881c-3beedbaf1162 + filter: + $ref: '#/components/schemas/kql_with_filters' good: - description: the KQL query used to define the good events. - type: string - example: 'request.latency <= 150 and request.status_code : "2xx"' + $ref: '#/components/schemas/kql_with_filters_good' total: - description: the KQL query used to define all events. - type: string - example: '' + $ref: '#/components/schemas/kql_with_filters_total' timestampField: description: | The timestamp field used in the source indice. @@ -646,6 +940,10 @@ components: description: The index or index pattern to use type: string example: my-service-* + dataViewId: + description: The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. + type: string + example: 03b80ab3-003d-498b-881c-3beedbaf1162 filter: description: the KQL query to filter the documents with. type: string @@ -679,11 +977,12 @@ components: example: A pattern: ^[A-Z]$ aggregation: - description: The aggregation type of the metric. Only valid option is "sum" + description: The aggregation type of the metric. type: string example: sum enum: - sum + - doc_count field: description: The field of the metric. type: string @@ -691,7 +990,7 @@ components: filter: description: The filter to apply to the metric. type: string - example: 'processor.outcome: "success"' + example: 'processor.outcome: *' equation: description: The equation to calculate the "good" metric. type: string @@ -720,11 +1019,12 @@ components: example: A pattern: ^[A-Z]$ aggregation: - description: The aggregation type of the metric. Only valid option is "sum" + description: The aggregation type of the metric. type: string example: sum enum: - sum + - doc_count field: description: The field of the metric. type: string @@ -763,6 +1063,10 @@ components: description: The index or index pattern to use type: string example: my-service-* + dataViewId: + description: The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. + type: string + example: 03b80ab3-003d-498b-881c-3beedbaf1162 filter: description: the KQL query to filter the documents with. type: string @@ -838,12 +1142,13 @@ components: description: The type of indicator. type: string example: sli.histogram.custom - timeslice_metric_basic_metric_with_field: - title: Timeslice Metric Basic Metric with Field + timeslice_metric_percentile_metric: + title: Timeslice Metric Percentile Metric required: - name - aggregation - field + - percentile type: object properties: name: @@ -852,32 +1157,28 @@ components: example: A pattern: ^[A-Z]$ aggregation: - description: The aggregation type of the metric. + description: The aggregation type of the metric. Only valid option is "percentile" type: string - example: sum + example: percentile enum: - - sum - - avg - - min - - max - - std_deviation - - last_value - - cardinality + - percentile field: description: The field of the metric. type: string example: processor.processed + percentile: + description: The percentile value. + type: number + example: 95 filter: description: The filter to apply to the metric. 
type: string example: 'processor.outcome: "success"' - timeslice_metric_percentile_metric: - title: Timeslice Metric Percentile Metric + timeslice_metric_doc_count_metric: + title: Timeslice Metric Doc Count Metric required: - name - aggregation - - field - - percentile type: object properties: name: @@ -886,28 +1187,21 @@ components: example: A pattern: ^[A-Z]$ aggregation: - description: The aggregation type of the metric. Only valid option is "percentile" + description: The aggregation type of the metric. Only valid option is "doc_count" type: string - example: percentile + example: doc_count enum: - - percentile - field: - description: The field of the metric. - type: string - example: processor.processed - percentile: - description: The percentile value. - type: number - example: 95 + - doc_count filter: description: The filter to apply to the metric. type: string example: 'processor.outcome: "success"' - timeslice_metric_doc_count_metric: - title: Timeslice Metric Doc Count Metric + timeslice_metric_basic_metric_with_field: + title: Timeslice Metric Basic Metric with Field required: - name - aggregation + - field type: object properties: name: @@ -916,11 +1210,21 @@ components: example: A pattern: ^[A-Z]$ aggregation: - description: The aggregation type of the metric. Only valid option is "doc_count" + description: The aggregation type of the metric. type: string - example: doc_count + example: sum enum: - - doc_count + - sum + - avg + - min + - max + - std_deviation + - last_value + - cardinality + field: + description: The field of the metric. + type: string + example: processor.processed filter: description: The filter to apply to the metric. type: string @@ -946,6 +1250,10 @@ components: description: The index or index pattern to use type: string example: my-service-* + dataViewId: + description: The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. + type: string + example: 03b80ab3-003d-498b-881c-3beedbaf1162 filter: description: the KQL query to filter the documents with. type: string @@ -1043,10 +1351,16 @@ components: target: description: the target objective between 0 and 1 excluded type: number + minimum: 0 + maximum: 100 + exclusiveMinimum: true + exclusiveMaximum: true example: 0.99 timesliceTarget: description: the target objective for each slice when using a timeslices budgeting method type: number + minimum: 0 + maximum: 100 example: 0.995 timesliceWindow: description: the duration of each slice when using a timeslices budgeting method, as {duraton}{unit} @@ -1057,14 +1371,25 @@ components: description: Defines properties for SLO settings. type: object properties: + syncField: + description: The date field that is used to identify new documents in the source. It is strongly recommended to use a field that contains the ingest timestamp. If you use a different field, you might need to set the delay such that it accounts for data transmission delays. When unspecified, we use the indicator timestamp field. + type: string + example: event.ingested syncDelay: - description: The synch delay to apply to the transform. Default 1m + description: The time delay in minutes between the current time and the latest source data time. Increasing the value will delay any alerting. The default value is 1 minute. The minimum value is 1m and the maximum is 359m. It should always be greater then source index refresh interval. 
type: string + default: 1m example: 5m frequency: - description: Configure how often the transform runs, default 1m + description: The interval between checks for changes in the source data. The minimum value is 1m and the maximum is 59m. The default value is 1 minute. type: string + default: 1m example: 5m + preventInitialBackfill: + description: Start aggregating data from the time the SLO is created, instead of backfilling data from the beginning of the time window. + type: boolean + default: false + example: true summary_status: title: summary status type: string @@ -1115,7 +1440,20 @@ components: example: 0.9836 errorBudget: $ref: '#/components/schemas/error_budget' - slo_response: + group_by: + title: Group by + description: optional group by field or fields to use to generate an SLO per distinct value + example: + - - service.name + - service.name + - - service.name + - service.environment + oneOf: + - type: string + - type: array + items: + type: string + slo_with_summary_response: title: SLO response type: object required: @@ -1135,6 +1473,7 @@ components: - tags - createdAt - updatedAt + - version properties: id: description: The identifier of the SLO. @@ -1184,13 +1523,7 @@ components: type: boolean example: true groupBy: - description: optional group by field to use to generate an SLO per distinct value - oneOf: - - type: string - - type: array - items: - type: string - example: some.field + $ref: '#/components/schemas/group_by' instanceId: description: the value derived from the groupBy field, if present, otherwise '*' type: string @@ -1208,12 +1541,22 @@ components: description: The last update date type: string example: '2023-01-12T10:03:19.000Z' + version: + description: The internal SLO version + type: number + example: 2 find_slo_response: title: Find SLO response description: | A paginated response of SLOs matching the query. type: object properties: + size: + type: number + example: 25 + description: Size provided for cursor based pagination + searchAfter: + type: string page: type: number example: 1 @@ -1226,7 +1569,7 @@ components: results: type: array items: - $ref: '#/components/schemas/slo_response' + $ref: '#/components/schemas/slo_with_summary_response' 400_response: title: Bad request type: object @@ -1335,13 +1678,7 @@ components: settings: $ref: '#/components/schemas/settings' groupBy: - description: optional group by field to use to generate an SLO per distinct value - oneOf: - - type: string - - type: array - items: - type: string - example: some.field + $ref: '#/components/schemas/group_by' tags: description: List of tags type: array @@ -1402,48 +1739,197 @@ components: settings: $ref: '#/components/schemas/settings' groupBy: - description: optional group by field to use to generate an SLO per distinct value + $ref: '#/components/schemas/group_by' + tags: + description: List of tags + type: array + items: + type: string + slo_definition_response: + title: SLO definition response + type: object + required: + - id + - name + - description + - indicator + - timeWindow + - budgetingMethod + - objective + - settings + - revision + - enabled + - groupBy + - tags + - createdAt + - updatedAt + - version + properties: + id: + description: The identifier of the SLO. + type: string + example: 8853df00-ae2e-11ed-90af-09bb6422b258 + name: + description: The name of the SLO. + type: string + example: My Service SLO + description: + description: The description of the SLO. 
+ type: string + example: My SLO description + indicator: + discriminator: + propertyName: type + mapping: + sli.apm.transactionErrorRate: '#/components/schemas/indicator_properties_apm_availability' + sli.kql.custom: '#/components/schemas/indicator_properties_custom_kql' + sli.apm.transactionDuration: '#/components/schemas/indicator_properties_apm_latency' + sli.metric.custom: '#/components/schemas/indicator_properties_custom_metric' + sli.histogram.custom: '#/components/schemas/indicator_properties_histogram' + sli.metric.timeslice: '#/components/schemas/indicator_properties_timeslice_metric' oneOf: - - type: string - - type: array - items: - type: string - example: some.field + - $ref: '#/components/schemas/indicator_properties_custom_kql' + - $ref: '#/components/schemas/indicator_properties_apm_availability' + - $ref: '#/components/schemas/indicator_properties_apm_latency' + - $ref: '#/components/schemas/indicator_properties_custom_metric' + - $ref: '#/components/schemas/indicator_properties_histogram' + - $ref: '#/components/schemas/indicator_properties_timeslice_metric' + timeWindow: + $ref: '#/components/schemas/time_window' + budgetingMethod: + $ref: '#/components/schemas/budgeting_method' + objective: + $ref: '#/components/schemas/objective' + settings: + $ref: '#/components/schemas/settings' + revision: + description: The SLO revision + type: number + example: 2 + enabled: + description: Indicate if the SLO is enabled + type: boolean + example: true + groupBy: + $ref: '#/components/schemas/group_by' tags: description: List of tags type: array items: type: string - historical_summary_request: - title: Historical summary request + createdAt: + description: The creation date + type: string + example: '2023-01-12T10:03:19.000Z' + updatedAt: + description: The last update date + type: string + example: '2023-01-12T10:03:19.000Z' + version: + description: The internal SLO version + type: number + example: 2 + bulk_purge_rollup_request: + title: Bulk Purge Rollup data request + description: | + The bulk purge rollup data request takes a list of SLO ids and a purge policy, then deletes the rollup data according to the purge policy. This API can be used to remove the staled data of an instance SLO that no longer get updated. type: object required: - list + - purgePolicy properties: list: - description: The list of SLO identifiers to get the historical summary for + description: An array of slo ids type: array items: type: string + description: The SLO Definition id example: 8853df00-ae2e-11ed-90af-09bb6422b258 - historical_summary_response: - title: Historical summary response - type: object - additionalProperties: - type: array - items: + purgePolicy: + description: Policy that dictates which SLI documents to purge based on age type: object + oneOf: + - type: object + properties: + purgeType: + type: string + description: Specifies whether documents will be purged based on a specific age or on a timestamp + enum: + - fixed-age + age: + type: string + description: The duration to determine which documents to purge, formatted as {duration}{unit}. This value should be greater than or equal to the time window of every SLO provided. + example: 7d + - type: object + properties: + purgeType: + type: string + description: Specifies whether documents will be purged based on a specific age or on a timestamp + enum: + - fixed-time + timestamp: + type: string + description: The timestamp to determine which documents to purge, formatted in ISO. 
This value should be older than the applicable time window of every SLO provided. + example: '2024-12-31T00:00:00.000Z' + bulk_purge_rollup_response: + title: Bulk Purge Rollup data response + description: | + The bulk purge rollup data response returns a task id from the elasticsearch deleteByQuery response. + type: object + properties: + taskId: + type: string + description: The task id of the purge operation + example: 8853df00-ae2e-11ed-90af-09bb6422b258 + find_slo_definitions_response: + title: Find SLO definitions response + description: | + A paginated response of SLO definitions matching the query. + type: object + oneOf: + - type: object properties: - date: - type: string - example: '2022-01-01T00:00:00.000Z' - status: - $ref: '#/components/schemas/summary_status' - sliValue: + page: + type: number + example: 1 + perPage: type: number - example: 0.9836 - errorBudget: - $ref: '#/components/schemas/error_budget' + example: 25 + total: + type: number + example: 34 + results: + type: array + items: + $ref: '#/components/schemas/slo_with_summary_response' + - type: object + properties: + page: + type: number + default: 1 + description: for backward compability + perPage: + type: number + example: 25 + description: for backward compability + size: + type: number + example: 25 + searchAfter: + type: array + items: + type: string + example: + - some-slo-id + - other-cursor-id + description: the cursor to provide to get the next paged results + total: + type: number + example: 34 + results: + type: array + items: + $ref: '#/components/schemas/slo_with_summary_response' delete_slo_instances_request: title: Delete SLO instances request description: | @@ -1469,3 +1955,59 @@ components: description: The SLO instance identifier type: string example: 8853df00-ae2e-11ed-90af-09bb6422b258 + bulk_delete_request: + title: Bulk delete SLO request + description: | + The bulk delete SLO request takes a list of SLOs Definition id to delete. + type: object + required: + - list + properties: + list: + description: An array of SLO Definition id + type: array + items: + type: string + example: 8853df00-ae2e-11ed-90af-09bb6422b258 + description: The SLO Definition id + bulk_delete_response: + title: Bulk delete SLO response + description: | + The bulk delete SLO response returns a taskId that can be used to poll for its status + type: object + properties: + taskId: + type: string + example: d08506b7-f0e8-4f8b-a06a-a83940f4db91 + description: The taskId of the bulk delete operation + bulk_delete_status_response: + title: The status of the bulk deletion + description: Indicates if the bulk deletion is completed, with the detailed results of the operation. 
+ type: object + properties: + isDone: + type: boolean + example: true + description: Indicates if the bulk deletion operation is completed + error: + type: string + example: Task not found + description: The error message if the bulk deletion operation failed + results: + description: The results of the bulk deletion operation, including the success status and any errors for each SLO + type: array + items: + type: object + properties: + id: + type: string + example: d08506b7-f0e8-4f8b-a06a-a83940f4db91 + description: The ID of the SLO that was deleted + success: + type: boolean + example: true + description: The result of the deletion operation for this SLO + error: + type: string + example: SLO [d08506b7-f0e8-4f8b-a06a-a83940f4db91] not found + description: The error message if the deletion operation failed for this SLO diff --git a/generated/slo/.openapi-generator/FILES b/generated/slo/.openapi-generator/FILES index 9145efdc6..b316f881d 100644 --- a/generated/slo/.openapi-generator/FILES +++ b/generated/slo/.openapi-generator/FILES @@ -7,15 +7,28 @@ api_slo.go client.go configuration.go docs/BudgetingMethod.md +docs/BulkDeleteRequest.md +docs/BulkDeleteResponse.md +docs/BulkDeleteStatusResponse.md +docs/BulkDeleteStatusResponseResultsInner.md +docs/BulkPurgeRollupRequest.md +docs/BulkPurgeRollupRequestPurgePolicy.md +docs/BulkPurgeRollupRequestPurgePolicyOneOf.md +docs/BulkPurgeRollupRequestPurgePolicyOneOf1.md +docs/BulkPurgeRollupResponse.md docs/CreateSloRequest.md docs/CreateSloRequestIndicator.md docs/CreateSloResponse.md docs/DeleteSloInstancesRequest.md docs/DeleteSloInstancesRequestListInner.md docs/ErrorBudget.md +docs/Filter.md +docs/FilterMeta.md +docs/FindSloDefinitionsResponse.md +docs/FindSloDefinitionsResponseOneOf.md +docs/FindSloDefinitionsResponseOneOf1.md docs/FindSloResponse.md -docs/HistoricalSummaryRequest.md -docs/HistoricalSummaryResponseInner.md +docs/GroupBy.md docs/IndicatorPropertiesApmAvailability.md docs/IndicatorPropertiesApmAvailabilityParams.md docs/IndicatorPropertiesApmLatency.md @@ -27,7 +40,6 @@ docs/IndicatorPropertiesCustomMetricParams.md docs/IndicatorPropertiesCustomMetricParamsGood.md docs/IndicatorPropertiesCustomMetricParamsGoodMetricsInner.md docs/IndicatorPropertiesCustomMetricParamsTotal.md -docs/IndicatorPropertiesCustomMetricParamsTotalMetricsInner.md docs/IndicatorPropertiesHistogram.md docs/IndicatorPropertiesHistogramParams.md docs/IndicatorPropertiesHistogramParamsGood.md @@ -36,6 +48,10 @@ docs/IndicatorPropertiesTimesliceMetric.md docs/IndicatorPropertiesTimesliceMetricParams.md docs/IndicatorPropertiesTimesliceMetricParamsMetric.md docs/IndicatorPropertiesTimesliceMetricParamsMetricMetricsInner.md +docs/KqlWithFilters.md +docs/KqlWithFiltersGood.md +docs/KqlWithFiltersOneOf.md +docs/KqlWithFiltersTotal.md docs/Model400Response.md docs/Model401Response.md docs/Model403Response.md @@ -44,9 +60,9 @@ docs/Model409Response.md docs/Objective.md docs/Settings.md docs/SloAPI.md -docs/SloResponse.md -docs/SloResponseGroupBy.md -docs/SloResponseIndicator.md +docs/SloDefinitionResponse.md +docs/SloWithSummaryResponse.md +docs/SloWithSummaryResponseIndicator.md docs/Summary.md docs/SummaryStatus.md docs/TimeWindow.md @@ -63,15 +79,28 @@ model_403_response.go model_404_response.go model_409_response.go model_budgeting_method.go +model_bulk_delete_request.go +model_bulk_delete_response.go +model_bulk_delete_status_response.go +model_bulk_delete_status_response_results_inner.go +model_bulk_purge_rollup_request.go 
+model_bulk_purge_rollup_request_purge_policy.go +model_bulk_purge_rollup_request_purge_policy_one_of.go +model_bulk_purge_rollup_request_purge_policy_one_of_1.go +model_bulk_purge_rollup_response.go model_create_slo_request.go model_create_slo_request_indicator.go model_create_slo_response.go model_delete_slo_instances_request.go model_delete_slo_instances_request_list_inner.go model_error_budget.go +model_filter.go +model_filter_meta.go +model_find_slo_definitions_response.go +model_find_slo_definitions_response_one_of.go +model_find_slo_definitions_response_one_of_1.go model_find_slo_response.go -model_historical_summary_request.go -model_historical_summary_response_inner.go +model_group_by.go model_indicator_properties_apm_availability.go model_indicator_properties_apm_availability_params.go model_indicator_properties_apm_latency.go @@ -83,7 +112,6 @@ model_indicator_properties_custom_metric_params.go model_indicator_properties_custom_metric_params_good.go model_indicator_properties_custom_metric_params_good_metrics_inner.go model_indicator_properties_custom_metric_params_total.go -model_indicator_properties_custom_metric_params_total_metrics_inner.go model_indicator_properties_histogram.go model_indicator_properties_histogram_params.go model_indicator_properties_histogram_params_good.go @@ -92,11 +120,15 @@ model_indicator_properties_timeslice_metric.go model_indicator_properties_timeslice_metric_params.go model_indicator_properties_timeslice_metric_params_metric.go model_indicator_properties_timeslice_metric_params_metric_metrics_inner.go +model_kql_with_filters.go +model_kql_with_filters_good.go +model_kql_with_filters_one_of.go +model_kql_with_filters_total.go model_objective.go model_settings.go -model_slo_response.go -model_slo_response_group_by.go -model_slo_response_indicator.go +model_slo_definition_response.go +model_slo_with_summary_response.go +model_slo_with_summary_response_indicator.go model_summary.go model_summary_status.go model_time_window.go diff --git a/generated/slo/README.md b/generated/slo/README.md index 261fc2d0e..cd85430f7 100644 --- a/generated/slo/README.md +++ b/generated/slo/README.md @@ -5,7 +5,7 @@ OpenAPI schema for SLOs endpoints ## Overview This API client was generated by the [OpenAPI Generator](https://openapi-generator.tech) project. By using the [OpenAPI-spec](https://www.openapis.org/) from a remote server, you can easily generate an API client. -- API version: 1.0 +- API version: 1.1 - Package version: 1.0.0 - Build package: org.openapitools.codegen.languages.GoClientCodegen @@ -73,33 +73,50 @@ ctx = context.WithValue(context.Background(), slo.ContextOperationServerVariable ## Documentation for API Endpoints -All URIs are relative to *http://localhost:5601* +All URIs are relative to *https://localhost:5601* Class | Method | HTTP request | Description ------------ | ------------- | ------------- | ------------- -*SloAPI* | [**CreateSloOp**](docs/SloAPI.md#createsloop) | **Post** /s/{spaceId}/api/observability/slos | Creates an SLO. 
-*SloAPI* | [**DeleteSloInstancesOp**](docs/SloAPI.md#deletesloinstancesop) | **Post** /s/{spaceId}/api/observability/slos/_delete_instances | Batch delete rollup and summary data for the matching list of sloId and instanceId -*SloAPI* | [**DeleteSloOp**](docs/SloAPI.md#deletesloop) | **Delete** /s/{spaceId}/api/observability/slos/{sloId} | Deletes an SLO -*SloAPI* | [**DisableSloOp**](docs/SloAPI.md#disablesloop) | **Post** /s/{spaceId}/api/observability/slos/{sloId}/disable | Disables an SLO -*SloAPI* | [**EnableSloOp**](docs/SloAPI.md#enablesloop) | **Post** /s/{spaceId}/api/observability/slos/{sloId}/enable | Enables an SLO -*SloAPI* | [**FindSlosOp**](docs/SloAPI.md#findslosop) | **Get** /s/{spaceId}/api/observability/slos | Retrieves a paginated list of SLOs -*SloAPI* | [**GetSloOp**](docs/SloAPI.md#getsloop) | **Get** /s/{spaceId}/api/observability/slos/{sloId} | Retrieves a SLO -*SloAPI* | [**HistoricalSummaryOp**](docs/SloAPI.md#historicalsummaryop) | **Post** /s/{spaceId}/internal/observability/slos/_historical_summary | Retrieves the historical summary for a list of SLOs -*SloAPI* | [**UpdateSloOp**](docs/SloAPI.md#updatesloop) | **Put** /s/{spaceId}/api/observability/slos/{sloId} | Updates an SLO +*SloAPI* | [**BulkDeleteOp**](docs/SloAPI.md#bulkdeleteop) | **Post** /s/{spaceId}/api/observability/slos/_bulk_delete | Bulk delete SLO definitions and their associated summary and rollup data. +*SloAPI* | [**BulkDeleteStatusOp**](docs/SloAPI.md#bulkdeletestatusop) | **Get** /s/{spaceId}/api/observability/slos/_bulk_delete/{taskId} | Retrieve the status of the bulk deletion +*SloAPI* | [**CreateSloOp**](docs/SloAPI.md#createsloop) | **Post** /s/{spaceId}/api/observability/slos | Create an SLO +*SloAPI* | [**DeleteRollupDataOp**](docs/SloAPI.md#deleterollupdataop) | **Post** /s/{spaceId}/api/observability/slos/_bulk_purge_rollup | Batch delete rollup and summary data +*SloAPI* | [**DeleteSloInstancesOp**](docs/SloAPI.md#deletesloinstancesop) | **Post** /s/{spaceId}/api/observability/slos/_delete_instances | Batch delete rollup and summary data +*SloAPI* | [**DeleteSloOp**](docs/SloAPI.md#deletesloop) | **Delete** /s/{spaceId}/api/observability/slos/{sloId} | Delete an SLO +*SloAPI* | [**DisableSloOp**](docs/SloAPI.md#disablesloop) | **Post** /s/{spaceId}/api/observability/slos/{sloId}/disable | Disable an SLO +*SloAPI* | [**EnableSloOp**](docs/SloAPI.md#enablesloop) | **Post** /s/{spaceId}/api/observability/slos/{sloId}/enable | Enable an SLO +*SloAPI* | [**FindSlosOp**](docs/SloAPI.md#findslosop) | **Get** /s/{spaceId}/api/observability/slos | Get a paginated list of SLOs +*SloAPI* | [**GetDefinitionsOp**](docs/SloAPI.md#getdefinitionsop) | **Get** /s/{spaceId}/internal/observability/slos/_definitions | Get the SLO definitions +*SloAPI* | [**GetSloOp**](docs/SloAPI.md#getsloop) | **Get** /s/{spaceId}/api/observability/slos/{sloId} | Get an SLO +*SloAPI* | [**ResetSloOp**](docs/SloAPI.md#resetsloop) | **Post** /s/{spaceId}/api/observability/slos/{sloId}/_reset | Reset an SLO +*SloAPI* | [**UpdateSloOp**](docs/SloAPI.md#updatesloop) | **Put** /s/{spaceId}/api/observability/slos/{sloId} | Update an SLO ## Documentation For Models - [BudgetingMethod](docs/BudgetingMethod.md) + - [BulkDeleteRequest](docs/BulkDeleteRequest.md) + - [BulkDeleteResponse](docs/BulkDeleteResponse.md) + - [BulkDeleteStatusResponse](docs/BulkDeleteStatusResponse.md) + - [BulkDeleteStatusResponseResultsInner](docs/BulkDeleteStatusResponseResultsInner.md) + - [BulkPurgeRollupRequest](docs/BulkPurgeRollupRequest.md) 
+ - [BulkPurgeRollupRequestPurgePolicy](docs/BulkPurgeRollupRequestPurgePolicy.md) + - [BulkPurgeRollupRequestPurgePolicyOneOf](docs/BulkPurgeRollupRequestPurgePolicyOneOf.md) + - [BulkPurgeRollupRequestPurgePolicyOneOf1](docs/BulkPurgeRollupRequestPurgePolicyOneOf1.md) + - [BulkPurgeRollupResponse](docs/BulkPurgeRollupResponse.md) - [CreateSloRequest](docs/CreateSloRequest.md) - [CreateSloRequestIndicator](docs/CreateSloRequestIndicator.md) - [CreateSloResponse](docs/CreateSloResponse.md) - [DeleteSloInstancesRequest](docs/DeleteSloInstancesRequest.md) - [DeleteSloInstancesRequestListInner](docs/DeleteSloInstancesRequestListInner.md) - [ErrorBudget](docs/ErrorBudget.md) + - [Filter](docs/Filter.md) + - [FilterMeta](docs/FilterMeta.md) + - [FindSloDefinitionsResponse](docs/FindSloDefinitionsResponse.md) + - [FindSloDefinitionsResponseOneOf](docs/FindSloDefinitionsResponseOneOf.md) + - [FindSloDefinitionsResponseOneOf1](docs/FindSloDefinitionsResponseOneOf1.md) - [FindSloResponse](docs/FindSloResponse.md) - - [HistoricalSummaryRequest](docs/HistoricalSummaryRequest.md) - - [HistoricalSummaryResponseInner](docs/HistoricalSummaryResponseInner.md) + - [GroupBy](docs/GroupBy.md) - [IndicatorPropertiesApmAvailability](docs/IndicatorPropertiesApmAvailability.md) - [IndicatorPropertiesApmAvailabilityParams](docs/IndicatorPropertiesApmAvailabilityParams.md) - [IndicatorPropertiesApmLatency](docs/IndicatorPropertiesApmLatency.md) @@ -111,7 +128,6 @@ Class | Method | HTTP request | Description - [IndicatorPropertiesCustomMetricParamsGood](docs/IndicatorPropertiesCustomMetricParamsGood.md) - [IndicatorPropertiesCustomMetricParamsGoodMetricsInner](docs/IndicatorPropertiesCustomMetricParamsGoodMetricsInner.md) - [IndicatorPropertiesCustomMetricParamsTotal](docs/IndicatorPropertiesCustomMetricParamsTotal.md) - - [IndicatorPropertiesCustomMetricParamsTotalMetricsInner](docs/IndicatorPropertiesCustomMetricParamsTotalMetricsInner.md) - [IndicatorPropertiesHistogram](docs/IndicatorPropertiesHistogram.md) - [IndicatorPropertiesHistogramParams](docs/IndicatorPropertiesHistogramParams.md) - [IndicatorPropertiesHistogramParamsGood](docs/IndicatorPropertiesHistogramParamsGood.md) @@ -120,6 +136,10 @@ Class | Method | HTTP request | Description - [IndicatorPropertiesTimesliceMetricParams](docs/IndicatorPropertiesTimesliceMetricParams.md) - [IndicatorPropertiesTimesliceMetricParamsMetric](docs/IndicatorPropertiesTimesliceMetricParamsMetric.md) - [IndicatorPropertiesTimesliceMetricParamsMetricMetricsInner](docs/IndicatorPropertiesTimesliceMetricParamsMetricMetricsInner.md) + - [KqlWithFilters](docs/KqlWithFilters.md) + - [KqlWithFiltersGood](docs/KqlWithFiltersGood.md) + - [KqlWithFiltersOneOf](docs/KqlWithFiltersOneOf.md) + - [KqlWithFiltersTotal](docs/KqlWithFiltersTotal.md) - [Model400Response](docs/Model400Response.md) - [Model401Response](docs/Model401Response.md) - [Model403Response](docs/Model403Response.md) @@ -127,9 +147,9 @@ Class | Method | HTTP request | Description - [Model409Response](docs/Model409Response.md) - [Objective](docs/Objective.md) - [Settings](docs/Settings.md) - - [SloResponse](docs/SloResponse.md) - - [SloResponseGroupBy](docs/SloResponseGroupBy.md) - - [SloResponseIndicator](docs/SloResponseIndicator.md) + - [SloDefinitionResponse](docs/SloDefinitionResponse.md) + - [SloWithSummaryResponse](docs/SloWithSummaryResponse.md) + - [SloWithSummaryResponseIndicator](docs/SloWithSummaryResponseIndicator.md) - [Summary](docs/Summary.md) - [SummaryStatus](docs/SummaryStatus.md) - 
[TimeWindow](docs/TimeWindow.md) diff --git a/generated/slo/api/openapi.yaml b/generated/slo/api/openapi.yaml index c854dbc3c..b1c356cb5 100644 --- a/generated/slo/api/openapi.yaml +++ b/generated/slo/api/openapi.yaml @@ -7,10 +7,12 @@ info: name: Elastic License 2.0 url: https://www.elastic.co/licensing/elastic-license title: SLOs - version: "1.0" + version: "1.1" servers: -- description: local - url: http://localhost:5601 +- url: "https://{kibana_url}" + variables: + kibana_url: + default: localhost:5601 security: - basicAuth: [] - apiKeyAuth: [] @@ -51,7 +53,30 @@ paths: schema: type: string style: form - - description: The page number to return + - description: "The page size to use for cursor-based pagination, must be greater\ + \ or equal than 1" + example: 1 + explode: true + in: query + name: size + required: false + schema: + default: 1 + type: integer + style: form + - description: "The cursor to use for fetching the results from, when using\ + \ a cursor-base pagination." + explode: true + in: query + name: searchAfter + required: false + schema: + items: + type: string + type: array + style: form + - description: "The page to use for pagination, must be greater or equal than\ + \ 1" example: 1 explode: true in: query @@ -61,7 +86,7 @@ paths: default: 1 type: integer style: form - - description: The number of SLOs to return per page + - description: Number of SLOs returned by page example: 25 explode: true in: query @@ -100,6 +125,15 @@ paths: - desc type: string style: form + - description: Hide stale SLOs from the list as defined by stale SLO threshold + in SLO settings + explode: true + in: query + name: hideStale + required: false + schema: + type: boolean + style: form responses: "200": content: @@ -131,7 +165,7 @@ paths: schema: $ref: '#/components/schemas/404_response' description: Not found response - summary: Retrieves a paginated list of SLOs + summary: Get a paginated list of SLOs tags: - slo post: @@ -194,9 +228,7 @@ paths: schema: $ref: '#/components/schemas/409_response' description: Conflict - The SLO id already exists - servers: - - url: https://localhost:5601 - summary: Creates an SLO. 
+ summary: Create an SLO tags: - slo /s/{spaceId}/api/observability/slos/{sloId}: @@ -259,7 +291,7 @@ paths: schema: $ref: '#/components/schemas/404_response' description: Not found response - summary: Deletes an SLO + summary: Delete an SLO tags: - slo get: @@ -308,7 +340,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/slo_response' + $ref: '#/components/schemas/slo_with_summary_response' description: Successful request "400": content: @@ -334,7 +366,7 @@ paths: schema: $ref: '#/components/schemas/404_response' description: Not found response - summary: Retrieves a SLO + summary: Get an SLO tags: - slo put: @@ -380,7 +412,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/slo_response' + $ref: '#/components/schemas/slo_definition_response' description: Successful request "400": content: @@ -406,7 +438,7 @@ paths: schema: $ref: '#/components/schemas/404_response' description: Not found response - summary: Updates an SLO + summary: Update an SLO tags: - slo /s/{spaceId}/api/observability/slos/{sloId}/enable: @@ -469,7 +501,7 @@ paths: schema: $ref: '#/components/schemas/404_response' description: Not found response - summary: Enables an SLO + summary: Enable an SLO tags: - slo /s/{spaceId}/api/observability/slos/{sloId}/disable: @@ -478,6 +510,69 @@ paths: You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. operationId: disableSloOp parameters: + - description: Cross-site request forgery protection + explode: false + in: header + name: kbn-xsrf + required: true + schema: + type: string + style: simple + - description: "An identifier for the space. If `/s/` and the identifier are\ + \ omitted from the path, the default space is used." + explode: false + in: path + name: spaceId + required: true + schema: + example: default + type: string + style: simple + - description: An identifier for the slo. + explode: false + in: path + name: sloId + required: true + schema: + example: 9c235211-6834-11ea-a78c-6feb38a34414 + type: string + style: simple + responses: + "204": + description: Successful request + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/400_response' + description: Bad request + "401": + content: + application/json: + schema: + $ref: '#/components/schemas/401_response' + description: Unauthorized response + "403": + content: + application/json: + schema: + $ref: '#/components/schemas/403_response' + description: Unauthorized response + "404": + content: + application/json: + schema: + $ref: '#/components/schemas/404_response' + description: Not found response + summary: Disable an SLO + tags: + - slo + /s/{spaceId}/api/observability/slos/{sloId}/_reset: + post: + description: | + You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. 
+ operationId: resetSloOp + parameters: - description: Cross-site request forgery protection explode: false in: header @@ -507,6 +602,10 @@ paths: style: simple responses: "200": + content: + application/json: + schema: + $ref: '#/components/schemas/slo_definition_response' description: Successful request "400": content: @@ -532,14 +631,14 @@ paths: schema: $ref: '#/components/schemas/404_response' description: Not found response - summary: Disables an SLO + summary: Reset an SLO tags: - slo - /s/{spaceId}/internal/observability/slos/_historical_summary: + /s/{spaceId}/api/observability/slos/_bulk_purge_rollup: post: description: | - You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. - operationId: historicalSummaryOp + The deletion occurs for the specified list of `sloId`. You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + operationId: deleteRollupDataOp parameters: - description: Cross-site request forgery protection explode: false @@ -563,14 +662,113 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/historical_summary_request' + $ref: '#/components/schemas/bulk_purge_rollup_request' + required: true + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/bulk_purge_rollup_response' + description: Successful request + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/400_response' + description: Bad request + "401": + content: + application/json: + schema: + $ref: '#/components/schemas/401_response' + description: Unauthorized response + "403": + content: + application/json: + schema: + $ref: '#/components/schemas/403_response' + description: Unauthorized response + summary: Batch delete rollup and summary data + tags: + - slo + /s/{spaceId}/internal/observability/slos/_definitions: + get: + description: | + You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + operationId: getDefinitionsOp + parameters: + - description: Cross-site request forgery protection + explode: false + in: header + name: kbn-xsrf required: true + schema: + type: string + style: simple + - description: "An identifier for the space. If `/s/` and the identifier are\ + \ omitted from the path, the default space is used." 
+ explode: false + in: path + name: spaceId + required: true + schema: + example: default + type: string + style: simple + - description: Indicates if the API returns only outdated SLO or all SLO definitions + example: true + explode: true + in: query + name: includeOutdatedOnly + required: false + schema: + type: boolean + style: form + - description: Filters the SLOs by tag + explode: true + in: query + name: tags + required: false + schema: + type: string + style: form + - description: Filters the SLOs by name + example: my service availability + explode: true + in: query + name: search + required: false + schema: + type: string + style: form + - description: "The page to use for pagination, must be greater or equal than\ + \ 1" + example: 1 + explode: true + in: query + name: page + required: false + schema: + type: number + style: form + - description: Number of SLOs returned by page + example: 100 + explode: true + in: query + name: perPage + required: false + schema: + default: 100 + maximum: 1000 + type: integer + style: form responses: "200": content: application/json: schema: - $ref: '#/components/schemas/historical_summary_response' + $ref: '#/components/schemas/find_slo_definitions_response' description: Successful request "400": content: @@ -590,13 +788,13 @@ paths: schema: $ref: '#/components/schemas/403_response' description: Unauthorized response - summary: Retrieves the historical summary for a list of SLOs + summary: Get the SLO definitions tags: - slo /s/{spaceId}/api/observability/slos/_delete_instances: post: description: | - You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + The deletion occurs for the specified list of `sloId` and `instanceId`. You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. operationId: deleteSloInstancesOp parameters: - description: Cross-site request forgery protection @@ -644,10 +842,127 @@ paths: schema: $ref: '#/components/schemas/403_response' description: Unauthorized response - servers: - - url: https://localhost:5601 - summary: Batch delete rollup and summary data for the matching list of sloId - and instanceId + summary: Batch delete rollup and summary data + tags: + - slo + /s/{spaceId}/api/observability/slos/_bulk_delete: + post: + description: | + Bulk delete SLO definitions and their associated summary and rollup data. This endpoint initiates a bulk deletion operation for SLOs, which may take some time to complete. The status of the operation can be checked using the `GET /api/slo/_bulk_delete/{taskId}` endpoint. + operationId: bulkDeleteOp + parameters: + - description: Cross-site request forgery protection + explode: false + in: header + name: kbn-xsrf + required: true + schema: + type: string + style: simple + - description: "An identifier for the space. If `/s/` and the identifier are\ + \ omitted from the path, the default space is used." 
+ explode: false + in: path + name: spaceId + required: true + schema: + example: default + type: string + style: simple + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/bulk_delete_request' + required: true + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/bulk_delete_response' + description: Successful response + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/400_response' + description: Bad request + "401": + content: + application/json: + schema: + $ref: '#/components/schemas/401_response' + description: Unauthorized response + "403": + content: + application/json: + schema: + $ref: '#/components/schemas/403_response' + description: Unauthorized response + summary: Bulk delete SLO definitions and their associated summary and rollup + data. + tags: + - slo + /s/{spaceId}/api/observability/slos/_bulk_delete/{taskId}: + get: + description: | + Retrieve the status of the bulk deletion operation for SLOs. This endpoint returns the status of the bulk deletion operation, including whether it is completed and the results of the operation. + operationId: bulkDeleteStatusOp + parameters: + - description: Cross-site request forgery protection + explode: false + in: header + name: kbn-xsrf + required: true + schema: + type: string + style: simple + - description: "An identifier for the space. If `/s/` and the identifier are\ + \ omitted from the path, the default space is used." + explode: false + in: path + name: spaceId + required: true + schema: + example: default + type: string + style: simple + - description: The task id of the bulk delete operation + explode: false + in: path + name: taskId + required: true + schema: + example: 8853df00-ae2e-11ed-90af-09bb6422b258 + type: string + style: simple + responses: + "200": + content: + application/json: + schema: + $ref: '#/components/schemas/bulk_delete_status_response' + description: Successful response + "400": + content: + application/json: + schema: + $ref: '#/components/schemas/400_response' + description: Bad request + "401": + content: + application/json: + schema: + $ref: '#/components/schemas/401_response' + description: Unauthorized response + "403": + content: + application/json: + schema: + $ref: '#/components/schemas/403_response' + description: Unauthorized response + summary: Retrieve the status of the bulk deletion tags: - slo components: @@ -697,8 +1012,71 @@ components: - type title: APM availability type: object + filter_meta: + description: Defines properties for a filter + properties: + alias: + nullable: true + type: string + disabled: + type: boolean + negate: + type: boolean + controlledBy: + type: string + group: + type: string + index: + type: string + isMultiIndex: + type: boolean + type: + type: string + key: + type: string + params: + type: object + value: + type: string + field: + type: string + title: FilterMeta + type: object + filter: + description: Defines properties for a filter + properties: + query: + type: object + meta: + $ref: '#/components/schemas/filter_meta' + title: Filter + type: object + kql_with_filters: + description: Defines properties for a filter + oneOf: + - description: the KQL query to filter the documents with. + example: "field.environment : \"production\" and service.name : \"my-service\"" + type: string + - $ref: '#/components/schemas/kql_with_filters_oneOf' + title: KQL with filters + kql_with_filters_good: + description: The KQL query used to define the good events. 
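A hedged sketch of the two forms accepted by the `kql_with_filters` family of schemas above: either a plain KQL string, or an object with `kqlQuery` plus a `filters` array. The surrounding payload and the filter contents are illustrative, not taken from the spec.

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Variant 1: the "good" query as a plain KQL string.
	simple := map[string]any{
		"good": `request.latency <= 150 and request.status_code : "2xx"`,
	}

	// Variant 2: the "good" query as an object with kqlQuery and filters,
	// matching kql_with_filters_oneOf. The meta/query contents are examples only.
	structured := map[string]any{
		"good": map[string]any{
			"kqlQuery": `request.latency <= 150`,
			"filters": []map[string]any{
				{
					"meta":  map[string]any{"field": "request.status_code", "negate": false},
					"query": map[string]any{"match_phrase": map[string]string{"request.status_code": "200"}},
				},
			},
		},
	}

	for _, v := range []map[string]any{simple, structured} {
		b, _ := json.MarshalIndent(v, "", "  ")
		fmt.Println(string(b))
	}
}
```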
+ oneOf: + - description: the KQL query to filter the documents with. + example: "request.latency <= 150 and request.status_code : \"2xx\"" + type: string + - $ref: '#/components/schemas/kql_with_filters_oneOf' + title: KQL query for good events + kql_with_filters_total: + description: The KQL query used to define all events. + oneOf: + - description: the KQL query to filter the documents with. + example: "field.environment : \"production\" and service.name : \"my-service\"" + type: string + - $ref: '#/components/schemas/kql_with_filters_oneOf' + title: KQL query for all events indicator_properties_custom_kql: - description: Defines properties for a custom KQL indicator type + description: Defines properties for a custom query indicator type properties: params: $ref: '#/components/schemas/indicator_properties_custom_kql_params' @@ -709,7 +1087,7 @@ components: required: - params - type - title: Custom KQL + title: Custom Query type: object indicator_properties_apm_latency: description: Defines properties for the APM latency indicator type @@ -753,7 +1131,7 @@ components: - type title: Histogram indicator type: object - timeslice_metric_basic_metric_with_field: + timeslice_metric_percentile_metric: properties: name: description: The name of the metric. Only valid options are A-Z @@ -761,21 +1139,19 @@ components: pattern: "^[A-Z]$" type: string aggregation: - description: The aggregation type of the metric. + description: The aggregation type of the metric. Only valid option is "percentile" enum: - - sum - - avg - - min - - max - - std_deviation - - last_value - - cardinality - example: sum + - percentile + example: percentile type: string field: description: The field of the metric. example: processor.processed type: string + percentile: + description: The percentile value. + example: 95 + type: number filter: description: The filter to apply to the metric. example: "processor.outcome: \"success\"" @@ -784,9 +1160,10 @@ components: - aggregation - field - name - title: Timeslice Metric Basic Metric with Field + - percentile + title: Timeslice Metric Percentile Metric type: object - timeslice_metric_percentile_metric: + timeslice_metric_doc_count_metric: properties: name: description: The name of the metric. Only valid options are A-Z @@ -794,42 +1171,42 @@ components: pattern: "^[A-Z]$" type: string aggregation: - description: The aggregation type of the metric. Only valid option is "percentile" + description: The aggregation type of the metric. Only valid option is "doc_count" enum: - - percentile - example: percentile - type: string - field: - description: The field of the metric. - example: processor.processed + - doc_count + example: doc_count type: string - percentile: - description: The percentile value. - example: 95 - type: number filter: description: The filter to apply to the metric. example: "processor.outcome: \"success\"" type: string required: - aggregation - - field - name - - percentile - title: Timeslice Metric Percentile Metric + title: Timeslice Metric Doc Count Metric type: object - timeslice_metric_doc_count_metric: + timeslice_metric_basic_metric_with_field: properties: name: description: The name of the metric. Only valid options are A-Z example: A pattern: "^[A-Z]$" type: string - aggregation: - description: The aggregation type of the metric. Only valid option is "doc_count" - enum: - - doc_count - example: doc_count + aggregation: + description: The aggregation type of the metric. 
+ enum: + - sum + - avg + - min + - max + - std_deviation + - last_value + - cardinality + example: sum + type: string + field: + description: The field of the metric. + example: processor.processed type: string filter: description: The filter to apply to the metric. @@ -837,8 +1214,9 @@ components: type: string required: - aggregation + - field - name - title: Timeslice Metric Doc Count Metric + title: Timeslice Metric Basic Metric with Field type: object indicator_properties_timeslice_metric: description: Defines properties for a timeslice metric indicator type @@ -897,11 +1275,17 @@ components: target: description: the target objective between 0 and 1 excluded example: 0.99 + exclusiveMaximum: true + exclusiveMinimum: true + maximum: 100 + minimum: 0 type: number timesliceTarget: description: the target objective for each slice when using a timeslices budgeting method example: 0.995 + maximum: 100 + minimum: 0 type: number timesliceWindow: description: "the duration of each slice when using a timeslices budgeting\ @@ -915,17 +1299,40 @@ components: settings: description: Defines properties for SLO settings. example: + syncField: event.ingested + preventInitialBackfill: true syncDelay: 5m frequency: 5m properties: + syncField: + description: "The date field that is used to identify new documents in the\ + \ source. It is strongly recommended to use a field that contains the\ + \ ingest timestamp. If you use a different field, you might need to set\ + \ the delay such that it accounts for data transmission delays. When unspecified,\ + \ we use the indicator timestamp field." + example: event.ingested + type: string syncDelay: - description: The synch delay to apply to the transform. Default 1m + default: 1m + description: The time delay in minutes between the current time and the + latest source data time. Increasing the value will delay any alerting. + The default value is 1 minute. The minimum value is 1m and the maximum + is 359m. It should always be greater then source index refresh interval. example: 5m type: string frequency: - description: "Configure how often the transform runs, default 1m" + default: 1m + description: The interval between checks for changes in the source data. + The minimum value is 1m and the maximum is 59m. The default value is 1 + minute. example: 5m type: string + preventInitialBackfill: + default: false + description: "Start aggregating data from the time the SLO is created, instead\ + \ of backfilling data from the beginning of the time window." 
+ example: true + type: boolean title: Settings type: object summary_status: @@ -994,7 +1401,21 @@ components: - status title: Summary type: object - slo_response: + group_by: + description: optional group by field or fields to use to generate an SLO per + distinct value + example: + - - service.name + - service.name + - - service.name + - service.environment + oneOf: + - type: string + - items: + type: string + type: array + title: Group by + slo_with_summary_response: example: indicator: null summary: @@ -1006,13 +1427,20 @@ components: status: HEALTHY sliValue: 0.9836 settings: + syncField: event.ingested + preventInitialBackfill: true syncDelay: 5m frequency: 5m timeWindow: duration: 30d type: rolling description: My SLO description - groupBy: some.field + groupBy: + - - service.name + - service.name + - - service.name + - service.environment + version: 2 enabled: true objective: timesliceWindow: 5m @@ -1042,7 +1470,7 @@ components: example: My SLO description type: string indicator: - $ref: '#/components/schemas/slo_response_indicator' + $ref: '#/components/schemas/slo_with_summary_response_indicator' timeWindow: $ref: '#/components/schemas/time_window' budgetingMethod: @@ -1062,7 +1490,7 @@ components: example: true type: boolean groupBy: - $ref: '#/components/schemas/slo_response_groupBy' + $ref: '#/components/schemas/group_by' instanceId: description: "the value derived from the groupBy field, if present, otherwise\ \ '*'" @@ -1081,6 +1509,10 @@ components: description: The last update date example: 2023-01-12T10:03:19.000Z type: string + version: + description: The internal SLO version + example: 2 + type: number required: - budgetingMethod - createdAt @@ -1098,6 +1530,7 @@ components: - tags - timeWindow - updatedAt + - version title: SLO response type: object find_slo_response: @@ -1106,6 +1539,8 @@ components: example: total: 34 perPage: 25 + size: 25 + searchAfter: searchAfter page: 1 results: - indicator: null @@ -1118,13 +1553,20 @@ components: status: HEALTHY sliValue: 0.9836 settings: + syncField: event.ingested + preventInitialBackfill: true syncDelay: 5m frequency: 5m timeWindow: duration: 30d type: rolling description: My SLO description - groupBy: some.field + groupBy: + - - service.name + - service.name + - - service.name + - service.environment + version: 2 enabled: true objective: timesliceWindow: 5m @@ -1150,13 +1592,20 @@ components: status: HEALTHY sliValue: 0.9836 settings: + syncField: event.ingested + preventInitialBackfill: true syncDelay: 5m frequency: 5m timeWindow: duration: 30d type: rolling description: My SLO description - groupBy: some.field + groupBy: + - - service.name + - service.name + - - service.name + - service.environment + version: 2 enabled: true objective: timesliceWindow: 5m @@ -1173,6 +1622,12 @@ components: budgetingMethod: occurrences updatedAt: 2023-01-12T10:03:19.000Z properties: + size: + description: Size provided for cursor based pagination + example: 25 + type: number + searchAfter: + type: string page: example: 1 type: number @@ -1184,7 +1639,7 @@ components: type: number results: items: - $ref: '#/components/schemas/slo_response' + $ref: '#/components/schemas/slo_with_summary_response' type: array title: Find SLO response type: object @@ -1286,7 +1741,7 @@ components: settings: $ref: '#/components/schemas/settings' groupBy: - $ref: '#/components/schemas/slo_response_groupBy' + $ref: '#/components/schemas/group_by' tags: description: List of tags items: @@ -1350,7 +1805,7 @@ components: settings: $ref: 
'#/components/schemas/settings' groupBy: - $ref: '#/components/schemas/slo_response_groupBy' + $ref: '#/components/schemas/group_by' tags: description: List of tags items: @@ -1358,24 +1813,143 @@ components: type: array title: Update SLO request type: object - historical_summary_request: + slo_definition_response: + example: + indicator: null + settings: + syncField: event.ingested + preventInitialBackfill: true + syncDelay: 5m + frequency: 5m + timeWindow: + duration: 30d + type: rolling + description: My SLO description + groupBy: + - - service.name + - service.name + - - service.name + - service.environment + version: 2 + enabled: true + objective: + timesliceWindow: 5m + timesliceTarget: 0.995 + target: 0.99 + revision: 2 + tags: + - tags + - tags + createdAt: 2023-01-12T10:03:19.000Z + name: My Service SLO + id: 8853df00-ae2e-11ed-90af-09bb6422b258 + budgetingMethod: occurrences + updatedAt: 2023-01-12T10:03:19.000Z + properties: + id: + description: The identifier of the SLO. + example: 8853df00-ae2e-11ed-90af-09bb6422b258 + type: string + name: + description: The name of the SLO. + example: My Service SLO + type: string + description: + description: The description of the SLO. + example: My SLO description + type: string + indicator: + $ref: '#/components/schemas/slo_with_summary_response_indicator' + timeWindow: + $ref: '#/components/schemas/time_window' + budgetingMethod: + $ref: '#/components/schemas/budgeting_method' + objective: + $ref: '#/components/schemas/objective' + settings: + $ref: '#/components/schemas/settings' + revision: + description: The SLO revision + example: 2 + type: number + enabled: + description: Indicate if the SLO is enabled + example: true + type: boolean + groupBy: + $ref: '#/components/schemas/group_by' + tags: + description: List of tags + items: + type: string + type: array + createdAt: + description: The creation date + example: 2023-01-12T10:03:19.000Z + type: string + updatedAt: + description: The last update date + example: 2023-01-12T10:03:19.000Z + type: string + version: + description: The internal SLO version + example: 2 + type: number + required: + - budgetingMethod + - createdAt + - description + - enabled + - groupBy + - id + - indicator + - name + - objective + - revision + - settings + - tags + - timeWindow + - updatedAt + - version + title: SLO definition response + type: object + bulk_purge_rollup_request: + description: | + The bulk purge rollup data request takes a list of SLO ids and a purge policy, then deletes the rollup data according to the purge policy. This API can be used to remove the staled data of an instance SLO that no longer get updated. properties: list: - description: The list of SLO identifiers to get the historical summary for + description: An array of slo ids items: + description: The SLO Definition id example: 8853df00-ae2e-11ed-90af-09bb6422b258 type: string type: array + purgePolicy: + $ref: '#/components/schemas/bulk_purge_rollup_request_purgePolicy' required: - list - title: Historical summary request + - purgePolicy + title: Bulk Purge Rollup data request type: object - historical_summary_response: - additionalProperties: - items: - $ref: '#/components/schemas/Historical_summary_response_inner' - type: array - title: Historical summary response + bulk_purge_rollup_response: + description: | + The bulk purge rollup data response returns a task id from the elasticsearch deleteByQuery response. 
+ example: + taskId: 8853df00-ae2e-11ed-90af-09bb6422b258 + properties: + taskId: + description: The task id of the purge operation + example: 8853df00-ae2e-11ed-90af-09bb6422b258 + type: string + title: Bulk Purge Rollup data response + type: object + find_slo_definitions_response: + description: | + A paginated response of SLO definitions matching the query. + oneOf: + - $ref: '#/components/schemas/find_slo_definitions_response_oneOf' + - $ref: '#/components/schemas/find_slo_definitions_response_oneOf_1' + title: Find SLO definitions response type: object delete_slo_instances_request: description: | @@ -1390,6 +1964,63 @@ components: - list title: Delete SLO instances request type: object + bulk_delete_request: + description: | + The bulk delete SLO request takes a list of SLOs Definition id to delete. + properties: + list: + description: An array of SLO Definition id + items: + description: The SLO Definition id + example: 8853df00-ae2e-11ed-90af-09bb6422b258 + type: string + type: array + required: + - list + title: Bulk delete SLO request + type: object + bulk_delete_response: + description: | + The bulk delete SLO response returns a taskId that can be used to poll for its status + example: + taskId: d08506b7-f0e8-4f8b-a06a-a83940f4db91 + properties: + taskId: + description: The taskId of the bulk delete operation + example: d08506b7-f0e8-4f8b-a06a-a83940f4db91 + type: string + title: Bulk delete SLO response + type: object + bulk_delete_status_response: + description: "Indicates if the bulk deletion is completed, with the detailed\ + \ results of the operation." + example: + error: Task not found + isDone: true + results: + - success: true + id: d08506b7-f0e8-4f8b-a06a-a83940f4db91 + error: "SLO [d08506b7-f0e8-4f8b-a06a-a83940f4db91] not found" + - success: true + id: d08506b7-f0e8-4f8b-a06a-a83940f4db91 + error: "SLO [d08506b7-f0e8-4f8b-a06a-a83940f4db91] not found" + properties: + isDone: + description: Indicates if the bulk deletion operation is completed + example: true + type: boolean + error: + description: The error message if the bulk deletion operation failed + example: Task not found + type: string + results: + description: "The results of the bulk deletion operation, including the\ + \ success status and any errors for each SLO" + items: + $ref: '#/components/schemas/bulk_delete_status_response_results_inner' + type: array + title: The status of the bulk deletion + type: object indicator_properties_apm_availability_params: description: An object containing the indicator parameters. nullable: false @@ -1425,6 +2056,15 @@ components: - transactionName - transactionType type: object + kql_with_filters_oneOf: + properties: + kqlQuery: + type: string + filters: + items: + $ref: '#/components/schemas/filter' + type: array + type: object indicator_properties_custom_kql_params: description: An object containing the indicator parameters. nullable: false @@ -1433,18 +2073,19 @@ components: description: The index or index pattern to use example: my-service-* type: string - filter: - description: the KQL query to filter the documents with. - example: "field.environment : \"production\" and service.name : \"my-service\"" + dataViewId: + description: "The kibana data view id to use, primarily used to include\ + \ data view runtime mappings. Make sure to save SLO again if you add/update\ + \ run time fields to the data view and if those fields are being used\ + \ in slo queries." 
+ example: 03b80ab3-003d-498b-881c-3beedbaf1162 type: string + filter: + $ref: '#/components/schemas/kql_with_filters' good: - description: the KQL query used to define the good events. - example: "request.latency <= 150 and request.status_code : \"2xx\"" - type: string + $ref: '#/components/schemas/kql_with_filters_good' total: - description: the KQL query used to define all events. - example: "" - type: string + $ref: '#/components/schemas/kql_with_filters_total' timestampField: description: | The timestamp field used in the source indice. @@ -1504,9 +2145,10 @@ components: pattern: "^[A-Z]$" type: string aggregation: - description: The aggregation type of the metric. Only valid option is "sum" + description: The aggregation type of the metric. enum: - sum + - doc_count example: sum type: string field: @@ -1515,7 +2157,7 @@ components: type: string filter: description: The filter to apply to the metric. - example: "processor.outcome: \"success\"" + example: "processor.outcome: *" type: string required: - aggregation @@ -1539,32 +2181,6 @@ components: - equation - metrics type: object - indicator_properties_custom_metric_params_total_metrics_inner: - properties: - name: - description: The name of the metric. Only valid options are A-Z - example: A - pattern: "^[A-Z]$" - type: string - aggregation: - description: The aggregation type of the metric. Only valid option is "sum" - enum: - - sum - example: sum - type: string - field: - description: The field of the metric. - example: processor.processed - type: string - filter: - description: The filter to apply to the metric. - example: "processor.outcome: *" - type: string - required: - - aggregation - - field - - name - type: object indicator_properties_custom_metric_params_total: description: | An object defining the "total" metrics and equation @@ -1572,7 +2188,7 @@ components: metrics: description: "List of metrics with their name, aggregation type, and field." items: - $ref: '#/components/schemas/indicator_properties_custom_metric_params_total_metrics_inner' + $ref: '#/components/schemas/indicator_properties_custom_metric_params_good_metrics_inner' type: array equation: description: The equation to calculate the "total" metric. @@ -1590,6 +2206,13 @@ components: description: The index or index pattern to use example: my-service-* type: string + dataViewId: + description: "The kibana data view id to use, primarily used to include\ + \ data view runtime mappings. Make sure to save SLO again if you add/update\ + \ run time fields to the data view and if those fields are being used\ + \ in slo queries." + example: 03b80ab3-003d-498b-881c-3beedbaf1162 + type: string filter: description: the KQL query to filter the documents with. example: "field.environment : \"production\" and service.name : \"my-service\"" @@ -1681,6 +2304,13 @@ components: description: The index or index pattern to use example: my-service-* type: string + dataViewId: + description: "The kibana data view id to use, primarily used to include\ + \ data view runtime mappings. Make sure to save SLO again if you add/update\ + \ run time fields to the data view and if those fields are being used\ + \ in slo queries." + example: 03b80ab3-003d-498b-881c-3beedbaf1162 + type: string filter: description: the KQL query to filter the documents with. 
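To make the custom metric indicator shape above concrete, a minimal illustrative payload with one `good` metric, one `total` metric and their equations. The `type` discriminator value, the `total` field name and the timestamp field are assumptions; the aggregation, field and equation values reuse the spec's examples.

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Illustrative custom metric indicator assembled from
	// indicator_properties_custom_metric_params above; values are examples only.
	indicator := map[string]any{
		"type": "sli.metric.custom", // assumed discriminator value
		"params": map[string]any{
			"index":          "my-service-*",
			"timestampField": "@timestamp", // assumed example value
			"filter":         `field.environment : "production"`,
			"good": map[string]any{
				"metrics":  []map[string]any{{"name": "A", "aggregation": "sum", "field": "processor.processed"}},
				"equation": "A",
			},
			"total": map[string]any{
				"metrics":  []map[string]any{{"name": "A", "aggregation": "sum", "field": "processor.accepted"}}, // field name illustrative
				"equation": "A",
			},
		},
	}
	b, _ := json.MarshalIndent(indicator, "", "  ")
	fmt.Println(string(b))
}
```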
example: "field.environment : \"production\" and service.name : \"my-service\"" @@ -1758,6 +2388,13 @@ components: description: The index or index pattern to use example: my-service-* type: string + dataViewId: + description: "The kibana data view id to use, primarily used to include\ + \ data view runtime mappings. Make sure to save SLO again if you add/update\ + \ run time fields to the data view and if those fields are being used\ + \ in slo queries." + example: 03b80ab3-003d-498b-881c-3beedbaf1162 + type: string filter: description: the KQL query to filter the documents with. example: "field.environment : \"production\" and service.name : \"my-service\"" @@ -1774,7 +2411,7 @@ components: - metric - timestampField type: object - slo_response_indicator: + slo_with_summary_response_indicator: discriminator: mapping: sli.apm.transactionErrorRate: '#/components/schemas/indicator_properties_apm_availability' @@ -1791,15 +2428,6 @@ components: - $ref: '#/components/schemas/indicator_properties_custom_metric' - $ref: '#/components/schemas/indicator_properties_histogram' - $ref: '#/components/schemas/indicator_properties_timeslice_metric' - slo_response_groupBy: - description: optional group by field to use to generate an SLO per distinct - value - example: some.field - oneOf: - - type: string - - items: - type: string - type: array create_slo_request_indicator: oneOf: - $ref: '#/components/schemas/indicator_properties_custom_kql' @@ -1808,18 +2436,86 @@ components: - $ref: '#/components/schemas/indicator_properties_custom_metric' - $ref: '#/components/schemas/indicator_properties_histogram' - $ref: '#/components/schemas/indicator_properties_timeslice_metric' - Historical_summary_response_inner: + bulk_purge_rollup_request_purgePolicy_oneOf: properties: - date: - example: 2022-01-01T00:00:00.000Z + purgeType: + description: Specifies whether documents will be purged based on a specific + age or on a timestamp + enum: + - fixed-age type: string - status: - $ref: '#/components/schemas/summary_status' - sliValue: - example: 0.9836 + age: + description: "The duration to determine which documents to purge, formatted\ + \ as {duration}{unit}. This value should be greater than or equal to the\ + \ time window of every SLO provided." + example: 7d + type: string + type: object + bulk_purge_rollup_request_purgePolicy_oneOf_1: + properties: + purgeType: + description: Specifies whether documents will be purged based on a specific + age or on a timestamp + enum: + - fixed-time + type: string + timestamp: + description: "The timestamp to determine which documents to purge, formatted\ + \ in ISO. This value should be older than the applicable time window of\ + \ every SLO provided." 
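For reference, an illustrative `_bulk_purge_rollup` request body for each purge policy variant described above (`fixed-age` and `fixed-time`); the SLO id, age and timestamp reuse the example values from the spec.

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// fixed-age: purge rollup documents older than the given duration.
	byAge := map[string]any{
		"list": []string{"8853df00-ae2e-11ed-90af-09bb6422b258"},
		"purgePolicy": map[string]any{
			"purgeType": "fixed-age",
			"age":       "7d",
		},
	}

	// fixed-time: purge rollup documents older than the given timestamp.
	byTime := map[string]any{
		"list": []string{"8853df00-ae2e-11ed-90af-09bb6422b258"},
		"purgePolicy": map[string]any{
			"purgeType": "fixed-time",
			"timestamp": "2024-12-31T00:00:00.000Z",
		},
	}

	for _, body := range []map[string]any{byAge, byTime} {
		b, _ := json.Marshal(body)
		fmt.Println(string(b))
	}
}
```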
+ example: 2024-12-31T00:00:00.000Z + type: string + type: object + bulk_purge_rollup_request_purgePolicy: + description: Policy that dictates which SLI documents to purge based on age + oneOf: + - $ref: '#/components/schemas/bulk_purge_rollup_request_purgePolicy_oneOf' + - $ref: '#/components/schemas/bulk_purge_rollup_request_purgePolicy_oneOf_1' + type: object + find_slo_definitions_response_oneOf: + properties: + page: + example: 1 type: number - errorBudget: - $ref: '#/components/schemas/error_budget' + perPage: + example: 25 + type: number + total: + example: 34 + type: number + results: + items: + $ref: '#/components/schemas/slo_with_summary_response' + type: array + type: object + find_slo_definitions_response_oneOf_1: + properties: + page: + default: 1 + description: for backward compability + type: number + perPage: + description: for backward compability + example: 25 + type: number + size: + example: 25 + type: number + searchAfter: + description: the cursor to provide to get the next paged results + example: + - some-slo-id + - other-cursor-id + items: + type: string + type: array + total: + example: 34 + type: number + results: + items: + $ref: '#/components/schemas/slo_with_summary_response' + type: array type: object delete_slo_instances_request_list_inner: properties: @@ -1835,6 +2531,26 @@ components: - instanceId - sloId type: object + bulk_delete_status_response_results_inner: + example: + success: true + id: d08506b7-f0e8-4f8b-a06a-a83940f4db91 + error: "SLO [d08506b7-f0e8-4f8b-a06a-a83940f4db91] not found" + properties: + id: + description: The ID of the SLO that was deleted + example: d08506b7-f0e8-4f8b-a06a-a83940f4db91 + type: string + success: + description: The result of the deletion operation for this SLO + example: true + type: boolean + error: + description: The error message if the deletion operation failed for this + SLO + example: "SLO [d08506b7-f0e8-4f8b-a06a-a83940f4db91] not found" + type: string + type: object securitySchemes: basicAuth: scheme: basic diff --git a/generated/slo/api_slo.go b/generated/slo/api_slo.go index d4340c8c7..ea94c0d19 100644 --- a/generated/slo/api_slo.go +++ b/generated/slo/api_slo.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -16,13 +16,47 @@ import ( "io" "net/http" "net/url" + "reflect" "strings" ) type SloAPI interface { /* - CreateSloOp Creates an SLO. + BulkDeleteOp Bulk delete SLO definitions and their associated summary and rollup data. + + Bulk delete SLO definitions and their associated summary and rollup data. This endpoint initiates a bulk deletion operation for SLOs, which may take some time to complete. The status of the operation can be checked using the `GET /api/slo/_bulk_delete/{taskId}` endpoint. + + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. 
+ @return ApiBulkDeleteOpRequest + */ + BulkDeleteOp(ctx context.Context, spaceId string) ApiBulkDeleteOpRequest + + // BulkDeleteOpExecute executes the request + // @return BulkDeleteResponse + BulkDeleteOpExecute(r ApiBulkDeleteOpRequest) (*BulkDeleteResponse, *http.Response, error) + + /* + BulkDeleteStatusOp Retrieve the status of the bulk deletion + + Retrieve the status of the bulk deletion operation for SLOs. This endpoint returns the status of the bulk deletion operation, including whether it is completed and the results of the operation. + + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. + @param taskId The task id of the bulk delete operation + @return ApiBulkDeleteStatusOpRequest + */ + BulkDeleteStatusOp(ctx context.Context, spaceId string, taskId string) ApiBulkDeleteStatusOpRequest + + // BulkDeleteStatusOpExecute executes the request + // @return BulkDeleteStatusResponse + BulkDeleteStatusOpExecute(r ApiBulkDeleteStatusOpRequest) (*BulkDeleteStatusResponse, *http.Response, error) + + /* + CreateSloOp Create an SLO You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -38,9 +72,25 @@ type SloAPI interface { CreateSloOpExecute(r ApiCreateSloOpRequest) (*CreateSloResponse, *http.Response, error) /* - DeleteSloInstancesOp Batch delete rollup and summary data for the matching list of sloId and instanceId + DeleteRollupDataOp Batch delete rollup and summary data - You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + The deletion occurs for the specified list of `sloId`. You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. + @return ApiDeleteRollupDataOpRequest + */ + DeleteRollupDataOp(ctx context.Context, spaceId string) ApiDeleteRollupDataOpRequest + + // DeleteRollupDataOpExecute executes the request + // @return BulkPurgeRollupResponse + DeleteRollupDataOpExecute(r ApiDeleteRollupDataOpRequest) (*BulkPurgeRollupResponse, *http.Response, error) + + /* + DeleteSloInstancesOp Batch delete rollup and summary data + + The deletion occurs for the specified list of `sloId` and `instanceId`. You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @@ -53,7 +103,7 @@ type SloAPI interface { DeleteSloInstancesOpExecute(r ApiDeleteSloInstancesOpRequest) (*http.Response, error) /* - DeleteSloOp Deletes an SLO + DeleteSloOp Delete an SLO You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. 
@@ -69,7 +119,7 @@ type SloAPI interface { DeleteSloOpExecute(r ApiDeleteSloOpRequest) (*http.Response, error) /* - DisableSloOp Disables an SLO + DisableSloOp Disable an SLO You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -85,7 +135,7 @@ type SloAPI interface { DisableSloOpExecute(r ApiDisableSloOpRequest) (*http.Response, error) /* - EnableSloOp Enables an SLO + EnableSloOp Enable an SLO You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -101,7 +151,7 @@ type SloAPI interface { EnableSloOpExecute(r ApiEnableSloOpRequest) (*http.Response, error) /* - FindSlosOp Retrieves a paginated list of SLOs + FindSlosOp Get a paginated list of SLOs You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -117,7 +167,23 @@ type SloAPI interface { FindSlosOpExecute(r ApiFindSlosOpRequest) (*FindSloResponse, *http.Response, error) /* - GetSloOp Retrieves a SLO + GetDefinitionsOp Get the SLO definitions + + You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. + @return ApiGetDefinitionsOpRequest + */ + GetDefinitionsOp(ctx context.Context, spaceId string) ApiGetDefinitionsOpRequest + + // GetDefinitionsOpExecute executes the request + // @return FindSloDefinitionsResponse + GetDefinitionsOpExecute(r ApiGetDefinitionsOpRequest) (*FindSloDefinitionsResponse, *http.Response, error) + + /* + GetSloOp Get an SLO You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -130,27 +196,28 @@ type SloAPI interface { GetSloOp(ctx context.Context, spaceId string, sloId string) ApiGetSloOpRequest // GetSloOpExecute executes the request - // @return SloResponse - GetSloOpExecute(r ApiGetSloOpRequest) (*SloResponse, *http.Response, error) + // @return SloWithSummaryResponse + GetSloOpExecute(r ApiGetSloOpRequest) (*SloWithSummaryResponse, *http.Response, error) /* - HistoricalSummaryOp Retrieves the historical summary for a list of SLOs + ResetSloOp Reset an SLO - You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. - @return ApiHistoricalSummaryOpRequest + @param sloId An identifier for the slo. 
+ @return ApiResetSloOpRequest */ - HistoricalSummaryOp(ctx context.Context, spaceId string) ApiHistoricalSummaryOpRequest + ResetSloOp(ctx context.Context, spaceId string, sloId string) ApiResetSloOpRequest - // HistoricalSummaryOpExecute executes the request - // @return map[string][]HistoricalSummaryResponseInner - HistoricalSummaryOpExecute(r ApiHistoricalSummaryOpRequest) (*map[string][]HistoricalSummaryResponseInner, *http.Response, error) + // ResetSloOpExecute executes the request + // @return SloDefinitionResponse + ResetSloOpExecute(r ApiResetSloOpRequest) (*SloDefinitionResponse, *http.Response, error) /* - UpdateSloOp Updates an SLO + UpdateSloOp Update an SLO You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -162,48 +229,568 @@ type SloAPI interface { */ UpdateSloOp(ctx context.Context, spaceId string, sloId string) ApiUpdateSloOpRequest - // UpdateSloOpExecute executes the request - // @return SloResponse - UpdateSloOpExecute(r ApiUpdateSloOpRequest) (*SloResponse, *http.Response, error) + // UpdateSloOpExecute executes the request + // @return SloDefinitionResponse + UpdateSloOpExecute(r ApiUpdateSloOpRequest) (*SloDefinitionResponse, *http.Response, error) +} + +// SloAPIService SloAPI service +type SloAPIService service + +type ApiBulkDeleteOpRequest struct { + ctx context.Context + ApiService SloAPI + kbnXsrf *string + spaceId string + bulkDeleteRequest *BulkDeleteRequest +} + +// Cross-site request forgery protection +func (r ApiBulkDeleteOpRequest) KbnXsrf(kbnXsrf string) ApiBulkDeleteOpRequest { + r.kbnXsrf = &kbnXsrf + return r +} + +func (r ApiBulkDeleteOpRequest) BulkDeleteRequest(bulkDeleteRequest BulkDeleteRequest) ApiBulkDeleteOpRequest { + r.bulkDeleteRequest = &bulkDeleteRequest + return r +} + +func (r ApiBulkDeleteOpRequest) Execute() (*BulkDeleteResponse, *http.Response, error) { + return r.ApiService.BulkDeleteOpExecute(r) +} + +/* +BulkDeleteOp Bulk delete SLO definitions and their associated summary and rollup data. + +Bulk delete SLO definitions and their associated summary and rollup data. This endpoint initiates a bulk deletion operation for SLOs, which may take some time to complete. The status of the operation can be checked using the `GET /api/slo/_bulk_delete/{taskId}` endpoint. + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. 
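A hedged usage sketch of the fluent request builder shown in this file: `BulkDeleteOp`, `KbnXsrf`, `BulkDeleteRequest` and `Execute` match the generated code above, while the import path, the `NewConfiguration`/`NewAPIClient` constructors, the `SloAPI` client field, the `BulkDeleteRequest.List` field name and the `GetTaskId` getter follow the usual openapi-generator Go layout and are assumptions here.

```go
package main

import (
	"context"
	"fmt"

	slo "github.com/elastic/terraform-provider-elasticstack/generated/slo" // assumed import path
)

func main() {
	cfg := slo.NewConfiguration() // assumed generated constructor
	client := slo.NewAPIClient(cfg)

	req := slo.BulkDeleteRequest{ // field name assumed from the `list` JSON property
		List: []string{"8853df00-ae2e-11ed-90af-09bb6422b258"},
	}

	resp, httpRes, err := client.SloAPI.
		BulkDeleteOp(context.Background(), "default"). // spaceId
		KbnXsrf("true").
		BulkDeleteRequest(req).
		Execute()
	if err != nil {
		fmt.Println("bulk delete failed:", err, httpRes)
		return
	}
	fmt.Println("task id:", resp.GetTaskId()) // getter assumed from the generated model
}
```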
+ @return ApiBulkDeleteOpRequest +*/ +func (a *SloAPIService) BulkDeleteOp(ctx context.Context, spaceId string) ApiBulkDeleteOpRequest { + return ApiBulkDeleteOpRequest{ + ApiService: a, + ctx: ctx, + spaceId: spaceId, + } +} + +// Execute executes the request +// +// @return BulkDeleteResponse +func (a *SloAPIService) BulkDeleteOpExecute(r ApiBulkDeleteOpRequest) (*BulkDeleteResponse, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodPost + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *BulkDeleteResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "SloAPIService.BulkDeleteOp") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/s/{spaceId}/api/observability/slos/_bulk_delete" + localVarPath = strings.Replace(localVarPath, "{"+"spaceId"+"}", url.PathEscape(parameterValueToString(r.spaceId, "spaceId")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + if r.kbnXsrf == nil { + return localVarReturnValue, nil, reportError("kbnXsrf is required and must be specified") + } + if r.bulkDeleteRequest == nil { + return localVarReturnValue, nil, reportError("bulkDeleteRequest is required and must be specified") + } + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + parameterAddToHeaderOrQuery(localVarHeaderParams, "kbn-xsrf", r.kbnXsrf, "") + // body params + localVarPostBody = r.bulkDeleteRequest + if r.ctx != nil { + // API Key Authentication + if auth, ok := r.ctx.Value(ContextAPIKeys).(map[string]APIKey); ok { + if apiKey, ok := auth["apiKeyAuth"]; ok { + var key string + if apiKey.Prefix != "" { + key = apiKey.Prefix + " " + apiKey.Key + } else { + key = apiKey.Key + } + localVarHeaderParams["Authorization"] = key + } + } + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + if localVarHTTPResponse.StatusCode == 400 { + var v Model400Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = 
formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 401 { + var v Model401Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 403 { + var v Model403Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiBulkDeleteStatusOpRequest struct { + ctx context.Context + ApiService SloAPI + kbnXsrf *string + spaceId string + taskId string +} + +// Cross-site request forgery protection +func (r ApiBulkDeleteStatusOpRequest) KbnXsrf(kbnXsrf string) ApiBulkDeleteStatusOpRequest { + r.kbnXsrf = &kbnXsrf + return r +} + +func (r ApiBulkDeleteStatusOpRequest) Execute() (*BulkDeleteStatusResponse, *http.Response, error) { + return r.ApiService.BulkDeleteStatusOpExecute(r) +} + +/* +BulkDeleteStatusOp Retrieve the status of the bulk deletion + +Retrieve the status of the bulk deletion operation for SLOs. This endpoint returns the status of the bulk deletion operation, including whether it is completed and the results of the operation. + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. 
+ @param taskId The task id of the bulk delete operation + @return ApiBulkDeleteStatusOpRequest +*/ +func (a *SloAPIService) BulkDeleteStatusOp(ctx context.Context, spaceId string, taskId string) ApiBulkDeleteStatusOpRequest { + return ApiBulkDeleteStatusOpRequest{ + ApiService: a, + ctx: ctx, + spaceId: spaceId, + taskId: taskId, + } +} + +// Execute executes the request +// +// @return BulkDeleteStatusResponse +func (a *SloAPIService) BulkDeleteStatusOpExecute(r ApiBulkDeleteStatusOpRequest) (*BulkDeleteStatusResponse, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodGet + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *BulkDeleteStatusResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "SloAPIService.BulkDeleteStatusOp") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/s/{spaceId}/api/observability/slos/_bulk_delete/{taskId}" + localVarPath = strings.Replace(localVarPath, "{"+"spaceId"+"}", url.PathEscape(parameterValueToString(r.spaceId, "spaceId")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"taskId"+"}", url.PathEscape(parameterValueToString(r.taskId, "taskId")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + if r.kbnXsrf == nil { + return localVarReturnValue, nil, reportError("kbnXsrf is required and must be specified") + } + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + parameterAddToHeaderOrQuery(localVarHeaderParams, "kbn-xsrf", r.kbnXsrf, "") + if r.ctx != nil { + // API Key Authentication + if auth, ok := r.ctx.Value(ContextAPIKeys).(map[string]APIKey); ok { + if apiKey, ok := auth["apiKeyAuth"]; ok { + var key string + if apiKey.Prefix != "" { + key = apiKey.Prefix + " " + apiKey.Key + } else { + key = apiKey.Key + } + localVarHeaderParams["Authorization"] = key + } + } + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + if localVarHTTPResponse.StatusCode == 400 { + var v Model400Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return 
localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 401 { + var v Model401Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 403 { + var v Model403Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + +type ApiCreateSloOpRequest struct { + ctx context.Context + ApiService SloAPI + kbnXsrf *string + spaceId string + createSloRequest *CreateSloRequest +} + +// Cross-site request forgery protection +func (r ApiCreateSloOpRequest) KbnXsrf(kbnXsrf string) ApiCreateSloOpRequest { + r.kbnXsrf = &kbnXsrf + return r +} + +func (r ApiCreateSloOpRequest) CreateSloRequest(createSloRequest CreateSloRequest) ApiCreateSloOpRequest { + r.createSloRequest = &createSloRequest + return r +} + +func (r ApiCreateSloOpRequest) Execute() (*CreateSloResponse, *http.Response, error) { + return r.ApiService.CreateSloOpExecute(r) +} + +/* +CreateSloOp Create an SLO + +You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. 
+ @return ApiCreateSloOpRequest +*/ +func (a *SloAPIService) CreateSloOp(ctx context.Context, spaceId string) ApiCreateSloOpRequest { + return ApiCreateSloOpRequest{ + ApiService: a, + ctx: ctx, + spaceId: spaceId, + } +} + +// Execute executes the request +// +// @return CreateSloResponse +func (a *SloAPIService) CreateSloOpExecute(r ApiCreateSloOpRequest) (*CreateSloResponse, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodPost + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *CreateSloResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "SloAPIService.CreateSloOp") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/s/{spaceId}/api/observability/slos" + localVarPath = strings.Replace(localVarPath, "{"+"spaceId"+"}", url.PathEscape(parameterValueToString(r.spaceId, "spaceId")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + if r.kbnXsrf == nil { + return localVarReturnValue, nil, reportError("kbnXsrf is required and must be specified") + } + if r.createSloRequest == nil { + return localVarReturnValue, nil, reportError("createSloRequest is required and must be specified") + } + + // to determine the Content-Type header + localVarHTTPContentTypes := []string{"application/json"} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + parameterAddToHeaderOrQuery(localVarHeaderParams, "kbn-xsrf", r.kbnXsrf, "") + // body params + localVarPostBody = r.createSloRequest + if r.ctx != nil { + // API Key Authentication + if auth, ok := r.ctx.Value(ContextAPIKeys).(map[string]APIKey); ok { + if apiKey, ok := auth["apiKeyAuth"]; ok { + var key string + if apiKey.Prefix != "" { + key = apiKey.Prefix + " " + apiKey.Key + } else { + key = apiKey.Key + } + localVarHeaderParams["Authorization"] = key + } + } + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + if localVarHTTPResponse.StatusCode == 400 { + var v Model400Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = 
formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 401 { + var v Model401Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 403 { + var v Model403Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 409 { + var v Model409Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil } -// SloAPIService SloAPI service -type SloAPIService service - -type ApiCreateSloOpRequest struct { - ctx context.Context - ApiService SloAPI - kbnXsrf *string - spaceId string - createSloRequest *CreateSloRequest +type ApiDeleteRollupDataOpRequest struct { + ctx context.Context + ApiService SloAPI + kbnXsrf *string + spaceId string + bulkPurgeRollupRequest *BulkPurgeRollupRequest } // Cross-site request forgery protection -func (r ApiCreateSloOpRequest) KbnXsrf(kbnXsrf string) ApiCreateSloOpRequest { +func (r ApiDeleteRollupDataOpRequest) KbnXsrf(kbnXsrf string) ApiDeleteRollupDataOpRequest { r.kbnXsrf = &kbnXsrf return r } -func (r ApiCreateSloOpRequest) CreateSloRequest(createSloRequest CreateSloRequest) ApiCreateSloOpRequest { - r.createSloRequest = &createSloRequest +func (r ApiDeleteRollupDataOpRequest) BulkPurgeRollupRequest(bulkPurgeRollupRequest BulkPurgeRollupRequest) ApiDeleteRollupDataOpRequest { + r.bulkPurgeRollupRequest = &bulkPurgeRollupRequest return r } -func (r ApiCreateSloOpRequest) Execute() (*CreateSloResponse, *http.Response, error) { - return r.ApiService.CreateSloOpExecute(r) +func (r ApiDeleteRollupDataOpRequest) Execute() (*BulkPurgeRollupResponse, *http.Response, error) { + return r.ApiService.DeleteRollupDataOpExecute(r) } /* -CreateSloOp Creates an SLO. +DeleteRollupDataOp Batch delete rollup and summary data -You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. +The deletion occurs for the specified list of `sloId`. You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). 
@param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. - @return ApiCreateSloOpRequest + @return ApiDeleteRollupDataOpRequest */ -func (a *SloAPIService) CreateSloOp(ctx context.Context, spaceId string) ApiCreateSloOpRequest { - return ApiCreateSloOpRequest{ +func (a *SloAPIService) DeleteRollupDataOp(ctx context.Context, spaceId string) ApiDeleteRollupDataOpRequest { + return ApiDeleteRollupDataOpRequest{ ApiService: a, ctx: ctx, spaceId: spaceId, @@ -212,21 +799,21 @@ func (a *SloAPIService) CreateSloOp(ctx context.Context, spaceId string) ApiCrea // Execute executes the request // -// @return CreateSloResponse -func (a *SloAPIService) CreateSloOpExecute(r ApiCreateSloOpRequest) (*CreateSloResponse, *http.Response, error) { +// @return BulkPurgeRollupResponse +func (a *SloAPIService) DeleteRollupDataOpExecute(r ApiDeleteRollupDataOpRequest) (*BulkPurgeRollupResponse, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPost localVarPostBody interface{} formFiles []formFile - localVarReturnValue *CreateSloResponse + localVarReturnValue *BulkPurgeRollupResponse ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "SloAPIService.CreateSloOp") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "SloAPIService.DeleteRollupDataOp") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } - localVarPath := localBasePath + "/s/{spaceId}/api/observability/slos" + localVarPath := localBasePath + "/s/{spaceId}/api/observability/slos/_bulk_purge_rollup" localVarPath = strings.Replace(localVarPath, "{"+"spaceId"+"}", url.PathEscape(parameterValueToString(r.spaceId, "spaceId")), -1) localVarHeaderParams := make(map[string]string) @@ -235,8 +822,8 @@ func (a *SloAPIService) CreateSloOpExecute(r ApiCreateSloOpRequest) (*CreateSloR if r.kbnXsrf == nil { return localVarReturnValue, nil, reportError("kbnXsrf is required and must be specified") } - if r.createSloRequest == nil { - return localVarReturnValue, nil, reportError("createSloRequest is required and must be specified") + if r.bulkPurgeRollupRequest == nil { + return localVarReturnValue, nil, reportError("bulkPurgeRollupRequest is required and must be specified") } // to determine the Content-Type header @@ -258,7 +845,7 @@ func (a *SloAPIService) CreateSloOpExecute(r ApiCreateSloOpRequest) (*CreateSloR } parameterAddToHeaderOrQuery(localVarHeaderParams, "kbn-xsrf", r.kbnXsrf, "") // body params - localVarPostBody = r.createSloRequest + localVarPostBody = r.bulkPurgeRollupRequest if r.ctx != nil { // API Key Authentication if auth, ok := r.ctx.Value(ContextAPIKeys).(map[string]APIKey); ok { @@ -326,17 +913,6 @@ func (a *SloAPIService) CreateSloOpExecute(r ApiCreateSloOpRequest) (*CreateSloR } newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v - return localVarReturnValue, localVarHTTPResponse, newErr - } - if localVarHTTPResponse.StatusCode == 409 { - var v Model409Response - err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) - if err != nil { - newErr.error = err.Error() - return localVarReturnValue, localVarHTTPResponse, newErr - } - newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) - newErr.model = v } return localVarReturnValue, localVarHTTPResponse, newErr } @@ -377,9 +953,9 @@ func (r ApiDeleteSloInstancesOpRequest) Execute() (*http.Response, error) { } /* -DeleteSloInstancesOp Batch delete rollup and summary data 
for the matching list of sloId and instanceId +DeleteSloInstancesOp Batch delete rollup and summary data -You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. +The deletion occurs for the specified list of `sloId` and `instanceId`. You must have `all` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. @@ -532,7 +1108,7 @@ func (r ApiDeleteSloOpRequest) Execute() (*http.Response, error) { } /* -DeleteSloOp Deletes an SLO +DeleteSloOp Delete an SLO You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -696,7 +1272,7 @@ func (r ApiDisableSloOpRequest) Execute() (*http.Response, error) { } /* -DisableSloOp Disables an SLO +DisableSloOp Disable an SLO You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -860,7 +1436,7 @@ func (r ApiEnableSloOpRequest) Execute() (*http.Response, error) { } /* -EnableSloOp Enables an SLO +EnableSloOp Enable an SLO You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -1011,10 +1587,13 @@ type ApiFindSlosOpRequest struct { kbnXsrf *string spaceId string kqlQuery *string + size *int32 + searchAfter *[]string page *int32 perPage *int32 sortBy *string sortDirection *string + hideStale *bool } // Cross-site request forgery protection @@ -1029,13 +1608,25 @@ func (r ApiFindSlosOpRequest) KqlQuery(kqlQuery string) ApiFindSlosOpRequest { return r } -// The page number to return +// The page size to use for cursor-based pagination, must be greater or equal than 1 +func (r ApiFindSlosOpRequest) Size(size int32) ApiFindSlosOpRequest { + r.size = &size + return r +} + +// The cursor to use for fetching the results from, when using a cursor-base pagination. +func (r ApiFindSlosOpRequest) SearchAfter(searchAfter []string) ApiFindSlosOpRequest { + r.searchAfter = &searchAfter + return r +} + +// The page to use for pagination, must be greater or equal than 1 func (r ApiFindSlosOpRequest) Page(page int32) ApiFindSlosOpRequest { r.page = &page return r } -// The number of SLOs to return per page +// Number of SLOs returned by page func (r ApiFindSlosOpRequest) PerPage(perPage int32) ApiFindSlosOpRequest { r.perPage = &perPage return r @@ -1053,12 +1644,18 @@ func (r ApiFindSlosOpRequest) SortDirection(sortDirection string) ApiFindSlosOpR return r } +// Hide stale SLOs from the list as defined by stale SLO threshold in SLO settings +func (r ApiFindSlosOpRequest) HideStale(hideStale bool) ApiFindSlosOpRequest { + r.hideStale = &hideStale + return r +} + func (r ApiFindSlosOpRequest) Execute() (*FindSloResponse, *http.Response, error) { return r.ApiService.FindSlosOpExecute(r) } /* -FindSlosOp Retrieves a paginated list of SLOs +FindSlosOp Get a paginated list of SLOs You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. 
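A minimal usage sketch of the new cursor-based listing options (size, searchAfter, hideStale) on the regenerated client. The import path is assumed from the repository layout, the kbn-xsrf builder is assumed to follow the same pattern as the other operations in this diff, and the shape of FindSloResponse is not shown here, so the response is only checked for errors:

package example

import (
	"context"
	"fmt"
	"log"

	slo "github.com/elastic/terraform-provider-elasticstack/generated/slo"
)

// listSlos fetches one page of SLOs using the new cursor-based options.
func listSlos(ctx context.Context, api slo.SloAPI, spaceID string) {
	resp, _, err := api.FindSlosOp(ctx, spaceID).
		KbnXsrf("true").  // Kibana cross-site request forgery header
		Size(25).         // page size for cursor-based pagination (>= 1)
		HideStale(true).  // hide SLOs considered stale by the SLO settings threshold
		Execute()
	if err != nil {
		log.Fatalf("find SLOs: %v", err)
	}
	// Subsequent pages would pass the cursor from the previous response via
	// SearchAfter(...); the response field carrying that cursor is not shown in this diff.
	fmt.Printf("find SLOs response: %+v\n", resp)
}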
@@ -1103,6 +1700,23 @@ func (a *SloAPIService) FindSlosOpExecute(r ApiFindSlosOpRequest) (*FindSloRespo if r.kqlQuery != nil { parameterAddToHeaderOrQuery(localVarQueryParams, "kqlQuery", r.kqlQuery, "") } + if r.size != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "size", r.size, "") + } else { + var defaultValue int32 = 1 + r.size = &defaultValue + } + if r.searchAfter != nil { + t := *r.searchAfter + if reflect.TypeOf(t).Kind() == reflect.Slice { + s := reflect.ValueOf(t) + for i := 0; i < s.Len(); i++ { + parameterAddToHeaderOrQuery(localVarQueryParams, "searchAfter", s.Index(i).Interface(), "multi") + } + } else { + parameterAddToHeaderOrQuery(localVarQueryParams, "searchAfter", t, "multi") + } + } if r.page != nil { parameterAddToHeaderOrQuery(localVarQueryParams, "page", r.page, "") } else { @@ -1127,6 +1741,9 @@ func (a *SloAPIService) FindSlosOpExecute(r ApiFindSlosOpRequest) (*FindSloRespo var defaultValue string = "asc" r.sortDirection = &defaultValue } + if r.hideStale != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "hideStale", r.hideStale, "") + } // to determine the Content-Type header localVarHTTPContentTypes := []string{} @@ -1239,6 +1856,220 @@ func (a *SloAPIService) FindSlosOpExecute(r ApiFindSlosOpRequest) (*FindSloRespo return localVarReturnValue, localVarHTTPResponse, nil } +type ApiGetDefinitionsOpRequest struct { + ctx context.Context + ApiService SloAPI + kbnXsrf *string + spaceId string + includeOutdatedOnly *bool + tags *string + search *string + page *float64 + perPage *int32 +} + +// Cross-site request forgery protection +func (r ApiGetDefinitionsOpRequest) KbnXsrf(kbnXsrf string) ApiGetDefinitionsOpRequest { + r.kbnXsrf = &kbnXsrf + return r +} + +// Indicates if the API returns only outdated SLO or all SLO definitions +func (r ApiGetDefinitionsOpRequest) IncludeOutdatedOnly(includeOutdatedOnly bool) ApiGetDefinitionsOpRequest { + r.includeOutdatedOnly = &includeOutdatedOnly + return r +} + +// Filters the SLOs by tag +func (r ApiGetDefinitionsOpRequest) Tags(tags string) ApiGetDefinitionsOpRequest { + r.tags = &tags + return r +} + +// Filters the SLOs by name +func (r ApiGetDefinitionsOpRequest) Search(search string) ApiGetDefinitionsOpRequest { + r.search = &search + return r +} + +// The page to use for pagination, must be greater or equal than 1 +func (r ApiGetDefinitionsOpRequest) Page(page float64) ApiGetDefinitionsOpRequest { + r.page = &page + return r +} + +// Number of SLOs returned by page +func (r ApiGetDefinitionsOpRequest) PerPage(perPage int32) ApiGetDefinitionsOpRequest { + r.perPage = &perPage + return r +} + +func (r ApiGetDefinitionsOpRequest) Execute() (*FindSloDefinitionsResponse, *http.Response, error) { + return r.ApiService.GetDefinitionsOpExecute(r) +} + +/* +GetDefinitionsOp Get the SLO definitions + +You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. + + @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). + @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. 
+ @return ApiGetDefinitionsOpRequest +*/ +func (a *SloAPIService) GetDefinitionsOp(ctx context.Context, spaceId string) ApiGetDefinitionsOpRequest { + return ApiGetDefinitionsOpRequest{ + ApiService: a, + ctx: ctx, + spaceId: spaceId, + } +} + +// Execute executes the request +// +// @return FindSloDefinitionsResponse +func (a *SloAPIService) GetDefinitionsOpExecute(r ApiGetDefinitionsOpRequest) (*FindSloDefinitionsResponse, *http.Response, error) { + var ( + localVarHTTPMethod = http.MethodGet + localVarPostBody interface{} + formFiles []formFile + localVarReturnValue *FindSloDefinitionsResponse + ) + + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "SloAPIService.GetDefinitionsOp") + if err != nil { + return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} + } + + localVarPath := localBasePath + "/s/{spaceId}/internal/observability/slos/_definitions" + localVarPath = strings.Replace(localVarPath, "{"+"spaceId"+"}", url.PathEscape(parameterValueToString(r.spaceId, "spaceId")), -1) + + localVarHeaderParams := make(map[string]string) + localVarQueryParams := url.Values{} + localVarFormParams := url.Values{} + if r.kbnXsrf == nil { + return localVarReturnValue, nil, reportError("kbnXsrf is required and must be specified") + } + + if r.includeOutdatedOnly != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "includeOutdatedOnly", r.includeOutdatedOnly, "") + } + if r.tags != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "tags", r.tags, "") + } + if r.search != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "search", r.search, "") + } + if r.page != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "page", r.page, "") + } + if r.perPage != nil { + parameterAddToHeaderOrQuery(localVarQueryParams, "perPage", r.perPage, "") + } else { + var defaultValue int32 = 100 + r.perPage = &defaultValue + } + // to determine the Content-Type header + localVarHTTPContentTypes := []string{} + + // set Content-Type header + localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) + if localVarHTTPContentType != "" { + localVarHeaderParams["Content-Type"] = localVarHTTPContentType + } + + // to determine the Accept header + localVarHTTPHeaderAccepts := []string{"application/json"} + + // set Accept header + localVarHTTPHeaderAccept := selectHeaderAccept(localVarHTTPHeaderAccepts) + if localVarHTTPHeaderAccept != "" { + localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept + } + parameterAddToHeaderOrQuery(localVarHeaderParams, "kbn-xsrf", r.kbnXsrf, "") + if r.ctx != nil { + // API Key Authentication + if auth, ok := r.ctx.Value(ContextAPIKeys).(map[string]APIKey); ok { + if apiKey, ok := auth["apiKeyAuth"]; ok { + var key string + if apiKey.Prefix != "" { + key = apiKey.Prefix + " " + apiKey.Key + } else { + key = apiKey.Key + } + localVarHeaderParams["Authorization"] = key + } + } + } + req, err := a.client.prepareRequest(r.ctx, localVarPath, localVarHTTPMethod, localVarPostBody, localVarHeaderParams, localVarQueryParams, localVarFormParams, formFiles) + if err != nil { + return localVarReturnValue, nil, err + } + + localVarHTTPResponse, err := a.client.callAPI(req) + if err != nil || localVarHTTPResponse == nil { + return localVarReturnValue, localVarHTTPResponse, err + } + + localVarBody, err := io.ReadAll(localVarHTTPResponse.Body) + localVarHTTPResponse.Body.Close() + localVarHTTPResponse.Body = io.NopCloser(bytes.NewBuffer(localVarBody)) + if err != nil { + return localVarReturnValue, localVarHTTPResponse, err + 
} + + if localVarHTTPResponse.StatusCode >= 300 { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: localVarHTTPResponse.Status, + } + if localVarHTTPResponse.StatusCode == 400 { + var v Model400Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 401 { + var v Model401Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 403 { + var v Model403Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + err = a.client.decode(&localVarReturnValue, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr := &GenericOpenAPIError{ + body: localVarBody, + error: err.Error(), + } + return localVarReturnValue, localVarHTTPResponse, newErr + } + + return localVarReturnValue, localVarHTTPResponse, nil +} + type ApiGetSloOpRequest struct { ctx context.Context ApiService SloAPI @@ -1260,12 +2091,12 @@ func (r ApiGetSloOpRequest) InstanceId(instanceId string) ApiGetSloOpRequest { return r } -func (r ApiGetSloOpRequest) Execute() (*SloResponse, *http.Response, error) { +func (r ApiGetSloOpRequest) Execute() (*SloWithSummaryResponse, *http.Response, error) { return r.ApiService.GetSloOpExecute(r) } /* -GetSloOp Retrieves a SLO +GetSloOp Get an SLO You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. 
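A short sketch of the updated read path: GetSloOp now yields a SloWithSummaryResponse instead of the previous SloResponse. The import path, the kbn-xsrf value, and the instance id are illustrative assumptions:

package example

import (
	"context"
	"fmt"

	slo "github.com/elastic/terraform-provider-elasticstack/generated/slo"
)

// fetchSlo reads a single SLO together with its summary, the new return shape of GetSloOp.
func fetchSlo(ctx context.Context, api slo.SloAPI, spaceID, sloID string) (*slo.SloWithSummaryResponse, error) {
	result, _, err := api.GetSloOp(ctx, spaceID, sloID).
		KbnXsrf("true").        // assumed to follow the same builder pattern as the other operations
		InstanceId("host-abc"). // hypothetical instance id; only relevant for grouped SLOs
		Execute()
	if err != nil {
		return nil, fmt.Errorf("get SLO %s: %w", sloID, err)
	}
	return result, nil
}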
@@ -1285,13 +2116,13 @@ func (a *SloAPIService) GetSloOp(ctx context.Context, spaceId string, sloId stri // Execute executes the request // -// @return SloResponse -func (a *SloAPIService) GetSloOpExecute(r ApiGetSloOpRequest) (*SloResponse, *http.Response, error) { +// @return SloWithSummaryResponse +func (a *SloAPIService) GetSloOpExecute(r ApiGetSloOpRequest) (*SloWithSummaryResponse, *http.Response, error) { var ( localVarHTTPMethod = http.MethodGet localVarPostBody interface{} formFiles []formFile - localVarReturnValue *SloResponse + localVarReturnValue *SloWithSummaryResponse ) localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "SloAPIService.GetSloOp") @@ -1425,64 +2256,62 @@ func (a *SloAPIService) GetSloOpExecute(r ApiGetSloOpRequest) (*SloResponse, *ht return localVarReturnValue, localVarHTTPResponse, nil } -type ApiHistoricalSummaryOpRequest struct { - ctx context.Context - ApiService SloAPI - kbnXsrf *string - spaceId string - historicalSummaryRequest *HistoricalSummaryRequest +type ApiResetSloOpRequest struct { + ctx context.Context + ApiService SloAPI + kbnXsrf *string + spaceId string + sloId string } // Cross-site request forgery protection -func (r ApiHistoricalSummaryOpRequest) KbnXsrf(kbnXsrf string) ApiHistoricalSummaryOpRequest { +func (r ApiResetSloOpRequest) KbnXsrf(kbnXsrf string) ApiResetSloOpRequest { r.kbnXsrf = &kbnXsrf return r } -func (r ApiHistoricalSummaryOpRequest) HistoricalSummaryRequest(historicalSummaryRequest HistoricalSummaryRequest) ApiHistoricalSummaryOpRequest { - r.historicalSummaryRequest = &historicalSummaryRequest - return r -} - -func (r ApiHistoricalSummaryOpRequest) Execute() (*map[string][]HistoricalSummaryResponseInner, *http.Response, error) { - return r.ApiService.HistoricalSummaryOpExecute(r) +func (r ApiResetSloOpRequest) Execute() (*SloDefinitionResponse, *http.Response, error) { + return r.ApiService.ResetSloOpExecute(r) } /* -HistoricalSummaryOp Retrieves the historical summary for a list of SLOs +ResetSloOp Reset an SLO -You must have the `read` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. +You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @param ctx context.Context - for authentication, logging, cancellation, deadlines, tracing, etc. Passed from http.Request or context.Background(). @param spaceId An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. - @return ApiHistoricalSummaryOpRequest + @param sloId An identifier for the slo. 
+ @return ApiResetSloOpRequest */ -func (a *SloAPIService) HistoricalSummaryOp(ctx context.Context, spaceId string) ApiHistoricalSummaryOpRequest { - return ApiHistoricalSummaryOpRequest{ +func (a *SloAPIService) ResetSloOp(ctx context.Context, spaceId string, sloId string) ApiResetSloOpRequest { + return ApiResetSloOpRequest{ ApiService: a, ctx: ctx, spaceId: spaceId, + sloId: sloId, } } // Execute executes the request // -// @return map[string][]HistoricalSummaryResponseInner -func (a *SloAPIService) HistoricalSummaryOpExecute(r ApiHistoricalSummaryOpRequest) (*map[string][]HistoricalSummaryResponseInner, *http.Response, error) { +// @return SloDefinitionResponse +func (a *SloAPIService) ResetSloOpExecute(r ApiResetSloOpRequest) (*SloDefinitionResponse, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPost localVarPostBody interface{} formFiles []formFile - localVarReturnValue *map[string][]HistoricalSummaryResponseInner + localVarReturnValue *SloDefinitionResponse ) - localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "SloAPIService.HistoricalSummaryOp") + localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "SloAPIService.ResetSloOp") if err != nil { return localVarReturnValue, nil, &GenericOpenAPIError{error: err.Error()} } - localVarPath := localBasePath + "/s/{spaceId}/internal/observability/slos/_historical_summary" + localVarPath := localBasePath + "/s/{spaceId}/api/observability/slos/{sloId}/_reset" localVarPath = strings.Replace(localVarPath, "{"+"spaceId"+"}", url.PathEscape(parameterValueToString(r.spaceId, "spaceId")), -1) + localVarPath = strings.Replace(localVarPath, "{"+"sloId"+"}", url.PathEscape(parameterValueToString(r.sloId, "sloId")), -1) localVarHeaderParams := make(map[string]string) localVarQueryParams := url.Values{} @@ -1490,12 +2319,9 @@ func (a *SloAPIService) HistoricalSummaryOpExecute(r ApiHistoricalSummaryOpReque if r.kbnXsrf == nil { return localVarReturnValue, nil, reportError("kbnXsrf is required and must be specified") } - if r.historicalSummaryRequest == nil { - return localVarReturnValue, nil, reportError("historicalSummaryRequest is required and must be specified") - } // to determine the Content-Type header - localVarHTTPContentTypes := []string{"application/json"} + localVarHTTPContentTypes := []string{} // set Content-Type header localVarHTTPContentType := selectHeaderContentType(localVarHTTPContentTypes) @@ -1512,8 +2338,6 @@ func (a *SloAPIService) HistoricalSummaryOpExecute(r ApiHistoricalSummaryOpReque localVarHeaderParams["Accept"] = localVarHTTPHeaderAccept } parameterAddToHeaderOrQuery(localVarHeaderParams, "kbn-xsrf", r.kbnXsrf, "") - // body params - localVarPostBody = r.historicalSummaryRequest if r.ctx != nil { // API Key Authentication if auth, ok := r.ctx.Value(ContextAPIKeys).(map[string]APIKey); ok { @@ -1581,6 +2405,17 @@ func (a *SloAPIService) HistoricalSummaryOpExecute(r ApiHistoricalSummaryOpReque } newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) newErr.model = v + return localVarReturnValue, localVarHTTPResponse, newErr + } + if localVarHTTPResponse.StatusCode == 404 { + var v Model404Response + err = a.client.decode(&v, localVarBody, localVarHTTPResponse.Header.Get("Content-Type")) + if err != nil { + newErr.error = err.Error() + return localVarReturnValue, localVarHTTPResponse, newErr + } + newErr.error = formatErrorMessage(localVarHTTPResponse.Status, &v) + newErr.model = v } return localVarReturnValue, localVarHTTPResponse, newErr } @@ -1617,12 +2452,12 @@ func (r 
ApiUpdateSloOpRequest) UpdateSloRequest(updateSloRequest UpdateSloReques return r } -func (r ApiUpdateSloOpRequest) Execute() (*SloResponse, *http.Response, error) { +func (r ApiUpdateSloOpRequest) Execute() (*SloDefinitionResponse, *http.Response, error) { return r.ApiService.UpdateSloOpExecute(r) } /* -UpdateSloOp Updates an SLO +UpdateSloOp Update an SLO You must have the `write` privileges for the **SLOs** feature in the **Observability** section of the Kibana feature privileges. @@ -1642,13 +2477,13 @@ func (a *SloAPIService) UpdateSloOp(ctx context.Context, spaceId string, sloId s // Execute executes the request // -// @return SloResponse -func (a *SloAPIService) UpdateSloOpExecute(r ApiUpdateSloOpRequest) (*SloResponse, *http.Response, error) { +// @return SloDefinitionResponse +func (a *SloAPIService) UpdateSloOpExecute(r ApiUpdateSloOpRequest) (*SloDefinitionResponse, *http.Response, error) { var ( localVarHTTPMethod = http.MethodPut localVarPostBody interface{} formFiles []formFile - localVarReturnValue *SloResponse + localVarReturnValue *SloDefinitionResponse ) localBasePath, err := a.client.cfg.ServerURLWithContext(r.ctx, "SloAPIService.UpdateSloOp") diff --git a/generated/slo/client.go b/generated/slo/client.go index 6a012a56f..0a3629f63 100644 --- a/generated/slo/client.go +++ b/generated/slo/client.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -40,7 +40,7 @@ var ( queryDescape = strings.NewReplacer("%5B", "[", "%5D", "]") ) -// APIClient manages communication with the SLOs API v1.0 +// APIClient manages communication with the SLOs API v1.1 // In most cases there should be only one, shared, APIClient. type APIClient struct { cfg *Configuration diff --git a/generated/slo/configuration.go b/generated/slo/configuration.go index 2df8d268c..c4fa931a6 100644 --- a/generated/slo/configuration.go +++ b/generated/slo/configuration.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
@@ -96,24 +96,17 @@ func NewConfiguration() *Configuration { Debug: false, Servers: ServerConfigurations{ { - URL: "http://localhost:5601", - Description: "local", - }, - }, - OperationServers: map[string]ServerConfigurations{ - "SloAPIService.CreateSloOp": { - { - URL: "https://localhost:5601", - Description: "No description provided", - }, - }, - "SloAPIService.DeleteSloInstancesOp": { - { - URL: "https://localhost:5601", - Description: "No description provided", + URL: "https://{kibana_url}", + Description: "No description provided", + Variables: map[string]ServerVariable{ + "kibana_url": ServerVariable{ + Description: "No description provided", + DefaultValue: "localhost:5601", + }, }, }, }, + OperationServers: map[string]ServerConfigurations{}, } return cfg } diff --git a/generated/slo/docs/HistoricalSummaryRequest.md b/generated/slo/docs/BulkDeleteRequest.md similarity index 57% rename from generated/slo/docs/HistoricalSummaryRequest.md rename to generated/slo/docs/BulkDeleteRequest.md index c63ad83ec..a33e05ad7 100644 --- a/generated/slo/docs/HistoricalSummaryRequest.md +++ b/generated/slo/docs/BulkDeleteRequest.md @@ -1,46 +1,46 @@ -# HistoricalSummaryRequest +# BulkDeleteRequest ## Properties Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**List** | **[]string** | The list of SLO identifiers to get the historical summary for | +**List** | **[]string** | An array of SLO Definition id | ## Methods -### NewHistoricalSummaryRequest +### NewBulkDeleteRequest -`func NewHistoricalSummaryRequest(list []string, ) *HistoricalSummaryRequest` +`func NewBulkDeleteRequest(list []string, ) *BulkDeleteRequest` -NewHistoricalSummaryRequest instantiates a new HistoricalSummaryRequest object +NewBulkDeleteRequest instantiates a new BulkDeleteRequest object This constructor will assign default values to properties that have it defined, and makes sure properties required by API are set, but the set of arguments will change when the set of required properties is changed -### NewHistoricalSummaryRequestWithDefaults +### NewBulkDeleteRequestWithDefaults -`func NewHistoricalSummaryRequestWithDefaults() *HistoricalSummaryRequest` +`func NewBulkDeleteRequestWithDefaults() *BulkDeleteRequest` -NewHistoricalSummaryRequestWithDefaults instantiates a new HistoricalSummaryRequest object +NewBulkDeleteRequestWithDefaults instantiates a new BulkDeleteRequest object This constructor will only assign default values to properties that have it defined, but it doesn't guarantee that properties required by API are set ### GetList -`func (o *HistoricalSummaryRequest) GetList() []string` +`func (o *BulkDeleteRequest) GetList() []string` GetList returns the List field if non-nil, zero value otherwise. ### GetListOk -`func (o *HistoricalSummaryRequest) GetListOk() (*[]string, bool)` +`func (o *BulkDeleteRequest) GetListOk() (*[]string, bool)` GetListOk returns a tuple with the List field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetList -`func (o *HistoricalSummaryRequest) SetList(v []string)` +`func (o *BulkDeleteRequest) SetList(v []string)` SetList sets List field to given value. 
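A hedged sketch of the asynchronous bulk-delete workflow added by this change, using only the constructors and accessors shown in this diff (NewBulkDeleteRequest, BulkDeleteOp, BulkDeleteStatusOp, TaskId/IsDone/Results). The import path and the polling interval are assumptions:

package example

import (
	"context"
	"fmt"
	"time"

	slo "github.com/elastic/terraform-provider-elasticstack/generated/slo"
)

// bulkDeleteSlos submits a bulk delete of SLO definitions, then polls the status
// endpoint with the returned task id until the operation reports completion.
func bulkDeleteSlos(ctx context.Context, api slo.SloAPI, spaceID string, sloIDs []string) error {
	req := *slo.NewBulkDeleteRequest(sloIDs)
	started, _, err := api.BulkDeleteOp(ctx, spaceID).
		KbnXsrf("true").
		BulkDeleteRequest(req).
		Execute()
	if err != nil {
		return fmt.Errorf("start bulk delete: %w", err)
	}
	if !started.HasTaskId() {
		return fmt.Errorf("bulk delete accepted but no task id returned")
	}

	for {
		status, _, err := api.BulkDeleteStatusOp(ctx, spaceID, started.GetTaskId()).
			KbnXsrf("true").
			Execute()
		if err != nil {
			return fmt.Errorf("poll bulk delete status: %w", err)
		}
		if status.GetIsDone() {
			fmt.Printf("bulk delete finished: %d results\n", len(status.GetResults()))
			return nil
		}
		time.Sleep(2 * time.Second) // arbitrary polling interval for illustration
	}
}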
diff --git a/generated/slo/docs/BulkDeleteResponse.md b/generated/slo/docs/BulkDeleteResponse.md new file mode 100644 index 000000000..bdccff897 --- /dev/null +++ b/generated/slo/docs/BulkDeleteResponse.md @@ -0,0 +1,56 @@ +# BulkDeleteResponse + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**TaskId** | Pointer to **string** | The taskId of the bulk delete operation | [optional] + +## Methods + +### NewBulkDeleteResponse + +`func NewBulkDeleteResponse() *BulkDeleteResponse` + +NewBulkDeleteResponse instantiates a new BulkDeleteResponse object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewBulkDeleteResponseWithDefaults + +`func NewBulkDeleteResponseWithDefaults() *BulkDeleteResponse` + +NewBulkDeleteResponseWithDefaults instantiates a new BulkDeleteResponse object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetTaskId + +`func (o *BulkDeleteResponse) GetTaskId() string` + +GetTaskId returns the TaskId field if non-nil, zero value otherwise. + +### GetTaskIdOk + +`func (o *BulkDeleteResponse) GetTaskIdOk() (*string, bool)` + +GetTaskIdOk returns a tuple with the TaskId field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetTaskId + +`func (o *BulkDeleteResponse) SetTaskId(v string)` + +SetTaskId sets TaskId field to given value. + +### HasTaskId + +`func (o *BulkDeleteResponse) HasTaskId() bool` + +HasTaskId returns a boolean if a field has been set. 
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/BulkDeleteStatusResponse.md b/generated/slo/docs/BulkDeleteStatusResponse.md new file mode 100644 index 000000000..f17746615 --- /dev/null +++ b/generated/slo/docs/BulkDeleteStatusResponse.md @@ -0,0 +1,108 @@ +# BulkDeleteStatusResponse + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**IsDone** | Pointer to **bool** | Indicates if the bulk deletion operation is completed | [optional] +**Error** | Pointer to **string** | The error message if the bulk deletion operation failed | [optional] +**Results** | Pointer to [**[]BulkDeleteStatusResponseResultsInner**](BulkDeleteStatusResponseResultsInner.md) | The results of the bulk deletion operation, including the success status and any errors for each SLO | [optional] + +## Methods + +### NewBulkDeleteStatusResponse + +`func NewBulkDeleteStatusResponse() *BulkDeleteStatusResponse` + +NewBulkDeleteStatusResponse instantiates a new BulkDeleteStatusResponse object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewBulkDeleteStatusResponseWithDefaults + +`func NewBulkDeleteStatusResponseWithDefaults() *BulkDeleteStatusResponse` + +NewBulkDeleteStatusResponseWithDefaults instantiates a new BulkDeleteStatusResponse object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetIsDone + +`func (o *BulkDeleteStatusResponse) GetIsDone() bool` + +GetIsDone returns the IsDone field if non-nil, zero value otherwise. + +### GetIsDoneOk + +`func (o *BulkDeleteStatusResponse) GetIsDoneOk() (*bool, bool)` + +GetIsDoneOk returns a tuple with the IsDone field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetIsDone + +`func (o *BulkDeleteStatusResponse) SetIsDone(v bool)` + +SetIsDone sets IsDone field to given value. + +### HasIsDone + +`func (o *BulkDeleteStatusResponse) HasIsDone() bool` + +HasIsDone returns a boolean if a field has been set. + +### GetError + +`func (o *BulkDeleteStatusResponse) GetError() string` + +GetError returns the Error field if non-nil, zero value otherwise. + +### GetErrorOk + +`func (o *BulkDeleteStatusResponse) GetErrorOk() (*string, bool)` + +GetErrorOk returns a tuple with the Error field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetError + +`func (o *BulkDeleteStatusResponse) SetError(v string)` + +SetError sets Error field to given value. + +### HasError + +`func (o *BulkDeleteStatusResponse) HasError() bool` + +HasError returns a boolean if a field has been set. + +### GetResults + +`func (o *BulkDeleteStatusResponse) GetResults() []BulkDeleteStatusResponseResultsInner` + +GetResults returns the Results field if non-nil, zero value otherwise. + +### GetResultsOk + +`func (o *BulkDeleteStatusResponse) GetResultsOk() (*[]BulkDeleteStatusResponseResultsInner, bool)` + +GetResultsOk returns a tuple with the Results field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. 
+ +### SetResults + +`func (o *BulkDeleteStatusResponse) SetResults(v []BulkDeleteStatusResponseResultsInner)` + +SetResults sets Results field to given value. + +### HasResults + +`func (o *BulkDeleteStatusResponse) HasResults() bool` + +HasResults returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/BulkDeleteStatusResponseResultsInner.md b/generated/slo/docs/BulkDeleteStatusResponseResultsInner.md new file mode 100644 index 000000000..cbd4a3f83 --- /dev/null +++ b/generated/slo/docs/BulkDeleteStatusResponseResultsInner.md @@ -0,0 +1,108 @@ +# BulkDeleteStatusResponseResultsInner + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Id** | Pointer to **string** | The ID of the SLO that was deleted | [optional] +**Success** | Pointer to **bool** | The result of the deletion operation for this SLO | [optional] +**Error** | Pointer to **string** | The error message if the deletion operation failed for this SLO | [optional] + +## Methods + +### NewBulkDeleteStatusResponseResultsInner + +`func NewBulkDeleteStatusResponseResultsInner() *BulkDeleteStatusResponseResultsInner` + +NewBulkDeleteStatusResponseResultsInner instantiates a new BulkDeleteStatusResponseResultsInner object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewBulkDeleteStatusResponseResultsInnerWithDefaults + +`func NewBulkDeleteStatusResponseResultsInnerWithDefaults() *BulkDeleteStatusResponseResultsInner` + +NewBulkDeleteStatusResponseResultsInnerWithDefaults instantiates a new BulkDeleteStatusResponseResultsInner object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetId + +`func (o *BulkDeleteStatusResponseResultsInner) GetId() string` + +GetId returns the Id field if non-nil, zero value otherwise. + +### GetIdOk + +`func (o *BulkDeleteStatusResponseResultsInner) GetIdOk() (*string, bool)` + +GetIdOk returns a tuple with the Id field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetId + +`func (o *BulkDeleteStatusResponseResultsInner) SetId(v string)` + +SetId sets Id field to given value. + +### HasId + +`func (o *BulkDeleteStatusResponseResultsInner) HasId() bool` + +HasId returns a boolean if a field has been set. + +### GetSuccess + +`func (o *BulkDeleteStatusResponseResultsInner) GetSuccess() bool` + +GetSuccess returns the Success field if non-nil, zero value otherwise. + +### GetSuccessOk + +`func (o *BulkDeleteStatusResponseResultsInner) GetSuccessOk() (*bool, bool)` + +GetSuccessOk returns a tuple with the Success field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSuccess + +`func (o *BulkDeleteStatusResponseResultsInner) SetSuccess(v bool)` + +SetSuccess sets Success field to given value. + +### HasSuccess + +`func (o *BulkDeleteStatusResponseResultsInner) HasSuccess() bool` + +HasSuccess returns a boolean if a field has been set. 
+ +### GetError + +`func (o *BulkDeleteStatusResponseResultsInner) GetError() string` + +GetError returns the Error field if non-nil, zero value otherwise. + +### GetErrorOk + +`func (o *BulkDeleteStatusResponseResultsInner) GetErrorOk() (*string, bool)` + +GetErrorOk returns a tuple with the Error field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetError + +`func (o *BulkDeleteStatusResponseResultsInner) SetError(v string)` + +SetError sets Error field to given value. + +### HasError + +`func (o *BulkDeleteStatusResponseResultsInner) HasError() bool` + +HasError returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/BulkPurgeRollupRequest.md b/generated/slo/docs/BulkPurgeRollupRequest.md new file mode 100644 index 000000000..f425aaefa --- /dev/null +++ b/generated/slo/docs/BulkPurgeRollupRequest.md @@ -0,0 +1,72 @@ +# BulkPurgeRollupRequest + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**List** | **[]string** | An array of slo ids | +**PurgePolicy** | [**BulkPurgeRollupRequestPurgePolicy**](BulkPurgeRollupRequestPurgePolicy.md) | | + +## Methods + +### NewBulkPurgeRollupRequest + +`func NewBulkPurgeRollupRequest(list []string, purgePolicy BulkPurgeRollupRequestPurgePolicy, ) *BulkPurgeRollupRequest` + +NewBulkPurgeRollupRequest instantiates a new BulkPurgeRollupRequest object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewBulkPurgeRollupRequestWithDefaults + +`func NewBulkPurgeRollupRequestWithDefaults() *BulkPurgeRollupRequest` + +NewBulkPurgeRollupRequestWithDefaults instantiates a new BulkPurgeRollupRequest object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetList + +`func (o *BulkPurgeRollupRequest) GetList() []string` + +GetList returns the List field if non-nil, zero value otherwise. + +### GetListOk + +`func (o *BulkPurgeRollupRequest) GetListOk() (*[]string, bool)` + +GetListOk returns a tuple with the List field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetList + +`func (o *BulkPurgeRollupRequest) SetList(v []string)` + +SetList sets List field to given value. + + +### GetPurgePolicy + +`func (o *BulkPurgeRollupRequest) GetPurgePolicy() BulkPurgeRollupRequestPurgePolicy` + +GetPurgePolicy returns the PurgePolicy field if non-nil, zero value otherwise. + +### GetPurgePolicyOk + +`func (o *BulkPurgeRollupRequest) GetPurgePolicyOk() (*BulkPurgeRollupRequestPurgePolicy, bool)` + +GetPurgePolicyOk returns a tuple with the PurgePolicy field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPurgePolicy + +`func (o *BulkPurgeRollupRequest) SetPurgePolicy(v BulkPurgeRollupRequestPurgePolicy)` + +SetPurgePolicy sets PurgePolicy field to given value. 
+ + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/BulkPurgeRollupRequestPurgePolicy.md b/generated/slo/docs/BulkPurgeRollupRequestPurgePolicy.md new file mode 100644 index 000000000..b9990c9a9 --- /dev/null +++ b/generated/slo/docs/BulkPurgeRollupRequestPurgePolicy.md @@ -0,0 +1,108 @@ +# BulkPurgeRollupRequestPurgePolicy + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**PurgeType** | Pointer to **string** | Specifies whether documents will be purged based on a specific age or on a timestamp | [optional] +**Age** | Pointer to **string** | The duration to determine which documents to purge, formatted as {duration}{unit}. This value should be greater than or equal to the time window of every SLO provided. | [optional] +**Timestamp** | Pointer to **string** | The timestamp to determine which documents to purge, formatted in ISO. This value should be older than the applicable time window of every SLO provided. | [optional] + +## Methods + +### NewBulkPurgeRollupRequestPurgePolicy + +`func NewBulkPurgeRollupRequestPurgePolicy() *BulkPurgeRollupRequestPurgePolicy` + +NewBulkPurgeRollupRequestPurgePolicy instantiates a new BulkPurgeRollupRequestPurgePolicy object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewBulkPurgeRollupRequestPurgePolicyWithDefaults + +`func NewBulkPurgeRollupRequestPurgePolicyWithDefaults() *BulkPurgeRollupRequestPurgePolicy` + +NewBulkPurgeRollupRequestPurgePolicyWithDefaults instantiates a new BulkPurgeRollupRequestPurgePolicy object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetPurgeType + +`func (o *BulkPurgeRollupRequestPurgePolicy) GetPurgeType() string` + +GetPurgeType returns the PurgeType field if non-nil, zero value otherwise. + +### GetPurgeTypeOk + +`func (o *BulkPurgeRollupRequestPurgePolicy) GetPurgeTypeOk() (*string, bool)` + +GetPurgeTypeOk returns a tuple with the PurgeType field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPurgeType + +`func (o *BulkPurgeRollupRequestPurgePolicy) SetPurgeType(v string)` + +SetPurgeType sets PurgeType field to given value. + +### HasPurgeType + +`func (o *BulkPurgeRollupRequestPurgePolicy) HasPurgeType() bool` + +HasPurgeType returns a boolean if a field has been set. + +### GetAge + +`func (o *BulkPurgeRollupRequestPurgePolicy) GetAge() string` + +GetAge returns the Age field if non-nil, zero value otherwise. + +### GetAgeOk + +`func (o *BulkPurgeRollupRequestPurgePolicy) GetAgeOk() (*string, bool)` + +GetAgeOk returns a tuple with the Age field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetAge + +`func (o *BulkPurgeRollupRequestPurgePolicy) SetAge(v string)` + +SetAge sets Age field to given value. + +### HasAge + +`func (o *BulkPurgeRollupRequestPurgePolicy) HasAge() bool` + +HasAge returns a boolean if a field has been set. + +### GetTimestamp + +`func (o *BulkPurgeRollupRequestPurgePolicy) GetTimestamp() string` + +GetTimestamp returns the Timestamp field if non-nil, zero value otherwise. 
+ +### GetTimestampOk + +`func (o *BulkPurgeRollupRequestPurgePolicy) GetTimestampOk() (*string, bool)` + +GetTimestampOk returns a tuple with the Timestamp field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetTimestamp + +`func (o *BulkPurgeRollupRequestPurgePolicy) SetTimestamp(v string)` + +SetTimestamp sets Timestamp field to given value. + +### HasTimestamp + +`func (o *BulkPurgeRollupRequestPurgePolicy) HasTimestamp() bool` + +HasTimestamp returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/BulkPurgeRollupRequestPurgePolicyOneOf.md b/generated/slo/docs/BulkPurgeRollupRequestPurgePolicyOneOf.md new file mode 100644 index 000000000..3a0f28221 --- /dev/null +++ b/generated/slo/docs/BulkPurgeRollupRequestPurgePolicyOneOf.md @@ -0,0 +1,82 @@ +# BulkPurgeRollupRequestPurgePolicyOneOf + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**PurgeType** | Pointer to **string** | Specifies whether documents will be purged based on a specific age or on a timestamp | [optional] +**Age** | Pointer to **string** | The duration to determine which documents to purge, formatted as {duration}{unit}. This value should be greater than or equal to the time window of every SLO provided. | [optional] + +## Methods + +### NewBulkPurgeRollupRequestPurgePolicyOneOf + +`func NewBulkPurgeRollupRequestPurgePolicyOneOf() *BulkPurgeRollupRequestPurgePolicyOneOf` + +NewBulkPurgeRollupRequestPurgePolicyOneOf instantiates a new BulkPurgeRollupRequestPurgePolicyOneOf object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewBulkPurgeRollupRequestPurgePolicyOneOfWithDefaults + +`func NewBulkPurgeRollupRequestPurgePolicyOneOfWithDefaults() *BulkPurgeRollupRequestPurgePolicyOneOf` + +NewBulkPurgeRollupRequestPurgePolicyOneOfWithDefaults instantiates a new BulkPurgeRollupRequestPurgePolicyOneOf object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetPurgeType + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf) GetPurgeType() string` + +GetPurgeType returns the PurgeType field if non-nil, zero value otherwise. + +### GetPurgeTypeOk + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf) GetPurgeTypeOk() (*string, bool)` + +GetPurgeTypeOk returns a tuple with the PurgeType field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPurgeType + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf) SetPurgeType(v string)` + +SetPurgeType sets PurgeType field to given value. + +### HasPurgeType + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf) HasPurgeType() bool` + +HasPurgeType returns a boolean if a field has been set. + +### GetAge + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf) GetAge() string` + +GetAge returns the Age field if non-nil, zero value otherwise. + +### GetAgeOk + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf) GetAgeOk() (*string, bool)` + +GetAgeOk returns a tuple with the Age field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. 
+ +### SetAge + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf) SetAge(v string)` + +SetAge sets Age field to given value. + +### HasAge + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf) HasAge() bool` + +HasAge returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/BulkPurgeRollupRequestPurgePolicyOneOf1.md b/generated/slo/docs/BulkPurgeRollupRequestPurgePolicyOneOf1.md new file mode 100644 index 000000000..1b3bddbff --- /dev/null +++ b/generated/slo/docs/BulkPurgeRollupRequestPurgePolicyOneOf1.md @@ -0,0 +1,82 @@ +# BulkPurgeRollupRequestPurgePolicyOneOf1 + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**PurgeType** | Pointer to **string** | Specifies whether documents will be purged based on a specific age or on a timestamp | [optional] +**Timestamp** | Pointer to **string** | The timestamp to determine which documents to purge, formatted in ISO. This value should be older than the applicable time window of every SLO provided. | [optional] + +## Methods + +### NewBulkPurgeRollupRequestPurgePolicyOneOf1 + +`func NewBulkPurgeRollupRequestPurgePolicyOneOf1() *BulkPurgeRollupRequestPurgePolicyOneOf1` + +NewBulkPurgeRollupRequestPurgePolicyOneOf1 instantiates a new BulkPurgeRollupRequestPurgePolicyOneOf1 object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewBulkPurgeRollupRequestPurgePolicyOneOf1WithDefaults + +`func NewBulkPurgeRollupRequestPurgePolicyOneOf1WithDefaults() *BulkPurgeRollupRequestPurgePolicyOneOf1` + +NewBulkPurgeRollupRequestPurgePolicyOneOf1WithDefaults instantiates a new BulkPurgeRollupRequestPurgePolicyOneOf1 object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetPurgeType + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) GetPurgeType() string` + +GetPurgeType returns the PurgeType field if non-nil, zero value otherwise. + +### GetPurgeTypeOk + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) GetPurgeTypeOk() (*string, bool)` + +GetPurgeTypeOk returns a tuple with the PurgeType field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPurgeType + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) SetPurgeType(v string)` + +SetPurgeType sets PurgeType field to given value. + +### HasPurgeType + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) HasPurgeType() bool` + +HasPurgeType returns a boolean if a field has been set. + +### GetTimestamp + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) GetTimestamp() string` + +GetTimestamp returns the Timestamp field if non-nil, zero value otherwise. + +### GetTimestampOk + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) GetTimestampOk() (*string, bool)` + +GetTimestampOk returns a tuple with the Timestamp field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetTimestamp + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) SetTimestamp(v string)` + +SetTimestamp sets Timestamp field to given value. 
+ +### HasTimestamp + +`func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) HasTimestamp() bool` + +HasTimestamp returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/BulkPurgeRollupResponse.md b/generated/slo/docs/BulkPurgeRollupResponse.md new file mode 100644 index 000000000..862b02df2 --- /dev/null +++ b/generated/slo/docs/BulkPurgeRollupResponse.md @@ -0,0 +1,56 @@ +# BulkPurgeRollupResponse + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**TaskId** | Pointer to **string** | The task id of the purge operation | [optional] + +## Methods + +### NewBulkPurgeRollupResponse + +`func NewBulkPurgeRollupResponse() *BulkPurgeRollupResponse` + +NewBulkPurgeRollupResponse instantiates a new BulkPurgeRollupResponse object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewBulkPurgeRollupResponseWithDefaults + +`func NewBulkPurgeRollupResponseWithDefaults() *BulkPurgeRollupResponse` + +NewBulkPurgeRollupResponseWithDefaults instantiates a new BulkPurgeRollupResponse object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetTaskId + +`func (o *BulkPurgeRollupResponse) GetTaskId() string` + +GetTaskId returns the TaskId field if non-nil, zero value otherwise. + +### GetTaskIdOk + +`func (o *BulkPurgeRollupResponse) GetTaskIdOk() (*string, bool)` + +GetTaskIdOk returns a tuple with the TaskId field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetTaskId + +`func (o *BulkPurgeRollupResponse) SetTaskId(v string)` + +SetTaskId sets TaskId field to given value. + +### HasTaskId + +`func (o *BulkPurgeRollupResponse) HasTaskId() bool` + +HasTaskId returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/CreateSloRequest.md b/generated/slo/docs/CreateSloRequest.md index e8b5ec1bc..492603b08 100644 --- a/generated/slo/docs/CreateSloRequest.md +++ b/generated/slo/docs/CreateSloRequest.md @@ -12,7 +12,7 @@ Name | Type | Description | Notes **BudgetingMethod** | [**BudgetingMethod**](BudgetingMethod.md) | | **Objective** | [**Objective**](Objective.md) | | **Settings** | Pointer to [**Settings**](Settings.md) | | [optional] -**GroupBy** | Pointer to [**SloResponseGroupBy**](SloResponseGroupBy.md) | | [optional] +**GroupBy** | Pointer to [**GroupBy**](GroupBy.md) | | [optional] **Tags** | Pointer to **[]string** | List of tags | [optional] ## Methods @@ -206,20 +206,20 @@ HasSettings returns a boolean if a field has been set. ### GetGroupBy -`func (o *CreateSloRequest) GetGroupBy() SloResponseGroupBy` +`func (o *CreateSloRequest) GetGroupBy() GroupBy` GetGroupBy returns the GroupBy field if non-nil, zero value otherwise. 
### GetGroupByOk -`func (o *CreateSloRequest) GetGroupByOk() (*SloResponseGroupBy, bool)` +`func (o *CreateSloRequest) GetGroupByOk() (*GroupBy, bool)` GetGroupByOk returns a tuple with the GroupBy field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetGroupBy -`func (o *CreateSloRequest) SetGroupBy(v SloResponseGroupBy)` +`func (o *CreateSloRequest) SetGroupBy(v GroupBy)` SetGroupBy sets GroupBy field to given value. diff --git a/generated/slo/docs/Filter.md b/generated/slo/docs/Filter.md new file mode 100644 index 000000000..291be3dd6 --- /dev/null +++ b/generated/slo/docs/Filter.md @@ -0,0 +1,82 @@ +# Filter + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Query** | Pointer to **map[string]interface{}** | | [optional] +**Meta** | Pointer to [**FilterMeta**](FilterMeta.md) | | [optional] + +## Methods + +### NewFilter + +`func NewFilter() *Filter` + +NewFilter instantiates a new Filter object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewFilterWithDefaults + +`func NewFilterWithDefaults() *Filter` + +NewFilterWithDefaults instantiates a new Filter object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetQuery + +`func (o *Filter) GetQuery() map[string]interface{}` + +GetQuery returns the Query field if non-nil, zero value otherwise. + +### GetQueryOk + +`func (o *Filter) GetQueryOk() (*map[string]interface{}, bool)` + +GetQueryOk returns a tuple with the Query field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetQuery + +`func (o *Filter) SetQuery(v map[string]interface{})` + +SetQuery sets Query field to given value. + +### HasQuery + +`func (o *Filter) HasQuery() bool` + +HasQuery returns a boolean if a field has been set. + +### GetMeta + +`func (o *Filter) GetMeta() FilterMeta` + +GetMeta returns the Meta field if non-nil, zero value otherwise. + +### GetMetaOk + +`func (o *Filter) GetMetaOk() (*FilterMeta, bool)` + +GetMetaOk returns a tuple with the Meta field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetMeta + +`func (o *Filter) SetMeta(v FilterMeta)` + +SetMeta sets Meta field to given value. + +### HasMeta + +`func (o *Filter) HasMeta() bool` + +HasMeta returns a boolean if a field has been set. 
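
A rough sketch of composing `Filter` with its nested `FilterMeta`; the import path, field name, and query body are illustrative assumptions:

```go
package main

import (
	"fmt"

	slo "github.com/elastic/terraform-provider-elasticstack/generated/slo"
)

func main() {
	// Metadata describing the filter (field name and negation are placeholders).
	meta := slo.NewFilterMeta()
	meta.SetField("service.name")
	meta.SetNegate(false)

	// The query body is a free-form map; a match_phrase clause is used purely as an example.
	f := slo.NewFilter()
	f.SetQuery(map[string]interface{}{
		"match_phrase": map[string]interface{}{"service.name": "my-service"},
	})
	f.SetMeta(*meta)

	fmt.Println(f.HasQuery(), f.HasMeta())
}
```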
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/FilterMeta.md b/generated/slo/docs/FilterMeta.md new file mode 100644 index 000000000..eea21e171 --- /dev/null +++ b/generated/slo/docs/FilterMeta.md @@ -0,0 +1,352 @@ +# FilterMeta + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Alias** | Pointer to **NullableString** | | [optional] +**Disabled** | Pointer to **bool** | | [optional] +**Negate** | Pointer to **bool** | | [optional] +**ControlledBy** | Pointer to **string** | | [optional] +**Group** | Pointer to **string** | | [optional] +**Index** | Pointer to **string** | | [optional] +**IsMultiIndex** | Pointer to **bool** | | [optional] +**Type** | Pointer to **string** | | [optional] +**Key** | Pointer to **string** | | [optional] +**Params** | Pointer to **map[string]interface{}** | | [optional] +**Value** | Pointer to **string** | | [optional] +**Field** | Pointer to **string** | | [optional] + +## Methods + +### NewFilterMeta + +`func NewFilterMeta() *FilterMeta` + +NewFilterMeta instantiates a new FilterMeta object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewFilterMetaWithDefaults + +`func NewFilterMetaWithDefaults() *FilterMeta` + +NewFilterMetaWithDefaults instantiates a new FilterMeta object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetAlias + +`func (o *FilterMeta) GetAlias() string` + +GetAlias returns the Alias field if non-nil, zero value otherwise. + +### GetAliasOk + +`func (o *FilterMeta) GetAliasOk() (*string, bool)` + +GetAliasOk returns a tuple with the Alias field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetAlias + +`func (o *FilterMeta) SetAlias(v string)` + +SetAlias sets Alias field to given value. + +### HasAlias + +`func (o *FilterMeta) HasAlias() bool` + +HasAlias returns a boolean if a field has been set. + +### SetAliasNil + +`func (o *FilterMeta) SetAliasNil(b bool)` + + SetAliasNil sets the value for Alias to be an explicit nil + +### UnsetAlias +`func (o *FilterMeta) UnsetAlias()` + +UnsetAlias ensures that no value is present for Alias, not even an explicit nil +### GetDisabled + +`func (o *FilterMeta) GetDisabled() bool` + +GetDisabled returns the Disabled field if non-nil, zero value otherwise. + +### GetDisabledOk + +`func (o *FilterMeta) GetDisabledOk() (*bool, bool)` + +GetDisabledOk returns a tuple with the Disabled field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDisabled + +`func (o *FilterMeta) SetDisabled(v bool)` + +SetDisabled sets Disabled field to given value. + +### HasDisabled + +`func (o *FilterMeta) HasDisabled() bool` + +HasDisabled returns a boolean if a field has been set. + +### GetNegate + +`func (o *FilterMeta) GetNegate() bool` + +GetNegate returns the Negate field if non-nil, zero value otherwise. + +### GetNegateOk + +`func (o *FilterMeta) GetNegateOk() (*bool, bool)` + +GetNegateOk returns a tuple with the Negate field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. 
+ +### SetNegate + +`func (o *FilterMeta) SetNegate(v bool)` + +SetNegate sets Negate field to given value. + +### HasNegate + +`func (o *FilterMeta) HasNegate() bool` + +HasNegate returns a boolean if a field has been set. + +### GetControlledBy + +`func (o *FilterMeta) GetControlledBy() string` + +GetControlledBy returns the ControlledBy field if non-nil, zero value otherwise. + +### GetControlledByOk + +`func (o *FilterMeta) GetControlledByOk() (*string, bool)` + +GetControlledByOk returns a tuple with the ControlledBy field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetControlledBy + +`func (o *FilterMeta) SetControlledBy(v string)` + +SetControlledBy sets ControlledBy field to given value. + +### HasControlledBy + +`func (o *FilterMeta) HasControlledBy() bool` + +HasControlledBy returns a boolean if a field has been set. + +### GetGroup + +`func (o *FilterMeta) GetGroup() string` + +GetGroup returns the Group field if non-nil, zero value otherwise. + +### GetGroupOk + +`func (o *FilterMeta) GetGroupOk() (*string, bool)` + +GetGroupOk returns a tuple with the Group field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetGroup + +`func (o *FilterMeta) SetGroup(v string)` + +SetGroup sets Group field to given value. + +### HasGroup + +`func (o *FilterMeta) HasGroup() bool` + +HasGroup returns a boolean if a field has been set. + +### GetIndex + +`func (o *FilterMeta) GetIndex() string` + +GetIndex returns the Index field if non-nil, zero value otherwise. + +### GetIndexOk + +`func (o *FilterMeta) GetIndexOk() (*string, bool)` + +GetIndexOk returns a tuple with the Index field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetIndex + +`func (o *FilterMeta) SetIndex(v string)` + +SetIndex sets Index field to given value. + +### HasIndex + +`func (o *FilterMeta) HasIndex() bool` + +HasIndex returns a boolean if a field has been set. + +### GetIsMultiIndex + +`func (o *FilterMeta) GetIsMultiIndex() bool` + +GetIsMultiIndex returns the IsMultiIndex field if non-nil, zero value otherwise. + +### GetIsMultiIndexOk + +`func (o *FilterMeta) GetIsMultiIndexOk() (*bool, bool)` + +GetIsMultiIndexOk returns a tuple with the IsMultiIndex field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetIsMultiIndex + +`func (o *FilterMeta) SetIsMultiIndex(v bool)` + +SetIsMultiIndex sets IsMultiIndex field to given value. + +### HasIsMultiIndex + +`func (o *FilterMeta) HasIsMultiIndex() bool` + +HasIsMultiIndex returns a boolean if a field has been set. + +### GetType + +`func (o *FilterMeta) GetType() string` + +GetType returns the Type field if non-nil, zero value otherwise. + +### GetTypeOk + +`func (o *FilterMeta) GetTypeOk() (*string, bool)` + +GetTypeOk returns a tuple with the Type field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetType + +`func (o *FilterMeta) SetType(v string)` + +SetType sets Type field to given value. + +### HasType + +`func (o *FilterMeta) HasType() bool` + +HasType returns a boolean if a field has been set. + +### GetKey + +`func (o *FilterMeta) GetKey() string` + +GetKey returns the Key field if non-nil, zero value otherwise. + +### GetKeyOk + +`func (o *FilterMeta) GetKeyOk() (*string, bool)` + +GetKeyOk returns a tuple with the Key field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. 
+ +### SetKey + +`func (o *FilterMeta) SetKey(v string)` + +SetKey sets Key field to given value. + +### HasKey + +`func (o *FilterMeta) HasKey() bool` + +HasKey returns a boolean if a field has been set. + +### GetParams + +`func (o *FilterMeta) GetParams() map[string]interface{}` + +GetParams returns the Params field if non-nil, zero value otherwise. + +### GetParamsOk + +`func (o *FilterMeta) GetParamsOk() (*map[string]interface{}, bool)` + +GetParamsOk returns a tuple with the Params field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetParams + +`func (o *FilterMeta) SetParams(v map[string]interface{})` + +SetParams sets Params field to given value. + +### HasParams + +`func (o *FilterMeta) HasParams() bool` + +HasParams returns a boolean if a field has been set. + +### GetValue + +`func (o *FilterMeta) GetValue() string` + +GetValue returns the Value field if non-nil, zero value otherwise. + +### GetValueOk + +`func (o *FilterMeta) GetValueOk() (*string, bool)` + +GetValueOk returns a tuple with the Value field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetValue + +`func (o *FilterMeta) SetValue(v string)` + +SetValue sets Value field to given value. + +### HasValue + +`func (o *FilterMeta) HasValue() bool` + +HasValue returns a boolean if a field has been set. + +### GetField + +`func (o *FilterMeta) GetField() string` + +GetField returns the Field field if non-nil, zero value otherwise. + +### GetFieldOk + +`func (o *FilterMeta) GetFieldOk() (*string, bool)` + +GetFieldOk returns a tuple with the Field field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetField + +`func (o *FilterMeta) SetField(v string)` + +SetField sets Field field to given value. + +### HasField + +`func (o *FilterMeta) HasField() bool` + +HasField returns a boolean if a field has been set. 
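
A short sketch of the nullable `Alias` handling described above; the import path and alias text are placeholders:

```go
package main

import (
	"fmt"

	slo "github.com/elastic/terraform-provider-elasticstack/generated/slo"
)

func main() {
	meta := slo.NewFilterMeta()
	meta.SetAlias("errors only") // plain string value (placeholder)
	meta.SetAliasNil(true)       // or serialize Alias as an explicit null
	meta.UnsetAlias()            // or drop Alias from the payload entirely
	fmt.Println(meta.HasAlias()) // expected to be false once Alias is unset
}
```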
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/FindSloDefinitionsResponse.md b/generated/slo/docs/FindSloDefinitionsResponse.md new file mode 100644 index 000000000..5774f4339 --- /dev/null +++ b/generated/slo/docs/FindSloDefinitionsResponse.md @@ -0,0 +1,186 @@ +# FindSloDefinitionsResponse + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Page** | Pointer to **float64** | for backward compability | [optional] [default to 1] +**PerPage** | Pointer to **float64** | for backward compability | [optional] +**Total** | Pointer to **float64** | | [optional] +**Results** | Pointer to [**[]SloWithSummaryResponse**](SloWithSummaryResponse.md) | | [optional] +**Size** | Pointer to **float64** | | [optional] +**SearchAfter** | Pointer to **[]string** | the cursor to provide to get the next paged results | [optional] + +## Methods + +### NewFindSloDefinitionsResponse + +`func NewFindSloDefinitionsResponse() *FindSloDefinitionsResponse` + +NewFindSloDefinitionsResponse instantiates a new FindSloDefinitionsResponse object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewFindSloDefinitionsResponseWithDefaults + +`func NewFindSloDefinitionsResponseWithDefaults() *FindSloDefinitionsResponse` + +NewFindSloDefinitionsResponseWithDefaults instantiates a new FindSloDefinitionsResponse object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetPage + +`func (o *FindSloDefinitionsResponse) GetPage() float64` + +GetPage returns the Page field if non-nil, zero value otherwise. + +### GetPageOk + +`func (o *FindSloDefinitionsResponse) GetPageOk() (*float64, bool)` + +GetPageOk returns a tuple with the Page field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPage + +`func (o *FindSloDefinitionsResponse) SetPage(v float64)` + +SetPage sets Page field to given value. + +### HasPage + +`func (o *FindSloDefinitionsResponse) HasPage() bool` + +HasPage returns a boolean if a field has been set. + +### GetPerPage + +`func (o *FindSloDefinitionsResponse) GetPerPage() float64` + +GetPerPage returns the PerPage field if non-nil, zero value otherwise. + +### GetPerPageOk + +`func (o *FindSloDefinitionsResponse) GetPerPageOk() (*float64, bool)` + +GetPerPageOk returns a tuple with the PerPage field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPerPage + +`func (o *FindSloDefinitionsResponse) SetPerPage(v float64)` + +SetPerPage sets PerPage field to given value. + +### HasPerPage + +`func (o *FindSloDefinitionsResponse) HasPerPage() bool` + +HasPerPage returns a boolean if a field has been set. + +### GetTotal + +`func (o *FindSloDefinitionsResponse) GetTotal() float64` + +GetTotal returns the Total field if non-nil, zero value otherwise. + +### GetTotalOk + +`func (o *FindSloDefinitionsResponse) GetTotalOk() (*float64, bool)` + +GetTotalOk returns a tuple with the Total field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. 
+ +### SetTotal + +`func (o *FindSloDefinitionsResponse) SetTotal(v float64)` + +SetTotal sets Total field to given value. + +### HasTotal + +`func (o *FindSloDefinitionsResponse) HasTotal() bool` + +HasTotal returns a boolean if a field has been set. + +### GetResults + +`func (o *FindSloDefinitionsResponse) GetResults() []SloWithSummaryResponse` + +GetResults returns the Results field if non-nil, zero value otherwise. + +### GetResultsOk + +`func (o *FindSloDefinitionsResponse) GetResultsOk() (*[]SloWithSummaryResponse, bool)` + +GetResultsOk returns a tuple with the Results field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetResults + +`func (o *FindSloDefinitionsResponse) SetResults(v []SloWithSummaryResponse)` + +SetResults sets Results field to given value. + +### HasResults + +`func (o *FindSloDefinitionsResponse) HasResults() bool` + +HasResults returns a boolean if a field has been set. + +### GetSize + +`func (o *FindSloDefinitionsResponse) GetSize() float64` + +GetSize returns the Size field if non-nil, zero value otherwise. + +### GetSizeOk + +`func (o *FindSloDefinitionsResponse) GetSizeOk() (*float64, bool)` + +GetSizeOk returns a tuple with the Size field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSize + +`func (o *FindSloDefinitionsResponse) SetSize(v float64)` + +SetSize sets Size field to given value. + +### HasSize + +`func (o *FindSloDefinitionsResponse) HasSize() bool` + +HasSize returns a boolean if a field has been set. + +### GetSearchAfter + +`func (o *FindSloDefinitionsResponse) GetSearchAfter() []string` + +GetSearchAfter returns the SearchAfter field if non-nil, zero value otherwise. + +### GetSearchAfterOk + +`func (o *FindSloDefinitionsResponse) GetSearchAfterOk() (*[]string, bool)` + +GetSearchAfterOk returns a tuple with the SearchAfter field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSearchAfter + +`func (o *FindSloDefinitionsResponse) SetSearchAfter(v []string)` + +SetSearchAfter sets SearchAfter field to given value. + +### HasSearchAfter + +`func (o *FindSloDefinitionsResponse) HasSearchAfter() bool` + +HasSearchAfter returns a boolean if a field has been set. 
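
A reading-side sketch; in practice the response would be decoded from a find-definitions API call, and the import path shown is an assumption:

```go
package main

import (
	"fmt"

	slo "github.com/elastic/terraform-provider-elasticstack/generated/slo"
)

func main() {
	var resp slo.FindSloDefinitionsResponse // normally decoded from an API response

	if resp.HasResults() {
		fmt.Printf("%v definitions on page %v of %v total\n",
			len(resp.GetResults()), resp.GetPage(), resp.GetTotal())
	}
	// With cursor-based pagination, pass the returned cursor into the next request.
	if resp.HasSearchAfter() {
		fmt.Println("next cursor:", resp.GetSearchAfter())
	}
}
```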
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/FindSloDefinitionsResponseOneOf.md b/generated/slo/docs/FindSloDefinitionsResponseOneOf.md new file mode 100644 index 000000000..7c10f87c4 --- /dev/null +++ b/generated/slo/docs/FindSloDefinitionsResponseOneOf.md @@ -0,0 +1,134 @@ +# FindSloDefinitionsResponseOneOf + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Page** | Pointer to **float64** | | [optional] +**PerPage** | Pointer to **float64** | | [optional] +**Total** | Pointer to **float64** | | [optional] +**Results** | Pointer to [**[]SloWithSummaryResponse**](SloWithSummaryResponse.md) | | [optional] + +## Methods + +### NewFindSloDefinitionsResponseOneOf + +`func NewFindSloDefinitionsResponseOneOf() *FindSloDefinitionsResponseOneOf` + +NewFindSloDefinitionsResponseOneOf instantiates a new FindSloDefinitionsResponseOneOf object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewFindSloDefinitionsResponseOneOfWithDefaults + +`func NewFindSloDefinitionsResponseOneOfWithDefaults() *FindSloDefinitionsResponseOneOf` + +NewFindSloDefinitionsResponseOneOfWithDefaults instantiates a new FindSloDefinitionsResponseOneOf object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetPage + +`func (o *FindSloDefinitionsResponseOneOf) GetPage() float64` + +GetPage returns the Page field if non-nil, zero value otherwise. + +### GetPageOk + +`func (o *FindSloDefinitionsResponseOneOf) GetPageOk() (*float64, bool)` + +GetPageOk returns a tuple with the Page field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPage + +`func (o *FindSloDefinitionsResponseOneOf) SetPage(v float64)` + +SetPage sets Page field to given value. + +### HasPage + +`func (o *FindSloDefinitionsResponseOneOf) HasPage() bool` + +HasPage returns a boolean if a field has been set. + +### GetPerPage + +`func (o *FindSloDefinitionsResponseOneOf) GetPerPage() float64` + +GetPerPage returns the PerPage field if non-nil, zero value otherwise. + +### GetPerPageOk + +`func (o *FindSloDefinitionsResponseOneOf) GetPerPageOk() (*float64, bool)` + +GetPerPageOk returns a tuple with the PerPage field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPerPage + +`func (o *FindSloDefinitionsResponseOneOf) SetPerPage(v float64)` + +SetPerPage sets PerPage field to given value. + +### HasPerPage + +`func (o *FindSloDefinitionsResponseOneOf) HasPerPage() bool` + +HasPerPage returns a boolean if a field has been set. + +### GetTotal + +`func (o *FindSloDefinitionsResponseOneOf) GetTotal() float64` + +GetTotal returns the Total field if non-nil, zero value otherwise. + +### GetTotalOk + +`func (o *FindSloDefinitionsResponseOneOf) GetTotalOk() (*float64, bool)` + +GetTotalOk returns a tuple with the Total field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetTotal + +`func (o *FindSloDefinitionsResponseOneOf) SetTotal(v float64)` + +SetTotal sets Total field to given value. 
+ +### HasTotal + +`func (o *FindSloDefinitionsResponseOneOf) HasTotal() bool` + +HasTotal returns a boolean if a field has been set. + +### GetResults + +`func (o *FindSloDefinitionsResponseOneOf) GetResults() []SloWithSummaryResponse` + +GetResults returns the Results field if non-nil, zero value otherwise. + +### GetResultsOk + +`func (o *FindSloDefinitionsResponseOneOf) GetResultsOk() (*[]SloWithSummaryResponse, bool)` + +GetResultsOk returns a tuple with the Results field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetResults + +`func (o *FindSloDefinitionsResponseOneOf) SetResults(v []SloWithSummaryResponse)` + +SetResults sets Results field to given value. + +### HasResults + +`func (o *FindSloDefinitionsResponseOneOf) HasResults() bool` + +HasResults returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/FindSloDefinitionsResponseOneOf1.md b/generated/slo/docs/FindSloDefinitionsResponseOneOf1.md new file mode 100644 index 000000000..ee6e5ae94 --- /dev/null +++ b/generated/slo/docs/FindSloDefinitionsResponseOneOf1.md @@ -0,0 +1,186 @@ +# FindSloDefinitionsResponseOneOf1 + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Page** | Pointer to **float64** | for backward compability | [optional] [default to 1] +**PerPage** | Pointer to **float64** | for backward compability | [optional] +**Size** | Pointer to **float64** | | [optional] +**SearchAfter** | Pointer to **[]string** | the cursor to provide to get the next paged results | [optional] +**Total** | Pointer to **float64** | | [optional] +**Results** | Pointer to [**[]SloWithSummaryResponse**](SloWithSummaryResponse.md) | | [optional] + +## Methods + +### NewFindSloDefinitionsResponseOneOf1 + +`func NewFindSloDefinitionsResponseOneOf1() *FindSloDefinitionsResponseOneOf1` + +NewFindSloDefinitionsResponseOneOf1 instantiates a new FindSloDefinitionsResponseOneOf1 object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewFindSloDefinitionsResponseOneOf1WithDefaults + +`func NewFindSloDefinitionsResponseOneOf1WithDefaults() *FindSloDefinitionsResponseOneOf1` + +NewFindSloDefinitionsResponseOneOf1WithDefaults instantiates a new FindSloDefinitionsResponseOneOf1 object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetPage + +`func (o *FindSloDefinitionsResponseOneOf1) GetPage() float64` + +GetPage returns the Page field if non-nil, zero value otherwise. + +### GetPageOk + +`func (o *FindSloDefinitionsResponseOneOf1) GetPageOk() (*float64, bool)` + +GetPageOk returns a tuple with the Page field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPage + +`func (o *FindSloDefinitionsResponseOneOf1) SetPage(v float64)` + +SetPage sets Page field to given value. + +### HasPage + +`func (o *FindSloDefinitionsResponseOneOf1) HasPage() bool` + +HasPage returns a boolean if a field has been set. 
+ +### GetPerPage + +`func (o *FindSloDefinitionsResponseOneOf1) GetPerPage() float64` + +GetPerPage returns the PerPage field if non-nil, zero value otherwise. + +### GetPerPageOk + +`func (o *FindSloDefinitionsResponseOneOf1) GetPerPageOk() (*float64, bool)` + +GetPerPageOk returns a tuple with the PerPage field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPerPage + +`func (o *FindSloDefinitionsResponseOneOf1) SetPerPage(v float64)` + +SetPerPage sets PerPage field to given value. + +### HasPerPage + +`func (o *FindSloDefinitionsResponseOneOf1) HasPerPage() bool` + +HasPerPage returns a boolean if a field has been set. + +### GetSize + +`func (o *FindSloDefinitionsResponseOneOf1) GetSize() float64` + +GetSize returns the Size field if non-nil, zero value otherwise. + +### GetSizeOk + +`func (o *FindSloDefinitionsResponseOneOf1) GetSizeOk() (*float64, bool)` + +GetSizeOk returns a tuple with the Size field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSize + +`func (o *FindSloDefinitionsResponseOneOf1) SetSize(v float64)` + +SetSize sets Size field to given value. + +### HasSize + +`func (o *FindSloDefinitionsResponseOneOf1) HasSize() bool` + +HasSize returns a boolean if a field has been set. + +### GetSearchAfter + +`func (o *FindSloDefinitionsResponseOneOf1) GetSearchAfter() []string` + +GetSearchAfter returns the SearchAfter field if non-nil, zero value otherwise. + +### GetSearchAfterOk + +`func (o *FindSloDefinitionsResponseOneOf1) GetSearchAfterOk() (*[]string, bool)` + +GetSearchAfterOk returns a tuple with the SearchAfter field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSearchAfter + +`func (o *FindSloDefinitionsResponseOneOf1) SetSearchAfter(v []string)` + +SetSearchAfter sets SearchAfter field to given value. + +### HasSearchAfter + +`func (o *FindSloDefinitionsResponseOneOf1) HasSearchAfter() bool` + +HasSearchAfter returns a boolean if a field has been set. + +### GetTotal + +`func (o *FindSloDefinitionsResponseOneOf1) GetTotal() float64` + +GetTotal returns the Total field if non-nil, zero value otherwise. + +### GetTotalOk + +`func (o *FindSloDefinitionsResponseOneOf1) GetTotalOk() (*float64, bool)` + +GetTotalOk returns a tuple with the Total field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetTotal + +`func (o *FindSloDefinitionsResponseOneOf1) SetTotal(v float64)` + +SetTotal sets Total field to given value. + +### HasTotal + +`func (o *FindSloDefinitionsResponseOneOf1) HasTotal() bool` + +HasTotal returns a boolean if a field has been set. + +### GetResults + +`func (o *FindSloDefinitionsResponseOneOf1) GetResults() []SloWithSummaryResponse` + +GetResults returns the Results field if non-nil, zero value otherwise. + +### GetResultsOk + +`func (o *FindSloDefinitionsResponseOneOf1) GetResultsOk() (*[]SloWithSummaryResponse, bool)` + +GetResultsOk returns a tuple with the Results field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetResults + +`func (o *FindSloDefinitionsResponseOneOf1) SetResults(v []SloWithSummaryResponse)` + +SetResults sets Results field to given value. + +### HasResults + +`func (o *FindSloDefinitionsResponseOneOf1) HasResults() bool` + +HasResults returns a boolean if a field has been set. 
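
For this cursor-paginated variant, a construction-and-marshal sketch; the import path and values are placeholders, and the exact JSON shape depends on the generated struct tags:

```go
package main

import (
	"encoding/json"
	"fmt"

	slo "github.com/elastic/terraform-provider-elasticstack/generated/slo"
)

func main() {
	// Per the docs above, the WithDefaults constructor applies the documented default (Page = 1).
	r := slo.NewFindSloDefinitionsResponseOneOf1WithDefaults()
	r.SetSize(25)
	r.SetSearchAfter([]string{"some-cursor"})
	r.SetTotal(120)

	b, err := json.Marshal(r)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b))
}
```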
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/FindSloResponse.md b/generated/slo/docs/FindSloResponse.md index f881cfaf7..c807a23cb 100644 --- a/generated/slo/docs/FindSloResponse.md +++ b/generated/slo/docs/FindSloResponse.md @@ -4,10 +4,12 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- +**Size** | Pointer to **float64** | Size provided for cursor based pagination | [optional] +**SearchAfter** | Pointer to **string** | | [optional] **Page** | Pointer to **float64** | | [optional] **PerPage** | Pointer to **float64** | | [optional] **Total** | Pointer to **float64** | | [optional] -**Results** | Pointer to [**[]SloResponse**](SloResponse.md) | | [optional] +**Results** | Pointer to [**[]SloWithSummaryResponse**](SloWithSummaryResponse.md) | | [optional] ## Methods @@ -28,6 +30,56 @@ NewFindSloResponseWithDefaults instantiates a new FindSloResponse object This constructor will only assign default values to properties that have it defined, but it doesn't guarantee that properties required by API are set +### GetSize + +`func (o *FindSloResponse) GetSize() float64` + +GetSize returns the Size field if non-nil, zero value otherwise. + +### GetSizeOk + +`func (o *FindSloResponse) GetSizeOk() (*float64, bool)` + +GetSizeOk returns a tuple with the Size field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSize + +`func (o *FindSloResponse) SetSize(v float64)` + +SetSize sets Size field to given value. + +### HasSize + +`func (o *FindSloResponse) HasSize() bool` + +HasSize returns a boolean if a field has been set. + +### GetSearchAfter + +`func (o *FindSloResponse) GetSearchAfter() string` + +GetSearchAfter returns the SearchAfter field if non-nil, zero value otherwise. + +### GetSearchAfterOk + +`func (o *FindSloResponse) GetSearchAfterOk() (*string, bool)` + +GetSearchAfterOk returns a tuple with the SearchAfter field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSearchAfter + +`func (o *FindSloResponse) SetSearchAfter(v string)` + +SetSearchAfter sets SearchAfter field to given value. + +### HasSearchAfter + +`func (o *FindSloResponse) HasSearchAfter() bool` + +HasSearchAfter returns a boolean if a field has been set. + ### GetPage `func (o *FindSloResponse) GetPage() float64` @@ -105,20 +157,20 @@ HasTotal returns a boolean if a field has been set. ### GetResults -`func (o *FindSloResponse) GetResults() []SloResponse` +`func (o *FindSloResponse) GetResults() []SloWithSummaryResponse` GetResults returns the Results field if non-nil, zero value otherwise. ### GetResultsOk -`func (o *FindSloResponse) GetResultsOk() (*[]SloResponse, bool)` +`func (o *FindSloResponse) GetResultsOk() (*[]SloWithSummaryResponse, bool)` GetResultsOk returns a tuple with the Results field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetResults -`func (o *FindSloResponse) SetResults(v []SloResponse)` +`func (o *FindSloResponse) SetResults(v []SloWithSummaryResponse)` SetResults sets Results field to given value. 
diff --git a/generated/slo/docs/SloResponseGroupBy.md b/generated/slo/docs/GroupBy.md similarity index 65% rename from generated/slo/docs/SloResponseGroupBy.md rename to generated/slo/docs/GroupBy.md index 81a44275f..1d8f878a1 100644 --- a/generated/slo/docs/SloResponseGroupBy.md +++ b/generated/slo/docs/GroupBy.md @@ -1,4 +1,4 @@ -# SloResponseGroupBy +# GroupBy ## Properties @@ -7,20 +7,20 @@ Name | Type | Description | Notes ## Methods -### NewSloResponseGroupBy +### NewGroupBy -`func NewSloResponseGroupBy() *SloResponseGroupBy` +`func NewGroupBy() *GroupBy` -NewSloResponseGroupBy instantiates a new SloResponseGroupBy object +NewGroupBy instantiates a new GroupBy object This constructor will assign default values to properties that have it defined, and makes sure properties required by API are set, but the set of arguments will change when the set of required properties is changed -### NewSloResponseGroupByWithDefaults +### NewGroupByWithDefaults -`func NewSloResponseGroupByWithDefaults() *SloResponseGroupBy` +`func NewGroupByWithDefaults() *GroupBy` -NewSloResponseGroupByWithDefaults instantiates a new SloResponseGroupBy object +NewGroupByWithDefaults instantiates a new GroupBy object This constructor will only assign default values to properties that have it defined, but it doesn't guarantee that properties required by API are set diff --git a/generated/slo/docs/HistoricalSummaryResponseInner.md b/generated/slo/docs/HistoricalSummaryResponseInner.md deleted file mode 100644 index ea3c738b3..000000000 --- a/generated/slo/docs/HistoricalSummaryResponseInner.md +++ /dev/null @@ -1,134 +0,0 @@ -# HistoricalSummaryResponseInner - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**Date** | Pointer to **string** | | [optional] -**Status** | Pointer to [**SummaryStatus**](SummaryStatus.md) | | [optional] -**SliValue** | Pointer to **float64** | | [optional] -**ErrorBudget** | Pointer to [**ErrorBudget**](ErrorBudget.md) | | [optional] - -## Methods - -### NewHistoricalSummaryResponseInner - -`func NewHistoricalSummaryResponseInner() *HistoricalSummaryResponseInner` - -NewHistoricalSummaryResponseInner instantiates a new HistoricalSummaryResponseInner object -This constructor will assign default values to properties that have it defined, -and makes sure properties required by API are set, but the set of arguments -will change when the set of required properties is changed - -### NewHistoricalSummaryResponseInnerWithDefaults - -`func NewHistoricalSummaryResponseInnerWithDefaults() *HistoricalSummaryResponseInner` - -NewHistoricalSummaryResponseInnerWithDefaults instantiates a new HistoricalSummaryResponseInner object -This constructor will only assign default values to properties that have it defined, -but it doesn't guarantee that properties required by API are set - -### GetDate - -`func (o *HistoricalSummaryResponseInner) GetDate() string` - -GetDate returns the Date field if non-nil, zero value otherwise. - -### GetDateOk - -`func (o *HistoricalSummaryResponseInner) GetDateOk() (*string, bool)` - -GetDateOk returns a tuple with the Date field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetDate - -`func (o *HistoricalSummaryResponseInner) SetDate(v string)` - -SetDate sets Date field to given value. - -### HasDate - -`func (o *HistoricalSummaryResponseInner) HasDate() bool` - -HasDate returns a boolean if a field has been set. 
- -### GetStatus - -`func (o *HistoricalSummaryResponseInner) GetStatus() SummaryStatus` - -GetStatus returns the Status field if non-nil, zero value otherwise. - -### GetStatusOk - -`func (o *HistoricalSummaryResponseInner) GetStatusOk() (*SummaryStatus, bool)` - -GetStatusOk returns a tuple with the Status field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetStatus - -`func (o *HistoricalSummaryResponseInner) SetStatus(v SummaryStatus)` - -SetStatus sets Status field to given value. - -### HasStatus - -`func (o *HistoricalSummaryResponseInner) HasStatus() bool` - -HasStatus returns a boolean if a field has been set. - -### GetSliValue - -`func (o *HistoricalSummaryResponseInner) GetSliValue() float64` - -GetSliValue returns the SliValue field if non-nil, zero value otherwise. - -### GetSliValueOk - -`func (o *HistoricalSummaryResponseInner) GetSliValueOk() (*float64, bool)` - -GetSliValueOk returns a tuple with the SliValue field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetSliValue - -`func (o *HistoricalSummaryResponseInner) SetSliValue(v float64)` - -SetSliValue sets SliValue field to given value. - -### HasSliValue - -`func (o *HistoricalSummaryResponseInner) HasSliValue() bool` - -HasSliValue returns a boolean if a field has been set. - -### GetErrorBudget - -`func (o *HistoricalSummaryResponseInner) GetErrorBudget() ErrorBudget` - -GetErrorBudget returns the ErrorBudget field if non-nil, zero value otherwise. - -### GetErrorBudgetOk - -`func (o *HistoricalSummaryResponseInner) GetErrorBudgetOk() (*ErrorBudget, bool)` - -GetErrorBudgetOk returns a tuple with the ErrorBudget field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetErrorBudget - -`func (o *HistoricalSummaryResponseInner) SetErrorBudget(v ErrorBudget)` - -SetErrorBudget sets ErrorBudget field to given value. - -### HasErrorBudget - -`func (o *HistoricalSummaryResponseInner) HasErrorBudget() bool` - -HasErrorBudget returns a boolean if a field has been set. - - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/generated/slo/docs/IndicatorPropertiesCustomKqlParams.md b/generated/slo/docs/IndicatorPropertiesCustomKqlParams.md index 317db1569..9b044dfbf 100644 --- a/generated/slo/docs/IndicatorPropertiesCustomKqlParams.md +++ b/generated/slo/docs/IndicatorPropertiesCustomKqlParams.md @@ -5,16 +5,17 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **Index** | **string** | The index or index pattern to use | -**Filter** | Pointer to **string** | the KQL query to filter the documents with. | [optional] -**Good** | **string** | the KQL query used to define the good events. | -**Total** | **string** | the KQL query used to define all events. | +**DataViewId** | Pointer to **string** | The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. | [optional] +**Filter** | Pointer to [**KqlWithFilters**](KqlWithFilters.md) | | [optional] +**Good** | [**KqlWithFiltersGood**](KqlWithFiltersGood.md) | | +**Total** | [**KqlWithFiltersTotal**](KqlWithFiltersTotal.md) | | **TimestampField** | **string** | The timestamp field used in the source indice. 
| ## Methods ### NewIndicatorPropertiesCustomKqlParams -`func NewIndicatorPropertiesCustomKqlParams(index string, good string, total string, timestampField string, ) *IndicatorPropertiesCustomKqlParams` +`func NewIndicatorPropertiesCustomKqlParams(index string, good KqlWithFiltersGood, total KqlWithFiltersTotal, timestampField string, ) *IndicatorPropertiesCustomKqlParams` NewIndicatorPropertiesCustomKqlParams instantiates a new IndicatorPropertiesCustomKqlParams object This constructor will assign default values to properties that have it defined, @@ -49,22 +50,47 @@ and a boolean to check if the value has been set. SetIndex sets Index field to given value. +### GetDataViewId + +`func (o *IndicatorPropertiesCustomKqlParams) GetDataViewId() string` + +GetDataViewId returns the DataViewId field if non-nil, zero value otherwise. + +### GetDataViewIdOk + +`func (o *IndicatorPropertiesCustomKqlParams) GetDataViewIdOk() (*string, bool)` + +GetDataViewIdOk returns a tuple with the DataViewId field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDataViewId + +`func (o *IndicatorPropertiesCustomKqlParams) SetDataViewId(v string)` + +SetDataViewId sets DataViewId field to given value. + +### HasDataViewId + +`func (o *IndicatorPropertiesCustomKqlParams) HasDataViewId() bool` + +HasDataViewId returns a boolean if a field has been set. + ### GetFilter -`func (o *IndicatorPropertiesCustomKqlParams) GetFilter() string` +`func (o *IndicatorPropertiesCustomKqlParams) GetFilter() KqlWithFilters` GetFilter returns the Filter field if non-nil, zero value otherwise. ### GetFilterOk -`func (o *IndicatorPropertiesCustomKqlParams) GetFilterOk() (*string, bool)` +`func (o *IndicatorPropertiesCustomKqlParams) GetFilterOk() (*KqlWithFilters, bool)` GetFilterOk returns a tuple with the Filter field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetFilter -`func (o *IndicatorPropertiesCustomKqlParams) SetFilter(v string)` +`func (o *IndicatorPropertiesCustomKqlParams) SetFilter(v KqlWithFilters)` SetFilter sets Filter field to given value. @@ -76,40 +102,40 @@ HasFilter returns a boolean if a field has been set. ### GetGood -`func (o *IndicatorPropertiesCustomKqlParams) GetGood() string` +`func (o *IndicatorPropertiesCustomKqlParams) GetGood() KqlWithFiltersGood` GetGood returns the Good field if non-nil, zero value otherwise. ### GetGoodOk -`func (o *IndicatorPropertiesCustomKqlParams) GetGoodOk() (*string, bool)` +`func (o *IndicatorPropertiesCustomKqlParams) GetGoodOk() (*KqlWithFiltersGood, bool)` GetGoodOk returns a tuple with the Good field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetGood -`func (o *IndicatorPropertiesCustomKqlParams) SetGood(v string)` +`func (o *IndicatorPropertiesCustomKqlParams) SetGood(v KqlWithFiltersGood)` SetGood sets Good field to given value. ### GetTotal -`func (o *IndicatorPropertiesCustomKqlParams) GetTotal() string` +`func (o *IndicatorPropertiesCustomKqlParams) GetTotal() KqlWithFiltersTotal` GetTotal returns the Total field if non-nil, zero value otherwise. ### GetTotalOk -`func (o *IndicatorPropertiesCustomKqlParams) GetTotalOk() (*string, bool)` +`func (o *IndicatorPropertiesCustomKqlParams) GetTotalOk() (*KqlWithFiltersTotal, bool)` GetTotalOk returns a tuple with the Total field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. 
### SetTotal -`func (o *IndicatorPropertiesCustomKqlParams) SetTotal(v string)` +`func (o *IndicatorPropertiesCustomKqlParams) SetTotal(v KqlWithFiltersTotal)` SetTotal sets Total field to given value. diff --git a/generated/slo/docs/IndicatorPropertiesCustomMetricParams.md b/generated/slo/docs/IndicatorPropertiesCustomMetricParams.md index ea6814410..28532ea6b 100644 --- a/generated/slo/docs/IndicatorPropertiesCustomMetricParams.md +++ b/generated/slo/docs/IndicatorPropertiesCustomMetricParams.md @@ -5,6 +5,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **Index** | **string** | The index or index pattern to use | +**DataViewId** | Pointer to **string** | The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. | [optional] **Filter** | Pointer to **string** | the KQL query to filter the documents with. | [optional] **TimestampField** | **string** | The timestamp field used in the source indice. | **Good** | [**IndicatorPropertiesCustomMetricParamsGood**](IndicatorPropertiesCustomMetricParamsGood.md) | | @@ -49,6 +50,31 @@ and a boolean to check if the value has been set. SetIndex sets Index field to given value. +### GetDataViewId + +`func (o *IndicatorPropertiesCustomMetricParams) GetDataViewId() string` + +GetDataViewId returns the DataViewId field if non-nil, zero value otherwise. + +### GetDataViewIdOk + +`func (o *IndicatorPropertiesCustomMetricParams) GetDataViewIdOk() (*string, bool)` + +GetDataViewIdOk returns a tuple with the DataViewId field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDataViewId + +`func (o *IndicatorPropertiesCustomMetricParams) SetDataViewId(v string)` + +SetDataViewId sets DataViewId field to given value. + +### HasDataViewId + +`func (o *IndicatorPropertiesCustomMetricParams) HasDataViewId() bool` + +HasDataViewId returns a boolean if a field has been set. + ### GetFilter `func (o *IndicatorPropertiesCustomMetricParams) GetFilter() string` diff --git a/generated/slo/docs/IndicatorPropertiesCustomMetricParamsGoodMetricsInner.md b/generated/slo/docs/IndicatorPropertiesCustomMetricParamsGoodMetricsInner.md index 3d485fa6c..579856e03 100644 --- a/generated/slo/docs/IndicatorPropertiesCustomMetricParamsGoodMetricsInner.md +++ b/generated/slo/docs/IndicatorPropertiesCustomMetricParamsGoodMetricsInner.md @@ -5,7 +5,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **Name** | **string** | The name of the metric. Only valid options are A-Z | -**Aggregation** | **string** | The aggregation type of the metric. Only valid option is \"sum\" | +**Aggregation** | **string** | The aggregation type of the metric. | **Field** | **string** | The field of the metric. | **Filter** | Pointer to **string** | The filter to apply to the metric. 
| [optional] diff --git a/generated/slo/docs/IndicatorPropertiesCustomMetricParamsTotal.md b/generated/slo/docs/IndicatorPropertiesCustomMetricParamsTotal.md index 9b33ee2fe..f0f1df1af 100644 --- a/generated/slo/docs/IndicatorPropertiesCustomMetricParamsTotal.md +++ b/generated/slo/docs/IndicatorPropertiesCustomMetricParamsTotal.md @@ -4,14 +4,14 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**Metrics** | [**[]IndicatorPropertiesCustomMetricParamsTotalMetricsInner**](IndicatorPropertiesCustomMetricParamsTotalMetricsInner.md) | List of metrics with their name, aggregation type, and field. | +**Metrics** | [**[]IndicatorPropertiesCustomMetricParamsGoodMetricsInner**](IndicatorPropertiesCustomMetricParamsGoodMetricsInner.md) | List of metrics with their name, aggregation type, and field. | **Equation** | **string** | The equation to calculate the \"total\" metric. | ## Methods ### NewIndicatorPropertiesCustomMetricParamsTotal -`func NewIndicatorPropertiesCustomMetricParamsTotal(metrics []IndicatorPropertiesCustomMetricParamsTotalMetricsInner, equation string, ) *IndicatorPropertiesCustomMetricParamsTotal` +`func NewIndicatorPropertiesCustomMetricParamsTotal(metrics []IndicatorPropertiesCustomMetricParamsGoodMetricsInner, equation string, ) *IndicatorPropertiesCustomMetricParamsTotal` NewIndicatorPropertiesCustomMetricParamsTotal instantiates a new IndicatorPropertiesCustomMetricParamsTotal object This constructor will assign default values to properties that have it defined, @@ -28,20 +28,20 @@ but it doesn't guarantee that properties required by API are set ### GetMetrics -`func (o *IndicatorPropertiesCustomMetricParamsTotal) GetMetrics() []IndicatorPropertiesCustomMetricParamsTotalMetricsInner` +`func (o *IndicatorPropertiesCustomMetricParamsTotal) GetMetrics() []IndicatorPropertiesCustomMetricParamsGoodMetricsInner` GetMetrics returns the Metrics field if non-nil, zero value otherwise. ### GetMetricsOk -`func (o *IndicatorPropertiesCustomMetricParamsTotal) GetMetricsOk() (*[]IndicatorPropertiesCustomMetricParamsTotalMetricsInner, bool)` +`func (o *IndicatorPropertiesCustomMetricParamsTotal) GetMetricsOk() (*[]IndicatorPropertiesCustomMetricParamsGoodMetricsInner, bool)` GetMetricsOk returns a tuple with the Metrics field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetMetrics -`func (o *IndicatorPropertiesCustomMetricParamsTotal) SetMetrics(v []IndicatorPropertiesCustomMetricParamsTotalMetricsInner)` +`func (o *IndicatorPropertiesCustomMetricParamsTotal) SetMetrics(v []IndicatorPropertiesCustomMetricParamsGoodMetricsInner)` SetMetrics sets Metrics field to given value. diff --git a/generated/slo/docs/IndicatorPropertiesCustomMetricParamsTotalMetricsInner.md b/generated/slo/docs/IndicatorPropertiesCustomMetricParamsTotalMetricsInner.md deleted file mode 100644 index 6f528a370..000000000 --- a/generated/slo/docs/IndicatorPropertiesCustomMetricParamsTotalMetricsInner.md +++ /dev/null @@ -1,119 +0,0 @@ -# IndicatorPropertiesCustomMetricParamsTotalMetricsInner - -## Properties - -Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- -**Name** | **string** | The name of the metric. Only valid options are A-Z | -**Aggregation** | **string** | The aggregation type of the metric. Only valid option is \"sum\" | -**Field** | **string** | The field of the metric. | -**Filter** | Pointer to **string** | The filter to apply to the metric. 
| [optional] - -## Methods - -### NewIndicatorPropertiesCustomMetricParamsTotalMetricsInner - -`func NewIndicatorPropertiesCustomMetricParamsTotalMetricsInner(name string, aggregation string, field string, ) *IndicatorPropertiesCustomMetricParamsTotalMetricsInner` - -NewIndicatorPropertiesCustomMetricParamsTotalMetricsInner instantiates a new IndicatorPropertiesCustomMetricParamsTotalMetricsInner object -This constructor will assign default values to properties that have it defined, -and makes sure properties required by API are set, but the set of arguments -will change when the set of required properties is changed - -### NewIndicatorPropertiesCustomMetricParamsTotalMetricsInnerWithDefaults - -`func NewIndicatorPropertiesCustomMetricParamsTotalMetricsInnerWithDefaults() *IndicatorPropertiesCustomMetricParamsTotalMetricsInner` - -NewIndicatorPropertiesCustomMetricParamsTotalMetricsInnerWithDefaults instantiates a new IndicatorPropertiesCustomMetricParamsTotalMetricsInner object -This constructor will only assign default values to properties that have it defined, -but it doesn't guarantee that properties required by API are set - -### GetName - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetName() string` - -GetName returns the Name field if non-nil, zero value otherwise. - -### GetNameOk - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetNameOk() (*string, bool)` - -GetNameOk returns a tuple with the Name field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetName - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) SetName(v string)` - -SetName sets Name field to given value. - - -### GetAggregation - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetAggregation() string` - -GetAggregation returns the Aggregation field if non-nil, zero value otherwise. - -### GetAggregationOk - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetAggregationOk() (*string, bool)` - -GetAggregationOk returns a tuple with the Aggregation field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetAggregation - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) SetAggregation(v string)` - -SetAggregation sets Aggregation field to given value. - - -### GetField - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetField() string` - -GetField returns the Field field if non-nil, zero value otherwise. - -### GetFieldOk - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetFieldOk() (*string, bool)` - -GetFieldOk returns a tuple with the Field field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetField - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) SetField(v string)` - -SetField sets Field field to given value. - - -### GetFilter - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetFilter() string` - -GetFilter returns the Filter field if non-nil, zero value otherwise. - -### GetFilterOk - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetFilterOk() (*string, bool)` - -GetFilterOk returns a tuple with the Filter field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetFilter - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) SetFilter(v string)` - -SetFilter sets Filter field to given value. 
- -### HasFilter - -`func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) HasFilter() bool` - -HasFilter returns a boolean if a field has been set. - - -[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) - - diff --git a/generated/slo/docs/IndicatorPropertiesHistogramParams.md b/generated/slo/docs/IndicatorPropertiesHistogramParams.md index 89263c048..f2b432a67 100644 --- a/generated/slo/docs/IndicatorPropertiesHistogramParams.md +++ b/generated/slo/docs/IndicatorPropertiesHistogramParams.md @@ -5,6 +5,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **Index** | **string** | The index or index pattern to use | +**DataViewId** | Pointer to **string** | The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. | [optional] **Filter** | Pointer to **string** | the KQL query to filter the documents with. | [optional] **TimestampField** | **string** | The timestamp field used in the source indice. | **Good** | [**IndicatorPropertiesHistogramParamsGood**](IndicatorPropertiesHistogramParamsGood.md) | | @@ -49,6 +50,31 @@ and a boolean to check if the value has been set. SetIndex sets Index field to given value. +### GetDataViewId + +`func (o *IndicatorPropertiesHistogramParams) GetDataViewId() string` + +GetDataViewId returns the DataViewId field if non-nil, zero value otherwise. + +### GetDataViewIdOk + +`func (o *IndicatorPropertiesHistogramParams) GetDataViewIdOk() (*string, bool)` + +GetDataViewIdOk returns a tuple with the DataViewId field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDataViewId + +`func (o *IndicatorPropertiesHistogramParams) SetDataViewId(v string)` + +SetDataViewId sets DataViewId field to given value. + +### HasDataViewId + +`func (o *IndicatorPropertiesHistogramParams) HasDataViewId() bool` + +HasDataViewId returns a boolean if a field has been set. + ### GetFilter `func (o *IndicatorPropertiesHistogramParams) GetFilter() string` diff --git a/generated/slo/docs/IndicatorPropertiesTimesliceMetricParams.md b/generated/slo/docs/IndicatorPropertiesTimesliceMetricParams.md index ad08d9256..89e8825c0 100644 --- a/generated/slo/docs/IndicatorPropertiesTimesliceMetricParams.md +++ b/generated/slo/docs/IndicatorPropertiesTimesliceMetricParams.md @@ -5,6 +5,7 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- **Index** | **string** | The index or index pattern to use | +**DataViewId** | Pointer to **string** | The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. | [optional] **Filter** | Pointer to **string** | the KQL query to filter the documents with. | [optional] **TimestampField** | **string** | The timestamp field used in the source indice. | **Metric** | [**IndicatorPropertiesTimesliceMetricParamsMetric**](IndicatorPropertiesTimesliceMetricParamsMetric.md) | | @@ -48,6 +49,31 @@ and a boolean to check if the value has been set. SetIndex sets Index field to given value. 
+### GetDataViewId + +`func (o *IndicatorPropertiesTimesliceMetricParams) GetDataViewId() string` + +GetDataViewId returns the DataViewId field if non-nil, zero value otherwise. + +### GetDataViewIdOk + +`func (o *IndicatorPropertiesTimesliceMetricParams) GetDataViewIdOk() (*string, bool)` + +GetDataViewIdOk returns a tuple with the DataViewId field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDataViewId + +`func (o *IndicatorPropertiesTimesliceMetricParams) SetDataViewId(v string)` + +SetDataViewId sets DataViewId field to given value. + +### HasDataViewId + +`func (o *IndicatorPropertiesTimesliceMetricParams) HasDataViewId() bool` + +HasDataViewId returns a boolean if a field has been set. + ### GetFilter `func (o *IndicatorPropertiesTimesliceMetricParams) GetFilter() string` diff --git a/generated/slo/docs/KqlWithFilters.md b/generated/slo/docs/KqlWithFilters.md new file mode 100644 index 000000000..bf349231e --- /dev/null +++ b/generated/slo/docs/KqlWithFilters.md @@ -0,0 +1,82 @@ +# KqlWithFilters + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**KqlQuery** | Pointer to **string** | | [optional] +**Filters** | Pointer to [**[]Filter**](Filter.md) | | [optional] + +## Methods + +### NewKqlWithFilters + +`func NewKqlWithFilters() *KqlWithFilters` + +NewKqlWithFilters instantiates a new KqlWithFilters object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewKqlWithFiltersWithDefaults + +`func NewKqlWithFiltersWithDefaults() *KqlWithFilters` + +NewKqlWithFiltersWithDefaults instantiates a new KqlWithFilters object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetKqlQuery + +`func (o *KqlWithFilters) GetKqlQuery() string` + +GetKqlQuery returns the KqlQuery field if non-nil, zero value otherwise. + +### GetKqlQueryOk + +`func (o *KqlWithFilters) GetKqlQueryOk() (*string, bool)` + +GetKqlQueryOk returns a tuple with the KqlQuery field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetKqlQuery + +`func (o *KqlWithFilters) SetKqlQuery(v string)` + +SetKqlQuery sets KqlQuery field to given value. + +### HasKqlQuery + +`func (o *KqlWithFilters) HasKqlQuery() bool` + +HasKqlQuery returns a boolean if a field has been set. + +### GetFilters + +`func (o *KqlWithFilters) GetFilters() []Filter` + +GetFilters returns the Filters field if non-nil, zero value otherwise. + +### GetFiltersOk + +`func (o *KqlWithFilters) GetFiltersOk() (*[]Filter, bool)` + +GetFiltersOk returns a tuple with the Filters field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetFilters + +`func (o *KqlWithFilters) SetFilters(v []Filter)` + +SetFilters sets Filters field to given value. + +### HasFilters + +`func (o *KqlWithFilters) HasFilters() bool` + +HasFilters returns a boolean if a field has been set. 
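For orientation only (this sketch is not part of the generated output): a minimal example of building a `KqlWithFilters` value with the accessors listed above. The package alias and import path are copied from the endpoint examples elsewhere in these docs, and the query string is a placeholder.

```go
package main

import (
	"fmt"

	openapiclient "github.com/elastic/terraform-provider-elasticstack/slo"
)

func main() {
	// Build a structured KQL filter; both fields are optional pointers,
	// so unset fields report false from the Has* accessors.
	filter := openapiclient.NewKqlWithFilters()
	filter.SetKqlQuery(`labels.env : "production"`) // placeholder query

	fmt.Println(filter.HasKqlQuery()) // true
	fmt.Println(filter.HasFilters())  // false until SetFilters is called
}
```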
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/KqlWithFiltersGood.md b/generated/slo/docs/KqlWithFiltersGood.md new file mode 100644 index 000000000..9fb63f991 --- /dev/null +++ b/generated/slo/docs/KqlWithFiltersGood.md @@ -0,0 +1,82 @@ +# KqlWithFiltersGood + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**KqlQuery** | Pointer to **string** | | [optional] +**Filters** | Pointer to [**[]Filter**](Filter.md) | | [optional] + +## Methods + +### NewKqlWithFiltersGood + +`func NewKqlWithFiltersGood() *KqlWithFiltersGood` + +NewKqlWithFiltersGood instantiates a new KqlWithFiltersGood object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewKqlWithFiltersGoodWithDefaults + +`func NewKqlWithFiltersGoodWithDefaults() *KqlWithFiltersGood` + +NewKqlWithFiltersGoodWithDefaults instantiates a new KqlWithFiltersGood object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetKqlQuery + +`func (o *KqlWithFiltersGood) GetKqlQuery() string` + +GetKqlQuery returns the KqlQuery field if non-nil, zero value otherwise. + +### GetKqlQueryOk + +`func (o *KqlWithFiltersGood) GetKqlQueryOk() (*string, bool)` + +GetKqlQueryOk returns a tuple with the KqlQuery field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetKqlQuery + +`func (o *KqlWithFiltersGood) SetKqlQuery(v string)` + +SetKqlQuery sets KqlQuery field to given value. + +### HasKqlQuery + +`func (o *KqlWithFiltersGood) HasKqlQuery() bool` + +HasKqlQuery returns a boolean if a field has been set. + +### GetFilters + +`func (o *KqlWithFiltersGood) GetFilters() []Filter` + +GetFilters returns the Filters field if non-nil, zero value otherwise. + +### GetFiltersOk + +`func (o *KqlWithFiltersGood) GetFiltersOk() (*[]Filter, bool)` + +GetFiltersOk returns a tuple with the Filters field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetFilters + +`func (o *KqlWithFiltersGood) SetFilters(v []Filter)` + +SetFilters sets Filters field to given value. + +### HasFilters + +`func (o *KqlWithFiltersGood) HasFilters() bool` + +HasFilters returns a boolean if a field has been set. 
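A slightly fuller sketch (again illustrative, not generated) of how the typed `good`/`total` values and the structured filter could be wired into the custom KQL indicator parameters whose updated constructor is shown earlier in this section. The import path follows the endpoint examples in these docs; the index name, queries, field names, and data view id are placeholders.

```go
package main

import (
	"fmt"

	openapiclient "github.com/elastic/terraform-provider-elasticstack/slo"
)

func main() {
	// "Good" and "total" events are now expressed as typed KQL objects
	// rather than plain strings.
	good := openapiclient.NewKqlWithFiltersGood()
	good.SetKqlQuery("http.response.status_code < 500") // placeholder query

	total := openapiclient.NewKqlWithFiltersTotal()
	total.SetKqlQuery("http.request.method : *") // placeholder query

	// The constructor takes the required fields by value.
	params := openapiclient.NewIndicatorPropertiesCustomKqlParams(
		"logs-*", *good, *total, "@timestamp",
	)

	// Optional extras documented above: a structured filter and a data view id.
	filter := openapiclient.NewKqlWithFilters()
	filter.SetKqlQuery(`labels.env : "production"`) // placeholder query
	params.SetFilter(*filter)
	params.SetDataViewId("my-data-view-id") // placeholder id

	fmt.Println(params.HasFilter(), params.HasDataViewId()) // true true
}
```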
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/KqlWithFiltersOneOf.md b/generated/slo/docs/KqlWithFiltersOneOf.md new file mode 100644 index 000000000..335fe503a --- /dev/null +++ b/generated/slo/docs/KqlWithFiltersOneOf.md @@ -0,0 +1,82 @@ +# KqlWithFiltersOneOf + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**KqlQuery** | Pointer to **string** | | [optional] +**Filters** | Pointer to [**[]Filter**](Filter.md) | | [optional] + +## Methods + +### NewKqlWithFiltersOneOf + +`func NewKqlWithFiltersOneOf() *KqlWithFiltersOneOf` + +NewKqlWithFiltersOneOf instantiates a new KqlWithFiltersOneOf object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewKqlWithFiltersOneOfWithDefaults + +`func NewKqlWithFiltersOneOfWithDefaults() *KqlWithFiltersOneOf` + +NewKqlWithFiltersOneOfWithDefaults instantiates a new KqlWithFiltersOneOf object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetKqlQuery + +`func (o *KqlWithFiltersOneOf) GetKqlQuery() string` + +GetKqlQuery returns the KqlQuery field if non-nil, zero value otherwise. + +### GetKqlQueryOk + +`func (o *KqlWithFiltersOneOf) GetKqlQueryOk() (*string, bool)` + +GetKqlQueryOk returns a tuple with the KqlQuery field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetKqlQuery + +`func (o *KqlWithFiltersOneOf) SetKqlQuery(v string)` + +SetKqlQuery sets KqlQuery field to given value. + +### HasKqlQuery + +`func (o *KqlWithFiltersOneOf) HasKqlQuery() bool` + +HasKqlQuery returns a boolean if a field has been set. + +### GetFilters + +`func (o *KqlWithFiltersOneOf) GetFilters() []Filter` + +GetFilters returns the Filters field if non-nil, zero value otherwise. + +### GetFiltersOk + +`func (o *KqlWithFiltersOneOf) GetFiltersOk() (*[]Filter, bool)` + +GetFiltersOk returns a tuple with the Filters field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetFilters + +`func (o *KqlWithFiltersOneOf) SetFilters(v []Filter)` + +SetFilters sets Filters field to given value. + +### HasFilters + +`func (o *KqlWithFiltersOneOf) HasFilters() bool` + +HasFilters returns a boolean if a field has been set. 
+ + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/KqlWithFiltersTotal.md b/generated/slo/docs/KqlWithFiltersTotal.md new file mode 100644 index 000000000..afb7e2072 --- /dev/null +++ b/generated/slo/docs/KqlWithFiltersTotal.md @@ -0,0 +1,82 @@ +# KqlWithFiltersTotal + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**KqlQuery** | Pointer to **string** | | [optional] +**Filters** | Pointer to [**[]Filter**](Filter.md) | | [optional] + +## Methods + +### NewKqlWithFiltersTotal + +`func NewKqlWithFiltersTotal() *KqlWithFiltersTotal` + +NewKqlWithFiltersTotal instantiates a new KqlWithFiltersTotal object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewKqlWithFiltersTotalWithDefaults + +`func NewKqlWithFiltersTotalWithDefaults() *KqlWithFiltersTotal` + +NewKqlWithFiltersTotalWithDefaults instantiates a new KqlWithFiltersTotal object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetKqlQuery + +`func (o *KqlWithFiltersTotal) GetKqlQuery() string` + +GetKqlQuery returns the KqlQuery field if non-nil, zero value otherwise. + +### GetKqlQueryOk + +`func (o *KqlWithFiltersTotal) GetKqlQueryOk() (*string, bool)` + +GetKqlQueryOk returns a tuple with the KqlQuery field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetKqlQuery + +`func (o *KqlWithFiltersTotal) SetKqlQuery(v string)` + +SetKqlQuery sets KqlQuery field to given value. + +### HasKqlQuery + +`func (o *KqlWithFiltersTotal) HasKqlQuery() bool` + +HasKqlQuery returns a boolean if a field has been set. + +### GetFilters + +`func (o *KqlWithFiltersTotal) GetFilters() []Filter` + +GetFilters returns the Filters field if non-nil, zero value otherwise. + +### GetFiltersOk + +`func (o *KqlWithFiltersTotal) GetFiltersOk() (*[]Filter, bool)` + +GetFiltersOk returns a tuple with the Filters field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetFilters + +`func (o *KqlWithFiltersTotal) SetFilters(v []Filter)` + +SetFilters sets Filters field to given value. + +### HasFilters + +`func (o *KqlWithFiltersTotal) HasFilters() bool` + +HasFilters returns a boolean if a field has been set. + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/Settings.md b/generated/slo/docs/Settings.md index d501ad1eb..65528a3d1 100644 --- a/generated/slo/docs/Settings.md +++ b/generated/slo/docs/Settings.md @@ -4,8 +4,10 @@ Name | Type | Description | Notes ------------ | ------------- | ------------- | ------------- -**SyncDelay** | Pointer to **string** | The synch delay to apply to the transform. Default 1m | [optional] -**Frequency** | Pointer to **string** | Configure how often the transform runs, default 1m | [optional] +**SyncField** | Pointer to **string** | The date field that is used to identify new documents in the source. It is strongly recommended to use a field that contains the ingest timestamp. 
If you use a different field, you might need to set the delay such that it accounts for data transmission delays. When unspecified, we use the indicator timestamp field. | [optional] +**SyncDelay** | Pointer to **string** | The time delay in minutes between the current time and the latest source data time. Increasing the value will delay any alerting. The default value is 1 minute. The minimum value is 1m and the maximum is 359m. It should always be greater then source index refresh interval. | [optional] [default to "1m"] +**Frequency** | Pointer to **string** | The interval between checks for changes in the source data. The minimum value is 1m and the maximum is 59m. The default value is 1 minute. | [optional] [default to "1m"] +**PreventInitialBackfill** | Pointer to **bool** | Start aggregating data from the time the SLO is created, instead of backfilling data from the beginning of the time window. | [optional] [default to false] ## Methods @@ -26,6 +28,31 @@ NewSettingsWithDefaults instantiates a new Settings object This constructor will only assign default values to properties that have it defined, but it doesn't guarantee that properties required by API are set +### GetSyncField + +`func (o *Settings) GetSyncField() string` + +GetSyncField returns the SyncField field if non-nil, zero value otherwise. + +### GetSyncFieldOk + +`func (o *Settings) GetSyncFieldOk() (*string, bool)` + +GetSyncFieldOk returns a tuple with the SyncField field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSyncField + +`func (o *Settings) SetSyncField(v string)` + +SetSyncField sets SyncField field to given value. + +### HasSyncField + +`func (o *Settings) HasSyncField() bool` + +HasSyncField returns a boolean if a field has been set. + ### GetSyncDelay `func (o *Settings) GetSyncDelay() string` @@ -76,6 +103,31 @@ SetFrequency sets Frequency field to given value. HasFrequency returns a boolean if a field has been set. +### GetPreventInitialBackfill + +`func (o *Settings) GetPreventInitialBackfill() bool` + +GetPreventInitialBackfill returns the PreventInitialBackfill field if non-nil, zero value otherwise. + +### GetPreventInitialBackfillOk + +`func (o *Settings) GetPreventInitialBackfillOk() (*bool, bool)` + +GetPreventInitialBackfillOk returns a tuple with the PreventInitialBackfill field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetPreventInitialBackfill + +`func (o *Settings) SetPreventInitialBackfill(v bool)` + +SetPreventInitialBackfill sets PreventInitialBackfill field to given value. + +### HasPreventInitialBackfill + +`func (o *Settings) HasPreventInitialBackfill() bool` + +HasPreventInitialBackfill returns a boolean if a field has been set. + [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/generated/slo/docs/SloApi.md b/generated/slo/docs/SloApi.md index 7297aeebf..65599d3f9 100644 --- a/generated/slo/docs/SloApi.md +++ b/generated/slo/docs/SloApi.md @@ -1,26 +1,179 @@ # \SloAPI -All URIs are relative to *http://localhost:5601* +All URIs are relative to *https://localhost:5601* Method | HTTP request | Description ------------- | ------------- | ------------- -[**CreateSloOp**](SloAPI.md#CreateSloOp) | **Post** /s/{spaceId}/api/observability/slos | Creates an SLO. 
-[**DeleteSloInstancesOp**](SloAPI.md#DeleteSloInstancesOp) | **Post** /s/{spaceId}/api/observability/slos/_delete_instances | Batch delete rollup and summary data for the matching list of sloId and instanceId -[**DeleteSloOp**](SloAPI.md#DeleteSloOp) | **Delete** /s/{spaceId}/api/observability/slos/{sloId} | Deletes an SLO -[**DisableSloOp**](SloAPI.md#DisableSloOp) | **Post** /s/{spaceId}/api/observability/slos/{sloId}/disable | Disables an SLO -[**EnableSloOp**](SloAPI.md#EnableSloOp) | **Post** /s/{spaceId}/api/observability/slos/{sloId}/enable | Enables an SLO -[**FindSlosOp**](SloAPI.md#FindSlosOp) | **Get** /s/{spaceId}/api/observability/slos | Retrieves a paginated list of SLOs -[**GetSloOp**](SloAPI.md#GetSloOp) | **Get** /s/{spaceId}/api/observability/slos/{sloId} | Retrieves a SLO -[**HistoricalSummaryOp**](SloAPI.md#HistoricalSummaryOp) | **Post** /s/{spaceId}/internal/observability/slos/_historical_summary | Retrieves the historical summary for a list of SLOs -[**UpdateSloOp**](SloAPI.md#UpdateSloOp) | **Put** /s/{spaceId}/api/observability/slos/{sloId} | Updates an SLO +[**BulkDeleteOp**](SloAPI.md#BulkDeleteOp) | **Post** /s/{spaceId}/api/observability/slos/_bulk_delete | Bulk delete SLO definitions and their associated summary and rollup data. +[**BulkDeleteStatusOp**](SloAPI.md#BulkDeleteStatusOp) | **Get** /s/{spaceId}/api/observability/slos/_bulk_delete/{taskId} | Retrieve the status of the bulk deletion +[**CreateSloOp**](SloAPI.md#CreateSloOp) | **Post** /s/{spaceId}/api/observability/slos | Create an SLO +[**DeleteRollupDataOp**](SloAPI.md#DeleteRollupDataOp) | **Post** /s/{spaceId}/api/observability/slos/_bulk_purge_rollup | Batch delete rollup and summary data +[**DeleteSloInstancesOp**](SloAPI.md#DeleteSloInstancesOp) | **Post** /s/{spaceId}/api/observability/slos/_delete_instances | Batch delete rollup and summary data +[**DeleteSloOp**](SloAPI.md#DeleteSloOp) | **Delete** /s/{spaceId}/api/observability/slos/{sloId} | Delete an SLO +[**DisableSloOp**](SloAPI.md#DisableSloOp) | **Post** /s/{spaceId}/api/observability/slos/{sloId}/disable | Disable an SLO +[**EnableSloOp**](SloAPI.md#EnableSloOp) | **Post** /s/{spaceId}/api/observability/slos/{sloId}/enable | Enable an SLO +[**FindSlosOp**](SloAPI.md#FindSlosOp) | **Get** /s/{spaceId}/api/observability/slos | Get a paginated list of SLOs +[**GetDefinitionsOp**](SloAPI.md#GetDefinitionsOp) | **Get** /s/{spaceId}/internal/observability/slos/_definitions | Get the SLO definitions +[**GetSloOp**](SloAPI.md#GetSloOp) | **Get** /s/{spaceId}/api/observability/slos/{sloId} | Get an SLO +[**ResetSloOp**](SloAPI.md#ResetSloOp) | **Post** /s/{spaceId}/api/observability/slos/{sloId}/_reset | Reset an SLO +[**UpdateSloOp**](SloAPI.md#UpdateSloOp) | **Put** /s/{spaceId}/api/observability/slos/{sloId} | Update an SLO +## BulkDeleteOp + +> BulkDeleteResponse BulkDeleteOp(ctx, spaceId).KbnXsrf(kbnXsrf).BulkDeleteRequest(bulkDeleteRequest).Execute() + +Bulk delete SLO definitions and their associated summary and rollup data. + + + +### Example + +```go +package main + +import ( + "context" + "fmt" + "os" + openapiclient "github.com/elastic/terraform-provider-elasticstack/slo" +) + +func main() { + kbnXsrf := "kbnXsrf_example" // string | Cross-site request forgery protection + spaceId := "default" // string | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. 
+ bulkDeleteRequest := *openapiclient.NewBulkDeleteRequest([]string{"8853df00-ae2e-11ed-90af-09bb6422b258"}) // BulkDeleteRequest | + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.SloAPI.BulkDeleteOp(context.Background(), spaceId).KbnXsrf(kbnXsrf).BulkDeleteRequest(bulkDeleteRequest).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `SloAPI.BulkDeleteOp``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `BulkDeleteOp`: BulkDeleteResponse + fmt.Fprintf(os.Stdout, "Response from `SloAPI.BulkDeleteOp`: %v\n", resp) +} +``` + +### Path Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- +**ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. +**spaceId** | **string** | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. | + +### Other Parameters + +Other parameters are passed through a pointer to a apiBulkDeleteOpRequest struct via the builder pattern + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **kbnXsrf** | **string** | Cross-site request forgery protection | + + **bulkDeleteRequest** | [**BulkDeleteRequest**](BulkDeleteRequest.md) | | + +### Return type + +[**BulkDeleteResponse**](BulkDeleteResponse.md) + +### Authorization + +[basicAuth](../README.md#basicAuth), [apiKeyAuth](../README.md#apiKeyAuth) + +### HTTP request headers + +- **Content-Type**: application/json +- **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) +[[Back to Model list]](../README.md#documentation-for-models) +[[Back to README]](../README.md) + + +## BulkDeleteStatusOp + +> BulkDeleteStatusResponse BulkDeleteStatusOp(ctx, spaceId, taskId).KbnXsrf(kbnXsrf).Execute() + +Retrieve the status of the bulk deletion + + + +### Example + +```go +package main + +import ( + "context" + "fmt" + "os" + openapiclient "github.com/elastic/terraform-provider-elasticstack/slo" +) + +func main() { + kbnXsrf := "kbnXsrf_example" // string | Cross-site request forgery protection + spaceId := "default" // string | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. + taskId := "8853df00-ae2e-11ed-90af-09bb6422b258" // string | The task id of the bulk delete operation + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.SloAPI.BulkDeleteStatusOp(context.Background(), spaceId, taskId).KbnXsrf(kbnXsrf).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `SloAPI.BulkDeleteStatusOp``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `BulkDeleteStatusOp`: BulkDeleteStatusResponse + fmt.Fprintf(os.Stdout, "Response from `SloAPI.BulkDeleteStatusOp`: %v\n", resp) +} +``` + +### Path Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- +**ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. +**spaceId** | **string** | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. 
| +**taskId** | **string** | The task id of the bulk delete operation | + +### Other Parameters + +Other parameters are passed through a pointer to a apiBulkDeleteStatusOpRequest struct via the builder pattern + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **kbnXsrf** | **string** | Cross-site request forgery protection | + + + +### Return type + +[**BulkDeleteStatusResponse**](BulkDeleteStatusResponse.md) + +### Authorization + +[basicAuth](../README.md#basicAuth), [apiKeyAuth](../README.md#apiKeyAuth) + +### HTTP request headers + +- **Content-Type**: Not defined +- **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) +[[Back to Model list]](../README.md#documentation-for-models) +[[Back to README]](../README.md) + + ## CreateSloOp > CreateSloResponse CreateSloOp(ctx, spaceId).KbnXsrf(kbnXsrf).CreateSloRequest(createSloRequest).Execute() -Creates an SLO. +Create an SLO @@ -90,11 +243,85 @@ Name | Type | Description | Notes [[Back to README]](../README.md) +## DeleteRollupDataOp + +> BulkPurgeRollupResponse DeleteRollupDataOp(ctx, spaceId).KbnXsrf(kbnXsrf).BulkPurgeRollupRequest(bulkPurgeRollupRequest).Execute() + +Batch delete rollup and summary data + + + +### Example + +```go +package main + +import ( + "context" + "fmt" + "os" + openapiclient "github.com/elastic/terraform-provider-elasticstack/slo" +) + +func main() { + kbnXsrf := "kbnXsrf_example" // string | Cross-site request forgery protection + spaceId := "default" // string | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. + bulkPurgeRollupRequest := *openapiclient.NewBulkPurgeRollupRequest([]string{"8853df00-ae2e-11ed-90af-09bb6422b258"}, openapiclient.bulk_purge_rollup_request_purgePolicy{BulkPurgeRollupRequestPurgePolicyOneOf: openapiclient.NewBulkPurgeRollupRequestPurgePolicyOneOf()}) // BulkPurgeRollupRequest | + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.SloAPI.DeleteRollupDataOp(context.Background(), spaceId).KbnXsrf(kbnXsrf).BulkPurgeRollupRequest(bulkPurgeRollupRequest).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `SloAPI.DeleteRollupDataOp``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `DeleteRollupDataOp`: BulkPurgeRollupResponse + fmt.Fprintf(os.Stdout, "Response from `SloAPI.DeleteRollupDataOp`: %v\n", resp) +} +``` + +### Path Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- +**ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. +**spaceId** | **string** | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. 
| + +### Other Parameters + +Other parameters are passed through a pointer to a apiDeleteRollupDataOpRequest struct via the builder pattern + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **kbnXsrf** | **string** | Cross-site request forgery protection | + + **bulkPurgeRollupRequest** | [**BulkPurgeRollupRequest**](BulkPurgeRollupRequest.md) | | + +### Return type + +[**BulkPurgeRollupResponse**](BulkPurgeRollupResponse.md) + +### Authorization + +[basicAuth](../README.md#basicAuth), [apiKeyAuth](../README.md#apiKeyAuth) + +### HTTP request headers + +- **Content-Type**: application/json +- **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) +[[Back to Model list]](../README.md#documentation-for-models) +[[Back to README]](../README.md) + + ## DeleteSloInstancesOp > DeleteSloInstancesOp(ctx, spaceId).KbnXsrf(kbnXsrf).DeleteSloInstancesRequest(deleteSloInstancesRequest).Execute() -Batch delete rollup and summary data for the matching list of sloId and instanceId +Batch delete rollup and summary data @@ -166,7 +393,7 @@ Name | Type | Description | Notes > DeleteSloOp(ctx, spaceId, sloId).KbnXsrf(kbnXsrf).Execute() -Deletes an SLO +Delete an SLO @@ -239,7 +466,7 @@ Name | Type | Description | Notes > DisableSloOp(ctx, spaceId, sloId).KbnXsrf(kbnXsrf).Execute() -Disables an SLO +Disable an SLO @@ -312,7 +539,7 @@ Name | Type | Description | Notes > EnableSloOp(ctx, spaceId, sloId).KbnXsrf(kbnXsrf).Execute() -Enables an SLO +Enable an SLO @@ -383,9 +610,9 @@ Name | Type | Description | Notes ## FindSlosOp -> FindSloResponse FindSlosOp(ctx, spaceId).KbnXsrf(kbnXsrf).KqlQuery(kqlQuery).Page(page).PerPage(perPage).SortBy(sortBy).SortDirection(sortDirection).Execute() +> FindSloResponse FindSlosOp(ctx, spaceId).KbnXsrf(kbnXsrf).KqlQuery(kqlQuery).Size(size).SearchAfter(searchAfter).Page(page).PerPage(perPage).SortBy(sortBy).SortDirection(sortDirection).HideStale(hideStale).Execute() -Retrieves a paginated list of SLOs +Get a paginated list of SLOs @@ -405,14 +632,17 @@ func main() { kbnXsrf := "kbnXsrf_example" // string | Cross-site request forgery protection spaceId := "default" // string | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. kqlQuery := "slo.name:latency* and slo.tags : "prod"" // string | A valid kql query to filter the SLO with (optional) - page := int32(1) // int32 | The page number to return (optional) (default to 1) - perPage := int32(25) // int32 | The number of SLOs to return per page (optional) (default to 25) + size := int32(1) // int32 | The page size to use for cursor-based pagination, must be greater or equal than 1 (optional) (default to 1) + searchAfter := []string{"Inner_example"} // []string | The cursor to use for fetching the results from, when using a cursor-base pagination. 
(optional) + page := int32(1) // int32 | The page to use for pagination, must be greater or equal than 1 (optional) (default to 1) + perPage := int32(25) // int32 | Number of SLOs returned by page (optional) (default to 25) sortBy := "status" // string | Sort by field (optional) (default to "status") sortDirection := "asc" // string | Sort order (optional) (default to "asc") + hideStale := true // bool | Hide stale SLOs from the list as defined by stale SLO threshold in SLO settings (optional) configuration := openapiclient.NewConfiguration() apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.SloAPI.FindSlosOp(context.Background(), spaceId).KbnXsrf(kbnXsrf).KqlQuery(kqlQuery).Page(page).PerPage(perPage).SortBy(sortBy).SortDirection(sortDirection).Execute() + resp, r, err := apiClient.SloAPI.FindSlosOp(context.Background(), spaceId).KbnXsrf(kbnXsrf).KqlQuery(kqlQuery).Size(size).SearchAfter(searchAfter).Page(page).PerPage(perPage).SortBy(sortBy).SortDirection(sortDirection).HideStale(hideStale).Execute() if err != nil { fmt.Fprintf(os.Stderr, "Error when calling `SloAPI.FindSlosOp``: %v\n", err) fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) @@ -440,10 +670,13 @@ Name | Type | Description | Notes **kbnXsrf** | **string** | Cross-site request forgery protection | **kqlQuery** | **string** | A valid kql query to filter the SLO with | - **page** | **int32** | The page number to return | [default to 1] - **perPage** | **int32** | The number of SLOs to return per page | [default to 25] + **size** | **int32** | The page size to use for cursor-based pagination, must be greater or equal than 1 | [default to 1] + **searchAfter** | **[]string** | The cursor to use for fetching the results from, when using a cursor-base pagination. | + **page** | **int32** | The page to use for pagination, must be greater or equal than 1 | [default to 1] + **perPage** | **int32** | Number of SLOs returned by page | [default to 25] **sortBy** | **string** | Sort by field | [default to "status"] **sortDirection** | **string** | Sort order | [default to "asc"] + **hideStale** | **bool** | Hide stale SLOs from the list as defined by stale SLO threshold in SLO settings | ### Return type @@ -463,11 +696,93 @@ Name | Type | Description | Notes [[Back to README]](../README.md) +## GetDefinitionsOp + +> FindSloDefinitionsResponse GetDefinitionsOp(ctx, spaceId).KbnXsrf(kbnXsrf).IncludeOutdatedOnly(includeOutdatedOnly).Tags(tags).Search(search).Page(page).PerPage(perPage).Execute() + +Get the SLO definitions + + + +### Example + +```go +package main + +import ( + "context" + "fmt" + "os" + openapiclient "github.com/elastic/terraform-provider-elasticstack/slo" +) + +func main() { + kbnXsrf := "kbnXsrf_example" // string | Cross-site request forgery protection + spaceId := "default" // string | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. 
+ includeOutdatedOnly := true // bool | Indicates if the API returns only outdated SLO or all SLO definitions (optional) + tags := "tags_example" // string | Filters the SLOs by tag (optional) + search := "my service availability" // string | Filters the SLOs by name (optional) + page := float64(1) // float64 | The page to use for pagination, must be greater or equal than 1 (optional) + perPage := int32(100) // int32 | Number of SLOs returned by page (optional) (default to 100) + + configuration := openapiclient.NewConfiguration() + apiClient := openapiclient.NewAPIClient(configuration) + resp, r, err := apiClient.SloAPI.GetDefinitionsOp(context.Background(), spaceId).KbnXsrf(kbnXsrf).IncludeOutdatedOnly(includeOutdatedOnly).Tags(tags).Search(search).Page(page).PerPage(perPage).Execute() + if err != nil { + fmt.Fprintf(os.Stderr, "Error when calling `SloAPI.GetDefinitionsOp``: %v\n", err) + fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) + } + // response from `GetDefinitionsOp`: FindSloDefinitionsResponse + fmt.Fprintf(os.Stdout, "Response from `SloAPI.GetDefinitionsOp`: %v\n", resp) +} +``` + +### Path Parameters + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- +**ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. +**spaceId** | **string** | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. | + +### Other Parameters + +Other parameters are passed through a pointer to a apiGetDefinitionsOpRequest struct via the builder pattern + + +Name | Type | Description | Notes +------------- | ------------- | ------------- | ------------- + **kbnXsrf** | **string** | Cross-site request forgery protection | + + **includeOutdatedOnly** | **bool** | Indicates if the API returns only outdated SLO or all SLO definitions | + **tags** | **string** | Filters the SLOs by tag | + **search** | **string** | Filters the SLOs by name | + **page** | **float64** | The page to use for pagination, must be greater or equal than 1 | + **perPage** | **int32** | Number of SLOs returned by page | [default to 100] + +### Return type + +[**FindSloDefinitionsResponse**](FindSloDefinitionsResponse.md) + +### Authorization + +[basicAuth](../README.md#basicAuth), [apiKeyAuth](../README.md#apiKeyAuth) + +### HTTP request headers + +- **Content-Type**: Not defined +- **Accept**: application/json + +[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) +[[Back to Model list]](../README.md#documentation-for-models) +[[Back to README]](../README.md) + + ## GetSloOp -> SloResponse GetSloOp(ctx, spaceId, sloId).KbnXsrf(kbnXsrf).InstanceId(instanceId).Execute() +> SloWithSummaryResponse GetSloOp(ctx, spaceId, sloId).KbnXsrf(kbnXsrf).InstanceId(instanceId).Execute() -Retrieves a SLO +Get an SLO @@ -496,7 +811,7 @@ func main() { fmt.Fprintf(os.Stderr, "Error when calling `SloAPI.GetSloOp``: %v\n", err) fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) } - // response from `GetSloOp`: SloResponse + // response from `GetSloOp`: SloWithSummaryResponse fmt.Fprintf(os.Stdout, "Response from `SloAPI.GetSloOp`: %v\n", resp) } ``` @@ -524,7 +839,7 @@ Name | Type | Description | Notes ### Return type -[**SloResponse**](SloResponse.md) +[**SloWithSummaryResponse**](SloWithSummaryResponse.md) ### Authorization @@ -540,11 +855,11 @@ Name | Type | Description | Notes [[Back to README]](../README.md) -## HistoricalSummaryOp +## ResetSloOp -> 
map[string][]HistoricalSummaryResponseInner HistoricalSummaryOp(ctx, spaceId).KbnXsrf(kbnXsrf).HistoricalSummaryRequest(historicalSummaryRequest).Execute() +> SloDefinitionResponse ResetSloOp(ctx, spaceId, sloId).KbnXsrf(kbnXsrf).Execute() -Retrieves the historical summary for a list of SLOs +Reset an SLO @@ -563,17 +878,17 @@ import ( func main() { kbnXsrf := "kbnXsrf_example" // string | Cross-site request forgery protection spaceId := "default" // string | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. - historicalSummaryRequest := *openapiclient.NewHistoricalSummaryRequest([]string{"8853df00-ae2e-11ed-90af-09bb6422b258"}) // HistoricalSummaryRequest | + sloId := "9c235211-6834-11ea-a78c-6feb38a34414" // string | An identifier for the slo. configuration := openapiclient.NewConfiguration() apiClient := openapiclient.NewAPIClient(configuration) - resp, r, err := apiClient.SloAPI.HistoricalSummaryOp(context.Background(), spaceId).KbnXsrf(kbnXsrf).HistoricalSummaryRequest(historicalSummaryRequest).Execute() + resp, r, err := apiClient.SloAPI.ResetSloOp(context.Background(), spaceId, sloId).KbnXsrf(kbnXsrf).Execute() if err != nil { - fmt.Fprintf(os.Stderr, "Error when calling `SloAPI.HistoricalSummaryOp``: %v\n", err) + fmt.Fprintf(os.Stderr, "Error when calling `SloAPI.ResetSloOp``: %v\n", err) fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) } - // response from `HistoricalSummaryOp`: map[string][]HistoricalSummaryResponseInner - fmt.Fprintf(os.Stdout, "Response from `SloAPI.HistoricalSummaryOp`: %v\n", resp) + // response from `ResetSloOp`: SloDefinitionResponse + fmt.Fprintf(os.Stdout, "Response from `SloAPI.ResetSloOp`: %v\n", resp) } ``` @@ -584,21 +899,22 @@ Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **ctx** | **context.Context** | context for authentication, logging, cancellation, deadlines, tracing, etc. **spaceId** | **string** | An identifier for the space. If `/s/` and the identifier are omitted from the path, the default space is used. | +**sloId** | **string** | An identifier for the slo. 
| ### Other Parameters -Other parameters are passed through a pointer to a apiHistoricalSummaryOpRequest struct via the builder pattern +Other parameters are passed through a pointer to a apiResetSloOpRequest struct via the builder pattern Name | Type | Description | Notes ------------- | ------------- | ------------- | ------------- **kbnXsrf** | **string** | Cross-site request forgery protection | - **historicalSummaryRequest** | [**HistoricalSummaryRequest**](HistoricalSummaryRequest.md) | | + ### Return type -[**map[string][]HistoricalSummaryResponseInner**](array.md) +[**SloDefinitionResponse**](SloDefinitionResponse.md) ### Authorization @@ -606,7 +922,7 @@ Name | Type | Description | Notes ### HTTP request headers -- **Content-Type**: application/json +- **Content-Type**: Not defined - **Accept**: application/json [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) @@ -616,9 +932,9 @@ Name | Type | Description | Notes ## UpdateSloOp -> SloResponse UpdateSloOp(ctx, spaceId, sloId).KbnXsrf(kbnXsrf).UpdateSloRequest(updateSloRequest).Execute() +> SloDefinitionResponse UpdateSloOp(ctx, spaceId, sloId).KbnXsrf(kbnXsrf).UpdateSloRequest(updateSloRequest).Execute() -Updates an SLO +Update an SLO @@ -647,7 +963,7 @@ func main() { fmt.Fprintf(os.Stderr, "Error when calling `SloAPI.UpdateSloOp``: %v\n", err) fmt.Fprintf(os.Stderr, "Full HTTP response: %v\n", r) } - // response from `UpdateSloOp`: SloResponse + // response from `UpdateSloOp`: SloDefinitionResponse fmt.Fprintf(os.Stdout, "Response from `SloAPI.UpdateSloOp`: %v\n", resp) } ``` @@ -675,7 +991,7 @@ Name | Type | Description | Notes ### Return type -[**SloResponse**](SloResponse.md) +[**SloDefinitionResponse**](SloDefinitionResponse.md) ### Authorization diff --git a/generated/slo/docs/SloResponse.md b/generated/slo/docs/SloDefinitionResponse.md similarity index 60% rename from generated/slo/docs/SloResponse.md rename to generated/slo/docs/SloDefinitionResponse.md index 071b3ecd8..5fdaa4081 100644 --- a/generated/slo/docs/SloResponse.md +++ b/generated/slo/docs/SloDefinitionResponse.md @@ -1,4 +1,4 @@ -# SloResponse +# SloDefinitionResponse ## Properties @@ -7,359 +7,338 @@ Name | Type | Description | Notes **Id** | **string** | The identifier of the SLO. | **Name** | **string** | The name of the SLO. | **Description** | **string** | The description of the SLO. 
| -**Indicator** | [**SloResponseIndicator**](SloResponseIndicator.md) | | +**Indicator** | [**SloWithSummaryResponseIndicator**](SloWithSummaryResponseIndicator.md) | | **TimeWindow** | [**TimeWindow**](TimeWindow.md) | | **BudgetingMethod** | [**BudgetingMethod**](BudgetingMethod.md) | | **Objective** | [**Objective**](Objective.md) | | **Settings** | [**Settings**](Settings.md) | | **Revision** | **float64** | The SLO revision | -**Summary** | [**Summary**](Summary.md) | | **Enabled** | **bool** | Indicate if the SLO is enabled | -**GroupBy** | [**SloResponseGroupBy**](SloResponseGroupBy.md) | | -**InstanceId** | **string** | the value derived from the groupBy field, if present, otherwise '*' | +**GroupBy** | [**GroupBy**](GroupBy.md) | | **Tags** | **[]string** | List of tags | **CreatedAt** | **string** | The creation date | **UpdatedAt** | **string** | The last update date | +**Version** | **float64** | The internal SLO version | ## Methods -### NewSloResponse +### NewSloDefinitionResponse -`func NewSloResponse(id string, name string, description string, indicator SloResponseIndicator, timeWindow TimeWindow, budgetingMethod BudgetingMethod, objective Objective, settings Settings, revision float64, summary Summary, enabled bool, groupBy SloResponseGroupBy, instanceId string, tags []string, createdAt string, updatedAt string, ) *SloResponse` +`func NewSloDefinitionResponse(id string, name string, description string, indicator SloWithSummaryResponseIndicator, timeWindow TimeWindow, budgetingMethod BudgetingMethod, objective Objective, settings Settings, revision float64, enabled bool, groupBy GroupBy, tags []string, createdAt string, updatedAt string, version float64, ) *SloDefinitionResponse` -NewSloResponse instantiates a new SloResponse object +NewSloDefinitionResponse instantiates a new SloDefinitionResponse object This constructor will assign default values to properties that have it defined, and makes sure properties required by API are set, but the set of arguments will change when the set of required properties is changed -### NewSloResponseWithDefaults +### NewSloDefinitionResponseWithDefaults -`func NewSloResponseWithDefaults() *SloResponse` +`func NewSloDefinitionResponseWithDefaults() *SloDefinitionResponse` -NewSloResponseWithDefaults instantiates a new SloResponse object +NewSloDefinitionResponseWithDefaults instantiates a new SloDefinitionResponse object This constructor will only assign default values to properties that have it defined, but it doesn't guarantee that properties required by API are set ### GetId -`func (o *SloResponse) GetId() string` +`func (o *SloDefinitionResponse) GetId() string` GetId returns the Id field if non-nil, zero value otherwise. ### GetIdOk -`func (o *SloResponse) GetIdOk() (*string, bool)` +`func (o *SloDefinitionResponse) GetIdOk() (*string, bool)` GetIdOk returns a tuple with the Id field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetId -`func (o *SloResponse) SetId(v string)` +`func (o *SloDefinitionResponse) SetId(v string)` SetId sets Id field to given value. ### GetName -`func (o *SloResponse) GetName() string` +`func (o *SloDefinitionResponse) GetName() string` GetName returns the Name field if non-nil, zero value otherwise. ### GetNameOk -`func (o *SloResponse) GetNameOk() (*string, bool)` +`func (o *SloDefinitionResponse) GetNameOk() (*string, bool)` GetNameOk returns a tuple with the Name field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. 
### SetName -`func (o *SloResponse) SetName(v string)` +`func (o *SloDefinitionResponse) SetName(v string)` SetName sets Name field to given value. ### GetDescription -`func (o *SloResponse) GetDescription() string` +`func (o *SloDefinitionResponse) GetDescription() string` GetDescription returns the Description field if non-nil, zero value otherwise. ### GetDescriptionOk -`func (o *SloResponse) GetDescriptionOk() (*string, bool)` +`func (o *SloDefinitionResponse) GetDescriptionOk() (*string, bool)` GetDescriptionOk returns a tuple with the Description field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetDescription -`func (o *SloResponse) SetDescription(v string)` +`func (o *SloDefinitionResponse) SetDescription(v string)` SetDescription sets Description field to given value. ### GetIndicator -`func (o *SloResponse) GetIndicator() SloResponseIndicator` +`func (o *SloDefinitionResponse) GetIndicator() SloWithSummaryResponseIndicator` GetIndicator returns the Indicator field if non-nil, zero value otherwise. ### GetIndicatorOk -`func (o *SloResponse) GetIndicatorOk() (*SloResponseIndicator, bool)` +`func (o *SloDefinitionResponse) GetIndicatorOk() (*SloWithSummaryResponseIndicator, bool)` GetIndicatorOk returns a tuple with the Indicator field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetIndicator -`func (o *SloResponse) SetIndicator(v SloResponseIndicator)` +`func (o *SloDefinitionResponse) SetIndicator(v SloWithSummaryResponseIndicator)` SetIndicator sets Indicator field to given value. ### GetTimeWindow -`func (o *SloResponse) GetTimeWindow() TimeWindow` +`func (o *SloDefinitionResponse) GetTimeWindow() TimeWindow` GetTimeWindow returns the TimeWindow field if non-nil, zero value otherwise. ### GetTimeWindowOk -`func (o *SloResponse) GetTimeWindowOk() (*TimeWindow, bool)` +`func (o *SloDefinitionResponse) GetTimeWindowOk() (*TimeWindow, bool)` GetTimeWindowOk returns a tuple with the TimeWindow field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetTimeWindow -`func (o *SloResponse) SetTimeWindow(v TimeWindow)` +`func (o *SloDefinitionResponse) SetTimeWindow(v TimeWindow)` SetTimeWindow sets TimeWindow field to given value. ### GetBudgetingMethod -`func (o *SloResponse) GetBudgetingMethod() BudgetingMethod` +`func (o *SloDefinitionResponse) GetBudgetingMethod() BudgetingMethod` GetBudgetingMethod returns the BudgetingMethod field if non-nil, zero value otherwise. ### GetBudgetingMethodOk -`func (o *SloResponse) GetBudgetingMethodOk() (*BudgetingMethod, bool)` +`func (o *SloDefinitionResponse) GetBudgetingMethodOk() (*BudgetingMethod, bool)` GetBudgetingMethodOk returns a tuple with the BudgetingMethod field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetBudgetingMethod -`func (o *SloResponse) SetBudgetingMethod(v BudgetingMethod)` +`func (o *SloDefinitionResponse) SetBudgetingMethod(v BudgetingMethod)` SetBudgetingMethod sets BudgetingMethod field to given value. ### GetObjective -`func (o *SloResponse) GetObjective() Objective` +`func (o *SloDefinitionResponse) GetObjective() Objective` GetObjective returns the Objective field if non-nil, zero value otherwise. 
### GetObjectiveOk -`func (o *SloResponse) GetObjectiveOk() (*Objective, bool)` +`func (o *SloDefinitionResponse) GetObjectiveOk() (*Objective, bool)` GetObjectiveOk returns a tuple with the Objective field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetObjective -`func (o *SloResponse) SetObjective(v Objective)` +`func (o *SloDefinitionResponse) SetObjective(v Objective)` SetObjective sets Objective field to given value. ### GetSettings -`func (o *SloResponse) GetSettings() Settings` +`func (o *SloDefinitionResponse) GetSettings() Settings` GetSettings returns the Settings field if non-nil, zero value otherwise. ### GetSettingsOk -`func (o *SloResponse) GetSettingsOk() (*Settings, bool)` +`func (o *SloDefinitionResponse) GetSettingsOk() (*Settings, bool)` GetSettingsOk returns a tuple with the Settings field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetSettings -`func (o *SloResponse) SetSettings(v Settings)` +`func (o *SloDefinitionResponse) SetSettings(v Settings)` SetSettings sets Settings field to given value. ### GetRevision -`func (o *SloResponse) GetRevision() float64` +`func (o *SloDefinitionResponse) GetRevision() float64` GetRevision returns the Revision field if non-nil, zero value otherwise. ### GetRevisionOk -`func (o *SloResponse) GetRevisionOk() (*float64, bool)` +`func (o *SloDefinitionResponse) GetRevisionOk() (*float64, bool)` GetRevisionOk returns a tuple with the Revision field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetRevision -`func (o *SloResponse) SetRevision(v float64)` +`func (o *SloDefinitionResponse) SetRevision(v float64)` SetRevision sets Revision field to given value. -### GetSummary - -`func (o *SloResponse) GetSummary() Summary` - -GetSummary returns the Summary field if non-nil, zero value otherwise. - -### GetSummaryOk - -`func (o *SloResponse) GetSummaryOk() (*Summary, bool)` - -GetSummaryOk returns a tuple with the Summary field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetSummary - -`func (o *SloResponse) SetSummary(v Summary)` - -SetSummary sets Summary field to given value. - - ### GetEnabled -`func (o *SloResponse) GetEnabled() bool` +`func (o *SloDefinitionResponse) GetEnabled() bool` GetEnabled returns the Enabled field if non-nil, zero value otherwise. ### GetEnabledOk -`func (o *SloResponse) GetEnabledOk() (*bool, bool)` +`func (o *SloDefinitionResponse) GetEnabledOk() (*bool, bool)` GetEnabledOk returns a tuple with the Enabled field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetEnabled -`func (o *SloResponse) SetEnabled(v bool)` +`func (o *SloDefinitionResponse) SetEnabled(v bool)` SetEnabled sets Enabled field to given value. ### GetGroupBy -`func (o *SloResponse) GetGroupBy() SloResponseGroupBy` +`func (o *SloDefinitionResponse) GetGroupBy() GroupBy` GetGroupBy returns the GroupBy field if non-nil, zero value otherwise. ### GetGroupByOk -`func (o *SloResponse) GetGroupByOk() (*SloResponseGroupBy, bool)` +`func (o *SloDefinitionResponse) GetGroupByOk() (*GroupBy, bool)` GetGroupByOk returns a tuple with the GroupBy field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetGroupBy -`func (o *SloResponse) SetGroupBy(v SloResponseGroupBy)` +`func (o *SloDefinitionResponse) SetGroupBy(v GroupBy)` SetGroupBy sets GroupBy field to given value. 
-### GetInstanceId - -`func (o *SloResponse) GetInstanceId() string` - -GetInstanceId returns the InstanceId field if non-nil, zero value otherwise. - -### GetInstanceIdOk - -`func (o *SloResponse) GetInstanceIdOk() (*string, bool)` - -GetInstanceIdOk returns a tuple with the InstanceId field if it's non-nil, zero value otherwise -and a boolean to check if the value has been set. - -### SetInstanceId - -`func (o *SloResponse) SetInstanceId(v string)` - -SetInstanceId sets InstanceId field to given value. - - ### GetTags -`func (o *SloResponse) GetTags() []string` +`func (o *SloDefinitionResponse) GetTags() []string` GetTags returns the Tags field if non-nil, zero value otherwise. ### GetTagsOk -`func (o *SloResponse) GetTagsOk() (*[]string, bool)` +`func (o *SloDefinitionResponse) GetTagsOk() (*[]string, bool)` GetTagsOk returns a tuple with the Tags field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetTags -`func (o *SloResponse) SetTags(v []string)` +`func (o *SloDefinitionResponse) SetTags(v []string)` SetTags sets Tags field to given value. ### GetCreatedAt -`func (o *SloResponse) GetCreatedAt() string` +`func (o *SloDefinitionResponse) GetCreatedAt() string` GetCreatedAt returns the CreatedAt field if non-nil, zero value otherwise. ### GetCreatedAtOk -`func (o *SloResponse) GetCreatedAtOk() (*string, bool)` +`func (o *SloDefinitionResponse) GetCreatedAtOk() (*string, bool)` GetCreatedAtOk returns a tuple with the CreatedAt field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetCreatedAt -`func (o *SloResponse) SetCreatedAt(v string)` +`func (o *SloDefinitionResponse) SetCreatedAt(v string)` SetCreatedAt sets CreatedAt field to given value. ### GetUpdatedAt -`func (o *SloResponse) GetUpdatedAt() string` +`func (o *SloDefinitionResponse) GetUpdatedAt() string` GetUpdatedAt returns the UpdatedAt field if non-nil, zero value otherwise. ### GetUpdatedAtOk -`func (o *SloResponse) GetUpdatedAtOk() (*string, bool)` +`func (o *SloDefinitionResponse) GetUpdatedAtOk() (*string, bool)` GetUpdatedAtOk returns a tuple with the UpdatedAt field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetUpdatedAt -`func (o *SloResponse) SetUpdatedAt(v string)` +`func (o *SloDefinitionResponse) SetUpdatedAt(v string)` SetUpdatedAt sets UpdatedAt field to given value. +### GetVersion + +`func (o *SloDefinitionResponse) GetVersion() float64` + +GetVersion returns the Version field if non-nil, zero value otherwise. + +### GetVersionOk + +`func (o *SloDefinitionResponse) GetVersionOk() (*float64, bool)` + +GetVersionOk returns a tuple with the Version field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetVersion + +`func (o *SloDefinitionResponse) SetVersion(v float64)` + +SetVersion sets Version field to given value. + + [[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) diff --git a/generated/slo/docs/SloWithSummaryResponse.md b/generated/slo/docs/SloWithSummaryResponse.md new file mode 100644 index 000000000..2a57a3050 --- /dev/null +++ b/generated/slo/docs/SloWithSummaryResponse.md @@ -0,0 +1,387 @@ +# SloWithSummaryResponse + +## Properties + +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**Id** | **string** | The identifier of the SLO. 
| +**Name** | **string** | The name of the SLO. | +**Description** | **string** | The description of the SLO. | +**Indicator** | [**SloWithSummaryResponseIndicator**](SloWithSummaryResponseIndicator.md) | | +**TimeWindow** | [**TimeWindow**](TimeWindow.md) | | +**BudgetingMethod** | [**BudgetingMethod**](BudgetingMethod.md) | | +**Objective** | [**Objective**](Objective.md) | | +**Settings** | [**Settings**](Settings.md) | | +**Revision** | **float64** | The SLO revision | +**Summary** | [**Summary**](Summary.md) | | +**Enabled** | **bool** | Indicate if the SLO is enabled | +**GroupBy** | [**GroupBy**](GroupBy.md) | | +**InstanceId** | **string** | the value derived from the groupBy field, if present, otherwise '*' | +**Tags** | **[]string** | List of tags | +**CreatedAt** | **string** | The creation date | +**UpdatedAt** | **string** | The last update date | +**Version** | **float64** | The internal SLO version | + +## Methods + +### NewSloWithSummaryResponse + +`func NewSloWithSummaryResponse(id string, name string, description string, indicator SloWithSummaryResponseIndicator, timeWindow TimeWindow, budgetingMethod BudgetingMethod, objective Objective, settings Settings, revision float64, summary Summary, enabled bool, groupBy GroupBy, instanceId string, tags []string, createdAt string, updatedAt string, version float64, ) *SloWithSummaryResponse` + +NewSloWithSummaryResponse instantiates a new SloWithSummaryResponse object +This constructor will assign default values to properties that have it defined, +and makes sure properties required by API are set, but the set of arguments +will change when the set of required properties is changed + +### NewSloWithSummaryResponseWithDefaults + +`func NewSloWithSummaryResponseWithDefaults() *SloWithSummaryResponse` + +NewSloWithSummaryResponseWithDefaults instantiates a new SloWithSummaryResponse object +This constructor will only assign default values to properties that have it defined, +but it doesn't guarantee that properties required by API are set + +### GetId + +`func (o *SloWithSummaryResponse) GetId() string` + +GetId returns the Id field if non-nil, zero value otherwise. + +### GetIdOk + +`func (o *SloWithSummaryResponse) GetIdOk() (*string, bool)` + +GetIdOk returns a tuple with the Id field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetId + +`func (o *SloWithSummaryResponse) SetId(v string)` + +SetId sets Id field to given value. + + +### GetName + +`func (o *SloWithSummaryResponse) GetName() string` + +GetName returns the Name field if non-nil, zero value otherwise. + +### GetNameOk + +`func (o *SloWithSummaryResponse) GetNameOk() (*string, bool)` + +GetNameOk returns a tuple with the Name field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetName + +`func (o *SloWithSummaryResponse) SetName(v string)` + +SetName sets Name field to given value. + + +### GetDescription + +`func (o *SloWithSummaryResponse) GetDescription() string` + +GetDescription returns the Description field if non-nil, zero value otherwise. + +### GetDescriptionOk + +`func (o *SloWithSummaryResponse) GetDescriptionOk() (*string, bool)` + +GetDescriptionOk returns a tuple with the Description field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetDescription + +`func (o *SloWithSummaryResponse) SetDescription(v string)` + +SetDescription sets Description field to given value. 
+ + +### GetIndicator + +`func (o *SloWithSummaryResponse) GetIndicator() SloWithSummaryResponseIndicator` + +GetIndicator returns the Indicator field if non-nil, zero value otherwise. + +### GetIndicatorOk + +`func (o *SloWithSummaryResponse) GetIndicatorOk() (*SloWithSummaryResponseIndicator, bool)` + +GetIndicatorOk returns a tuple with the Indicator field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetIndicator + +`func (o *SloWithSummaryResponse) SetIndicator(v SloWithSummaryResponseIndicator)` + +SetIndicator sets Indicator field to given value. + + +### GetTimeWindow + +`func (o *SloWithSummaryResponse) GetTimeWindow() TimeWindow` + +GetTimeWindow returns the TimeWindow field if non-nil, zero value otherwise. + +### GetTimeWindowOk + +`func (o *SloWithSummaryResponse) GetTimeWindowOk() (*TimeWindow, bool)` + +GetTimeWindowOk returns a tuple with the TimeWindow field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetTimeWindow + +`func (o *SloWithSummaryResponse) SetTimeWindow(v TimeWindow)` + +SetTimeWindow sets TimeWindow field to given value. + + +### GetBudgetingMethod + +`func (o *SloWithSummaryResponse) GetBudgetingMethod() BudgetingMethod` + +GetBudgetingMethod returns the BudgetingMethod field if non-nil, zero value otherwise. + +### GetBudgetingMethodOk + +`func (o *SloWithSummaryResponse) GetBudgetingMethodOk() (*BudgetingMethod, bool)` + +GetBudgetingMethodOk returns a tuple with the BudgetingMethod field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetBudgetingMethod + +`func (o *SloWithSummaryResponse) SetBudgetingMethod(v BudgetingMethod)` + +SetBudgetingMethod sets BudgetingMethod field to given value. + + +### GetObjective + +`func (o *SloWithSummaryResponse) GetObjective() Objective` + +GetObjective returns the Objective field if non-nil, zero value otherwise. + +### GetObjectiveOk + +`func (o *SloWithSummaryResponse) GetObjectiveOk() (*Objective, bool)` + +GetObjectiveOk returns a tuple with the Objective field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetObjective + +`func (o *SloWithSummaryResponse) SetObjective(v Objective)` + +SetObjective sets Objective field to given value. + + +### GetSettings + +`func (o *SloWithSummaryResponse) GetSettings() Settings` + +GetSettings returns the Settings field if non-nil, zero value otherwise. + +### GetSettingsOk + +`func (o *SloWithSummaryResponse) GetSettingsOk() (*Settings, bool)` + +GetSettingsOk returns a tuple with the Settings field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSettings + +`func (o *SloWithSummaryResponse) SetSettings(v Settings)` + +SetSettings sets Settings field to given value. + + +### GetRevision + +`func (o *SloWithSummaryResponse) GetRevision() float64` + +GetRevision returns the Revision field if non-nil, zero value otherwise. + +### GetRevisionOk + +`func (o *SloWithSummaryResponse) GetRevisionOk() (*float64, bool)` + +GetRevisionOk returns a tuple with the Revision field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetRevision + +`func (o *SloWithSummaryResponse) SetRevision(v float64)` + +SetRevision sets Revision field to given value. + + +### GetSummary + +`func (o *SloWithSummaryResponse) GetSummary() Summary` + +GetSummary returns the Summary field if non-nil, zero value otherwise. 
+ +### GetSummaryOk + +`func (o *SloWithSummaryResponse) GetSummaryOk() (*Summary, bool)` + +GetSummaryOk returns a tuple with the Summary field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetSummary + +`func (o *SloWithSummaryResponse) SetSummary(v Summary)` + +SetSummary sets Summary field to given value. + + +### GetEnabled + +`func (o *SloWithSummaryResponse) GetEnabled() bool` + +GetEnabled returns the Enabled field if non-nil, zero value otherwise. + +### GetEnabledOk + +`func (o *SloWithSummaryResponse) GetEnabledOk() (*bool, bool)` + +GetEnabledOk returns a tuple with the Enabled field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetEnabled + +`func (o *SloWithSummaryResponse) SetEnabled(v bool)` + +SetEnabled sets Enabled field to given value. + + +### GetGroupBy + +`func (o *SloWithSummaryResponse) GetGroupBy() GroupBy` + +GetGroupBy returns the GroupBy field if non-nil, zero value otherwise. + +### GetGroupByOk + +`func (o *SloWithSummaryResponse) GetGroupByOk() (*GroupBy, bool)` + +GetGroupByOk returns a tuple with the GroupBy field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetGroupBy + +`func (o *SloWithSummaryResponse) SetGroupBy(v GroupBy)` + +SetGroupBy sets GroupBy field to given value. + + +### GetInstanceId + +`func (o *SloWithSummaryResponse) GetInstanceId() string` + +GetInstanceId returns the InstanceId field if non-nil, zero value otherwise. + +### GetInstanceIdOk + +`func (o *SloWithSummaryResponse) GetInstanceIdOk() (*string, bool)` + +GetInstanceIdOk returns a tuple with the InstanceId field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetInstanceId + +`func (o *SloWithSummaryResponse) SetInstanceId(v string)` + +SetInstanceId sets InstanceId field to given value. + + +### GetTags + +`func (o *SloWithSummaryResponse) GetTags() []string` + +GetTags returns the Tags field if non-nil, zero value otherwise. + +### GetTagsOk + +`func (o *SloWithSummaryResponse) GetTagsOk() (*[]string, bool)` + +GetTagsOk returns a tuple with the Tags field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetTags + +`func (o *SloWithSummaryResponse) SetTags(v []string)` + +SetTags sets Tags field to given value. + + +### GetCreatedAt + +`func (o *SloWithSummaryResponse) GetCreatedAt() string` + +GetCreatedAt returns the CreatedAt field if non-nil, zero value otherwise. + +### GetCreatedAtOk + +`func (o *SloWithSummaryResponse) GetCreatedAtOk() (*string, bool)` + +GetCreatedAtOk returns a tuple with the CreatedAt field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetCreatedAt + +`func (o *SloWithSummaryResponse) SetCreatedAt(v string)` + +SetCreatedAt sets CreatedAt field to given value. + + +### GetUpdatedAt + +`func (o *SloWithSummaryResponse) GetUpdatedAt() string` + +GetUpdatedAt returns the UpdatedAt field if non-nil, zero value otherwise. + +### GetUpdatedAtOk + +`func (o *SloWithSummaryResponse) GetUpdatedAtOk() (*string, bool)` + +GetUpdatedAtOk returns a tuple with the UpdatedAt field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetUpdatedAt + +`func (o *SloWithSummaryResponse) SetUpdatedAt(v string)` + +SetUpdatedAt sets UpdatedAt field to given value. 
+ + +### GetVersion + +`func (o *SloWithSummaryResponse) GetVersion() float64` + +GetVersion returns the Version field if non-nil, zero value otherwise. + +### GetVersionOk + +`func (o *SloWithSummaryResponse) GetVersionOk() (*float64, bool)` + +GetVersionOk returns a tuple with the Version field if it's non-nil, zero value otherwise +and a boolean to check if the value has been set. + +### SetVersion + +`func (o *SloWithSummaryResponse) SetVersion(v float64)` + +SetVersion sets Version field to given value. + + + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/generated/slo/docs/SloResponseIndicator.md b/generated/slo/docs/SloWithSummaryResponseIndicator.md similarity index 57% rename from generated/slo/docs/SloResponseIndicator.md rename to generated/slo/docs/SloWithSummaryResponseIndicator.md index 0f3ea7851..269441cc0 100644 --- a/generated/slo/docs/SloResponseIndicator.md +++ b/generated/slo/docs/SloWithSummaryResponseIndicator.md @@ -1,4 +1,4 @@ -# SloResponseIndicator +# SloWithSummaryResponseIndicator ## Properties @@ -9,59 +9,59 @@ Name | Type | Description | Notes ## Methods -### NewSloResponseIndicator +### NewSloWithSummaryResponseIndicator -`func NewSloResponseIndicator(params IndicatorPropertiesTimesliceMetricParams, type_ string, ) *SloResponseIndicator` +`func NewSloWithSummaryResponseIndicator(params IndicatorPropertiesTimesliceMetricParams, type_ string, ) *SloWithSummaryResponseIndicator` -NewSloResponseIndicator instantiates a new SloResponseIndicator object +NewSloWithSummaryResponseIndicator instantiates a new SloWithSummaryResponseIndicator object This constructor will assign default values to properties that have it defined, and makes sure properties required by API are set, but the set of arguments will change when the set of required properties is changed -### NewSloResponseIndicatorWithDefaults +### NewSloWithSummaryResponseIndicatorWithDefaults -`func NewSloResponseIndicatorWithDefaults() *SloResponseIndicator` +`func NewSloWithSummaryResponseIndicatorWithDefaults() *SloWithSummaryResponseIndicator` -NewSloResponseIndicatorWithDefaults instantiates a new SloResponseIndicator object +NewSloWithSummaryResponseIndicatorWithDefaults instantiates a new SloWithSummaryResponseIndicator object This constructor will only assign default values to properties that have it defined, but it doesn't guarantee that properties required by API are set ### GetParams -`func (o *SloResponseIndicator) GetParams() IndicatorPropertiesTimesliceMetricParams` +`func (o *SloWithSummaryResponseIndicator) GetParams() IndicatorPropertiesTimesliceMetricParams` GetParams returns the Params field if non-nil, zero value otherwise. ### GetParamsOk -`func (o *SloResponseIndicator) GetParamsOk() (*IndicatorPropertiesTimesliceMetricParams, bool)` +`func (o *SloWithSummaryResponseIndicator) GetParamsOk() (*IndicatorPropertiesTimesliceMetricParams, bool)` GetParamsOk returns a tuple with the Params field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetParams -`func (o *SloResponseIndicator) SetParams(v IndicatorPropertiesTimesliceMetricParams)` +`func (o *SloWithSummaryResponseIndicator) SetParams(v IndicatorPropertiesTimesliceMetricParams)` SetParams sets Params field to given value. 
### GetType -`func (o *SloResponseIndicator) GetType() string` +`func (o *SloWithSummaryResponseIndicator) GetType() string` GetType returns the Type field if non-nil, zero value otherwise. ### GetTypeOk -`func (o *SloResponseIndicator) GetTypeOk() (*string, bool)` +`func (o *SloWithSummaryResponseIndicator) GetTypeOk() (*string, bool)` GetTypeOk returns a tuple with the Type field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetType -`func (o *SloResponseIndicator) SetType(v string)` +`func (o *SloWithSummaryResponseIndicator) SetType(v string)` SetType sets Type field to given value. diff --git a/generated/slo/docs/UpdateSloRequest.md b/generated/slo/docs/UpdateSloRequest.md index 2a4cdb393..4518217c5 100644 --- a/generated/slo/docs/UpdateSloRequest.md +++ b/generated/slo/docs/UpdateSloRequest.md @@ -11,7 +11,7 @@ Name | Type | Description | Notes **BudgetingMethod** | Pointer to [**BudgetingMethod**](BudgetingMethod.md) | | [optional] **Objective** | Pointer to [**Objective**](Objective.md) | | [optional] **Settings** | Pointer to [**Settings**](Settings.md) | | [optional] -**GroupBy** | Pointer to [**SloResponseGroupBy**](SloResponseGroupBy.md) | | [optional] +**GroupBy** | Pointer to [**GroupBy**](GroupBy.md) | | [optional] **Tags** | Pointer to **[]string** | List of tags | [optional] ## Methods @@ -210,20 +210,20 @@ HasSettings returns a boolean if a field has been set. ### GetGroupBy -`func (o *UpdateSloRequest) GetGroupBy() SloResponseGroupBy` +`func (o *UpdateSloRequest) GetGroupBy() GroupBy` GetGroupBy returns the GroupBy field if non-nil, zero value otherwise. ### GetGroupByOk -`func (o *UpdateSloRequest) GetGroupByOk() (*SloResponseGroupBy, bool)` +`func (o *UpdateSloRequest) GetGroupByOk() (*GroupBy, bool)` GetGroupByOk returns a tuple with the GroupBy field if it's non-nil, zero value otherwise and a boolean to check if the value has been set. ### SetGroupBy -`func (o *UpdateSloRequest) SetGroupBy(v SloResponseGroupBy)` +`func (o *UpdateSloRequest) SetGroupBy(v GroupBy)` SetGroupBy sets GroupBy field to given value. diff --git a/generated/slo/model_400_response.go b/generated/slo/model_400_response.go index 1965ef831..cc5b81cbd 100644 --- a/generated/slo/model_400_response.go +++ b/generated/slo/model_400_response.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_401_response.go b/generated/slo/model_401_response.go index b4f7f83e7..d971daee0 100644 --- a/generated/slo/model_401_response.go +++ b/generated/slo/model_401_response.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_403_response.go b/generated/slo/model_403_response.go index 4ccd57a9a..339e40e7a 100644 --- a/generated/slo/model_403_response.go +++ b/generated/slo/model_403_response.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
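
A minimal usage sketch (not part of the generated patch) of the `group_by` change above: `UpdateSloRequest` now takes the shared `GroupBy` type instead of `SloResponseGroupBy`. The import path and the zero-value construction of `GroupBy` are assumptions; only `SetGroupBy(v GroupBy)` and `GetGroupByOk() (*GroupBy, bool)` are taken from this patch.

```go
// Hypothetical sketch, not generated code.
package main

import (
	"fmt"

	slo "github.com/elastic/terraform-provider-elasticstack/generated/slo" // assumed import path
)

func main() {
	var groupBy slo.GroupBy // how GroupBy is populated depends on its own generated helpers, not shown in this hunk

	req := slo.UpdateSloRequest{} // the generated NewUpdateSloRequestWithDefaults() constructor could be used instead
	req.SetGroupBy(groupBy)       // signature per this patch: SetGroupBy(v GroupBy)

	if _, ok := req.GetGroupByOk(); ok {
		fmt.Println("group_by has been set on the update request")
	}
}
```
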
diff --git a/generated/slo/model_404_response.go b/generated/slo/model_404_response.go index a52e5d6e3..f31dc6a5a 100644 --- a/generated/slo/model_404_response.go +++ b/generated/slo/model_404_response.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_409_response.go b/generated/slo/model_409_response.go index 8d3405b68..c9ac7217d 100644 --- a/generated/slo/model_409_response.go +++ b/generated/slo/model_409_response.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_budgeting_method.go b/generated/slo/model_budgeting_method.go index b22d09e1c..f0b471661 100644 --- a/generated/slo/model_budgeting_method.go +++ b/generated/slo/model_budgeting_method.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_bulk_delete_request.go b/generated/slo/model_bulk_delete_request.go new file mode 100644 index 000000000..b6e932a65 --- /dev/null +++ b/generated/slo/model_bulk_delete_request.go @@ -0,0 +1,116 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the BulkDeleteRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &BulkDeleteRequest{} + +// BulkDeleteRequest The bulk delete SLO request takes a list of SLOs Definition id to delete. +type BulkDeleteRequest struct { + // An array of SLO Definition id + List []string `json:"list"` +} + +// NewBulkDeleteRequest instantiates a new BulkDeleteRequest object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewBulkDeleteRequest(list []string) *BulkDeleteRequest { + this := BulkDeleteRequest{} + this.List = list + return &this +} + +// NewBulkDeleteRequestWithDefaults instantiates a new BulkDeleteRequest object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewBulkDeleteRequestWithDefaults() *BulkDeleteRequest { + this := BulkDeleteRequest{} + return &this +} + +// GetList returns the List field value +func (o *BulkDeleteRequest) GetList() []string { + if o == nil { + var ret []string + return ret + } + + return o.List +} + +// GetListOk returns a tuple with the List field value +// and a boolean to check if the value has been set. 
+func (o *BulkDeleteRequest) GetListOk() ([]string, bool) { + if o == nil { + return nil, false + } + return o.List, true +} + +// SetList sets field value +func (o *BulkDeleteRequest) SetList(v []string) { + o.List = v +} + +func (o BulkDeleteRequest) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o BulkDeleteRequest) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["list"] = o.List + return toSerialize, nil +} + +type NullableBulkDeleteRequest struct { + value *BulkDeleteRequest + isSet bool +} + +func (v NullableBulkDeleteRequest) Get() *BulkDeleteRequest { + return v.value +} + +func (v *NullableBulkDeleteRequest) Set(val *BulkDeleteRequest) { + v.value = val + v.isSet = true +} + +func (v NullableBulkDeleteRequest) IsSet() bool { + return v.isSet +} + +func (v *NullableBulkDeleteRequest) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableBulkDeleteRequest(val *BulkDeleteRequest) *NullableBulkDeleteRequest { + return &NullableBulkDeleteRequest{value: val, isSet: true} +} + +func (v NullableBulkDeleteRequest) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableBulkDeleteRequest) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_bulk_delete_response.go b/generated/slo/model_bulk_delete_response.go new file mode 100644 index 000000000..e00c74fcf --- /dev/null +++ b/generated/slo/model_bulk_delete_response.go @@ -0,0 +1,125 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the BulkDeleteResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &BulkDeleteResponse{} + +// BulkDeleteResponse The bulk delete SLO response returns a taskId that can be used to poll for its status +type BulkDeleteResponse struct { + // The taskId of the bulk delete operation + TaskId *string `json:"taskId,omitempty"` +} + +// NewBulkDeleteResponse instantiates a new BulkDeleteResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewBulkDeleteResponse() *BulkDeleteResponse { + this := BulkDeleteResponse{} + return &this +} + +// NewBulkDeleteResponseWithDefaults instantiates a new BulkDeleteResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewBulkDeleteResponseWithDefaults() *BulkDeleteResponse { + this := BulkDeleteResponse{} + return &this +} + +// GetTaskId returns the TaskId field value if set, zero value otherwise. +func (o *BulkDeleteResponse) GetTaskId() string { + if o == nil || IsNil(o.TaskId) { + var ret string + return ret + } + return *o.TaskId +} + +// GetTaskIdOk returns a tuple with the TaskId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkDeleteResponse) GetTaskIdOk() (*string, bool) { + if o == nil || IsNil(o.TaskId) { + return nil, false + } + return o.TaskId, true +} + +// HasTaskId returns a boolean if a field has been set. 
+func (o *BulkDeleteResponse) HasTaskId() bool { + if o != nil && !IsNil(o.TaskId) { + return true + } + + return false +} + +// SetTaskId gets a reference to the given string and assigns it to the TaskId field. +func (o *BulkDeleteResponse) SetTaskId(v string) { + o.TaskId = &v +} + +func (o BulkDeleteResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o BulkDeleteResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.TaskId) { + toSerialize["taskId"] = o.TaskId + } + return toSerialize, nil +} + +type NullableBulkDeleteResponse struct { + value *BulkDeleteResponse + isSet bool +} + +func (v NullableBulkDeleteResponse) Get() *BulkDeleteResponse { + return v.value +} + +func (v *NullableBulkDeleteResponse) Set(val *BulkDeleteResponse) { + v.value = val + v.isSet = true +} + +func (v NullableBulkDeleteResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableBulkDeleteResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableBulkDeleteResponse(val *BulkDeleteResponse) *NullableBulkDeleteResponse { + return &NullableBulkDeleteResponse{value: val, isSet: true} +} + +func (v NullableBulkDeleteResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableBulkDeleteResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_bulk_delete_status_response.go b/generated/slo/model_bulk_delete_status_response.go new file mode 100644 index 000000000..9a3aacbdb --- /dev/null +++ b/generated/slo/model_bulk_delete_status_response.go @@ -0,0 +1,199 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the BulkDeleteStatusResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &BulkDeleteStatusResponse{} + +// BulkDeleteStatusResponse Indicates if the bulk deletion is completed, with the detailed results of the operation. 
+type BulkDeleteStatusResponse struct { + // Indicates if the bulk deletion operation is completed + IsDone *bool `json:"isDone,omitempty"` + // The error message if the bulk deletion operation failed + Error *string `json:"error,omitempty"` + // The results of the bulk deletion operation, including the success status and any errors for each SLO + Results []BulkDeleteStatusResponseResultsInner `json:"results,omitempty"` +} + +// NewBulkDeleteStatusResponse instantiates a new BulkDeleteStatusResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewBulkDeleteStatusResponse() *BulkDeleteStatusResponse { + this := BulkDeleteStatusResponse{} + return &this +} + +// NewBulkDeleteStatusResponseWithDefaults instantiates a new BulkDeleteStatusResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewBulkDeleteStatusResponseWithDefaults() *BulkDeleteStatusResponse { + this := BulkDeleteStatusResponse{} + return &this +} + +// GetIsDone returns the IsDone field value if set, zero value otherwise. +func (o *BulkDeleteStatusResponse) GetIsDone() bool { + if o == nil || IsNil(o.IsDone) { + var ret bool + return ret + } + return *o.IsDone +} + +// GetIsDoneOk returns a tuple with the IsDone field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkDeleteStatusResponse) GetIsDoneOk() (*bool, bool) { + if o == nil || IsNil(o.IsDone) { + return nil, false + } + return o.IsDone, true +} + +// HasIsDone returns a boolean if a field has been set. +func (o *BulkDeleteStatusResponse) HasIsDone() bool { + if o != nil && !IsNil(o.IsDone) { + return true + } + + return false +} + +// SetIsDone gets a reference to the given bool and assigns it to the IsDone field. +func (o *BulkDeleteStatusResponse) SetIsDone(v bool) { + o.IsDone = &v +} + +// GetError returns the Error field value if set, zero value otherwise. +func (o *BulkDeleteStatusResponse) GetError() string { + if o == nil || IsNil(o.Error) { + var ret string + return ret + } + return *o.Error +} + +// GetErrorOk returns a tuple with the Error field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkDeleteStatusResponse) GetErrorOk() (*string, bool) { + if o == nil || IsNil(o.Error) { + return nil, false + } + return o.Error, true +} + +// HasError returns a boolean if a field has been set. +func (o *BulkDeleteStatusResponse) HasError() bool { + if o != nil && !IsNil(o.Error) { + return true + } + + return false +} + +// SetError gets a reference to the given string and assigns it to the Error field. +func (o *BulkDeleteStatusResponse) SetError(v string) { + o.Error = &v +} + +// GetResults returns the Results field value if set, zero value otherwise. +func (o *BulkDeleteStatusResponse) GetResults() []BulkDeleteStatusResponseResultsInner { + if o == nil || IsNil(o.Results) { + var ret []BulkDeleteStatusResponseResultsInner + return ret + } + return o.Results +} + +// GetResultsOk returns a tuple with the Results field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *BulkDeleteStatusResponse) GetResultsOk() ([]BulkDeleteStatusResponseResultsInner, bool) { + if o == nil || IsNil(o.Results) { + return nil, false + } + return o.Results, true +} + +// HasResults returns a boolean if a field has been set. +func (o *BulkDeleteStatusResponse) HasResults() bool { + if o != nil && !IsNil(o.Results) { + return true + } + + return false +} + +// SetResults gets a reference to the given []BulkDeleteStatusResponseResultsInner and assigns it to the Results field. +func (o *BulkDeleteStatusResponse) SetResults(v []BulkDeleteStatusResponseResultsInner) { + o.Results = v +} + +func (o BulkDeleteStatusResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o BulkDeleteStatusResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.IsDone) { + toSerialize["isDone"] = o.IsDone + } + if !IsNil(o.Error) { + toSerialize["error"] = o.Error + } + if !IsNil(o.Results) { + toSerialize["results"] = o.Results + } + return toSerialize, nil +} + +type NullableBulkDeleteStatusResponse struct { + value *BulkDeleteStatusResponse + isSet bool +} + +func (v NullableBulkDeleteStatusResponse) Get() *BulkDeleteStatusResponse { + return v.value +} + +func (v *NullableBulkDeleteStatusResponse) Set(val *BulkDeleteStatusResponse) { + v.value = val + v.isSet = true +} + +func (v NullableBulkDeleteStatusResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableBulkDeleteStatusResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableBulkDeleteStatusResponse(val *BulkDeleteStatusResponse) *NullableBulkDeleteStatusResponse { + return &NullableBulkDeleteStatusResponse{value: val, isSet: true} +} + +func (v NullableBulkDeleteStatusResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableBulkDeleteStatusResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_bulk_delete_status_response_results_inner.go b/generated/slo/model_bulk_delete_status_response_results_inner.go new file mode 100644 index 000000000..dca2f3280 --- /dev/null +++ b/generated/slo/model_bulk_delete_status_response_results_inner.go @@ -0,0 +1,199 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package slo + +import ( + "encoding/json" +) + +// checks if the BulkDeleteStatusResponseResultsInner type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &BulkDeleteStatusResponseResultsInner{} + +// BulkDeleteStatusResponseResultsInner struct for BulkDeleteStatusResponseResultsInner +type BulkDeleteStatusResponseResultsInner struct { + // The ID of the SLO that was deleted + Id *string `json:"id,omitempty"` + // The result of the deletion operation for this SLO + Success *bool `json:"success,omitempty"` + // The error message if the deletion operation failed for this SLO + Error *string `json:"error,omitempty"` +} + +// NewBulkDeleteStatusResponseResultsInner instantiates a new BulkDeleteStatusResponseResultsInner object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewBulkDeleteStatusResponseResultsInner() *BulkDeleteStatusResponseResultsInner { + this := BulkDeleteStatusResponseResultsInner{} + return &this +} + +// NewBulkDeleteStatusResponseResultsInnerWithDefaults instantiates a new BulkDeleteStatusResponseResultsInner object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewBulkDeleteStatusResponseResultsInnerWithDefaults() *BulkDeleteStatusResponseResultsInner { + this := BulkDeleteStatusResponseResultsInner{} + return &this +} + +// GetId returns the Id field value if set, zero value otherwise. +func (o *BulkDeleteStatusResponseResultsInner) GetId() string { + if o == nil || IsNil(o.Id) { + var ret string + return ret + } + return *o.Id +} + +// GetIdOk returns a tuple with the Id field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkDeleteStatusResponseResultsInner) GetIdOk() (*string, bool) { + if o == nil || IsNil(o.Id) { + return nil, false + } + return o.Id, true +} + +// HasId returns a boolean if a field has been set. +func (o *BulkDeleteStatusResponseResultsInner) HasId() bool { + if o != nil && !IsNil(o.Id) { + return true + } + + return false +} + +// SetId gets a reference to the given string and assigns it to the Id field. +func (o *BulkDeleteStatusResponseResultsInner) SetId(v string) { + o.Id = &v +} + +// GetSuccess returns the Success field value if set, zero value otherwise. +func (o *BulkDeleteStatusResponseResultsInner) GetSuccess() bool { + if o == nil || IsNil(o.Success) { + var ret bool + return ret + } + return *o.Success +} + +// GetSuccessOk returns a tuple with the Success field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkDeleteStatusResponseResultsInner) GetSuccessOk() (*bool, bool) { + if o == nil || IsNil(o.Success) { + return nil, false + } + return o.Success, true +} + +// HasSuccess returns a boolean if a field has been set. +func (o *BulkDeleteStatusResponseResultsInner) HasSuccess() bool { + if o != nil && !IsNil(o.Success) { + return true + } + + return false +} + +// SetSuccess gets a reference to the given bool and assigns it to the Success field. +func (o *BulkDeleteStatusResponseResultsInner) SetSuccess(v bool) { + o.Success = &v +} + +// GetError returns the Error field value if set, zero value otherwise. 
+func (o *BulkDeleteStatusResponseResultsInner) GetError() string { + if o == nil || IsNil(o.Error) { + var ret string + return ret + } + return *o.Error +} + +// GetErrorOk returns a tuple with the Error field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkDeleteStatusResponseResultsInner) GetErrorOk() (*string, bool) { + if o == nil || IsNil(o.Error) { + return nil, false + } + return o.Error, true +} + +// HasError returns a boolean if a field has been set. +func (o *BulkDeleteStatusResponseResultsInner) HasError() bool { + if o != nil && !IsNil(o.Error) { + return true + } + + return false +} + +// SetError gets a reference to the given string and assigns it to the Error field. +func (o *BulkDeleteStatusResponseResultsInner) SetError(v string) { + o.Error = &v +} + +func (o BulkDeleteStatusResponseResultsInner) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o BulkDeleteStatusResponseResultsInner) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Id) { + toSerialize["id"] = o.Id + } + if !IsNil(o.Success) { + toSerialize["success"] = o.Success + } + if !IsNil(o.Error) { + toSerialize["error"] = o.Error + } + return toSerialize, nil +} + +type NullableBulkDeleteStatusResponseResultsInner struct { + value *BulkDeleteStatusResponseResultsInner + isSet bool +} + +func (v NullableBulkDeleteStatusResponseResultsInner) Get() *BulkDeleteStatusResponseResultsInner { + return v.value +} + +func (v *NullableBulkDeleteStatusResponseResultsInner) Set(val *BulkDeleteStatusResponseResultsInner) { + v.value = val + v.isSet = true +} + +func (v NullableBulkDeleteStatusResponseResultsInner) IsSet() bool { + return v.isSet +} + +func (v *NullableBulkDeleteStatusResponseResultsInner) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableBulkDeleteStatusResponseResultsInner(val *BulkDeleteStatusResponseResultsInner) *NullableBulkDeleteStatusResponseResultsInner { + return &NullableBulkDeleteStatusResponseResultsInner{value: val, isSet: true} +} + +func (v NullableBulkDeleteStatusResponseResultsInner) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableBulkDeleteStatusResponseResultsInner) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_bulk_purge_rollup_request.go b/generated/slo/model_bulk_purge_rollup_request.go new file mode 100644 index 000000000..74dbf5f21 --- /dev/null +++ b/generated/slo/model_bulk_purge_rollup_request.go @@ -0,0 +1,143 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the BulkPurgeRollupRequest type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &BulkPurgeRollupRequest{} + +// BulkPurgeRollupRequest The bulk purge rollup data request takes a list of SLO ids and a purge policy, then deletes the rollup data according to the purge policy. This API can be used to remove the staled data of an instance SLO that no longer get updated. 
+type BulkPurgeRollupRequest struct { + // An array of slo ids + List []string `json:"list"` + PurgePolicy BulkPurgeRollupRequestPurgePolicy `json:"purgePolicy"` +} + +// NewBulkPurgeRollupRequest instantiates a new BulkPurgeRollupRequest object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewBulkPurgeRollupRequest(list []string, purgePolicy BulkPurgeRollupRequestPurgePolicy) *BulkPurgeRollupRequest { + this := BulkPurgeRollupRequest{} + this.List = list + this.PurgePolicy = purgePolicy + return &this +} + +// NewBulkPurgeRollupRequestWithDefaults instantiates a new BulkPurgeRollupRequest object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewBulkPurgeRollupRequestWithDefaults() *BulkPurgeRollupRequest { + this := BulkPurgeRollupRequest{} + return &this +} + +// GetList returns the List field value +func (o *BulkPurgeRollupRequest) GetList() []string { + if o == nil { + var ret []string + return ret + } + + return o.List +} + +// GetListOk returns a tuple with the List field value +// and a boolean to check if the value has been set. +func (o *BulkPurgeRollupRequest) GetListOk() ([]string, bool) { + if o == nil { + return nil, false + } + return o.List, true +} + +// SetList sets field value +func (o *BulkPurgeRollupRequest) SetList(v []string) { + o.List = v +} + +// GetPurgePolicy returns the PurgePolicy field value +func (o *BulkPurgeRollupRequest) GetPurgePolicy() BulkPurgeRollupRequestPurgePolicy { + if o == nil { + var ret BulkPurgeRollupRequestPurgePolicy + return ret + } + + return o.PurgePolicy +} + +// GetPurgePolicyOk returns a tuple with the PurgePolicy field value +// and a boolean to check if the value has been set. 
+func (o *BulkPurgeRollupRequest) GetPurgePolicyOk() (*BulkPurgeRollupRequestPurgePolicy, bool) { + if o == nil { + return nil, false + } + return &o.PurgePolicy, true +} + +// SetPurgePolicy sets field value +func (o *BulkPurgeRollupRequest) SetPurgePolicy(v BulkPurgeRollupRequestPurgePolicy) { + o.PurgePolicy = v +} + +func (o BulkPurgeRollupRequest) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o BulkPurgeRollupRequest) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["list"] = o.List + toSerialize["purgePolicy"] = o.PurgePolicy + return toSerialize, nil +} + +type NullableBulkPurgeRollupRequest struct { + value *BulkPurgeRollupRequest + isSet bool +} + +func (v NullableBulkPurgeRollupRequest) Get() *BulkPurgeRollupRequest { + return v.value +} + +func (v *NullableBulkPurgeRollupRequest) Set(val *BulkPurgeRollupRequest) { + v.value = val + v.isSet = true +} + +func (v NullableBulkPurgeRollupRequest) IsSet() bool { + return v.isSet +} + +func (v *NullableBulkPurgeRollupRequest) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableBulkPurgeRollupRequest(val *BulkPurgeRollupRequest) *NullableBulkPurgeRollupRequest { + return &NullableBulkPurgeRollupRequest{value: val, isSet: true} +} + +func (v NullableBulkPurgeRollupRequest) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableBulkPurgeRollupRequest) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_bulk_purge_rollup_request_purge_policy.go b/generated/slo/model_bulk_purge_rollup_request_purge_policy.go new file mode 100644 index 000000000..386d9947a --- /dev/null +++ b/generated/slo/model_bulk_purge_rollup_request_purge_policy.go @@ -0,0 +1,145 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package slo + +import ( + "encoding/json" + "fmt" +) + +// BulkPurgeRollupRequestPurgePolicy - Policy that dictates which SLI documents to purge based on age +type BulkPurgeRollupRequestPurgePolicy struct { + BulkPurgeRollupRequestPurgePolicyOneOf *BulkPurgeRollupRequestPurgePolicyOneOf + BulkPurgeRollupRequestPurgePolicyOneOf1 *BulkPurgeRollupRequestPurgePolicyOneOf1 +} + +// BulkPurgeRollupRequestPurgePolicyOneOfAsBulkPurgeRollupRequestPurgePolicy is a convenience function that returns BulkPurgeRollupRequestPurgePolicyOneOf wrapped in BulkPurgeRollupRequestPurgePolicy +func BulkPurgeRollupRequestPurgePolicyOneOfAsBulkPurgeRollupRequestPurgePolicy(v *BulkPurgeRollupRequestPurgePolicyOneOf) BulkPurgeRollupRequestPurgePolicy { + return BulkPurgeRollupRequestPurgePolicy{ + BulkPurgeRollupRequestPurgePolicyOneOf: v, + } +} + +// BulkPurgeRollupRequestPurgePolicyOneOf1AsBulkPurgeRollupRequestPurgePolicy is a convenience function that returns BulkPurgeRollupRequestPurgePolicyOneOf1 wrapped in BulkPurgeRollupRequestPurgePolicy +func BulkPurgeRollupRequestPurgePolicyOneOf1AsBulkPurgeRollupRequestPurgePolicy(v *BulkPurgeRollupRequestPurgePolicyOneOf1) BulkPurgeRollupRequestPurgePolicy { + return BulkPurgeRollupRequestPurgePolicy{ + BulkPurgeRollupRequestPurgePolicyOneOf1: v, + } +} + +// Unmarshal JSON data into one of the pointers in the struct +func (dst *BulkPurgeRollupRequestPurgePolicy) UnmarshalJSON(data []byte) error { + var err error + match := 0 + // try to unmarshal data into BulkPurgeRollupRequestPurgePolicyOneOf + err = json.Unmarshal(data, &dst.BulkPurgeRollupRequestPurgePolicyOneOf) + if err == nil { + jsonBulkPurgeRollupRequestPurgePolicyOneOf, _ := json.Marshal(dst.BulkPurgeRollupRequestPurgePolicyOneOf) + if string(jsonBulkPurgeRollupRequestPurgePolicyOneOf) == "{}" { // empty struct + dst.BulkPurgeRollupRequestPurgePolicyOneOf = nil + } else { + match++ + } + } else { + dst.BulkPurgeRollupRequestPurgePolicyOneOf = nil + } + + // try to unmarshal data into BulkPurgeRollupRequestPurgePolicyOneOf1 + err = json.Unmarshal(data, &dst.BulkPurgeRollupRequestPurgePolicyOneOf1) + if err == nil { + jsonBulkPurgeRollupRequestPurgePolicyOneOf1, _ := json.Marshal(dst.BulkPurgeRollupRequestPurgePolicyOneOf1) + if string(jsonBulkPurgeRollupRequestPurgePolicyOneOf1) == "{}" { // empty struct + dst.BulkPurgeRollupRequestPurgePolicyOneOf1 = nil + } else { + match++ + } + } else { + dst.BulkPurgeRollupRequestPurgePolicyOneOf1 = nil + } + + if match > 1 { // more than 1 match + // reset to nil + dst.BulkPurgeRollupRequestPurgePolicyOneOf = nil + dst.BulkPurgeRollupRequestPurgePolicyOneOf1 = nil + + return fmt.Errorf("data matches more than one schema in oneOf(BulkPurgeRollupRequestPurgePolicy)") + } else if match == 1 { + return nil // exactly one match + } else { // no match + return fmt.Errorf("data failed to match schemas in oneOf(BulkPurgeRollupRequestPurgePolicy)") + } +} + +// Marshal data from the first non-nil pointers in the struct to JSON +func (src BulkPurgeRollupRequestPurgePolicy) MarshalJSON() ([]byte, error) { + if src.BulkPurgeRollupRequestPurgePolicyOneOf != nil { + return json.Marshal(&src.BulkPurgeRollupRequestPurgePolicyOneOf) + } + + if src.BulkPurgeRollupRequestPurgePolicyOneOf1 != nil { + return json.Marshal(&src.BulkPurgeRollupRequestPurgePolicyOneOf1) + } + + return nil, nil // no data in oneOf schemas +} + +// Get the actual instance +func (obj *BulkPurgeRollupRequestPurgePolicy) GetActualInstance() interface{} { + if obj == nil { + return nil + } + if 
obj.BulkPurgeRollupRequestPurgePolicyOneOf != nil { + return obj.BulkPurgeRollupRequestPurgePolicyOneOf + } + + if obj.BulkPurgeRollupRequestPurgePolicyOneOf1 != nil { + return obj.BulkPurgeRollupRequestPurgePolicyOneOf1 + } + + // all schemas are nil + return nil +} + +type NullableBulkPurgeRollupRequestPurgePolicy struct { + value *BulkPurgeRollupRequestPurgePolicy + isSet bool +} + +func (v NullableBulkPurgeRollupRequestPurgePolicy) Get() *BulkPurgeRollupRequestPurgePolicy { + return v.value +} + +func (v *NullableBulkPurgeRollupRequestPurgePolicy) Set(val *BulkPurgeRollupRequestPurgePolicy) { + v.value = val + v.isSet = true +} + +func (v NullableBulkPurgeRollupRequestPurgePolicy) IsSet() bool { + return v.isSet +} + +func (v *NullableBulkPurgeRollupRequestPurgePolicy) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableBulkPurgeRollupRequestPurgePolicy(val *BulkPurgeRollupRequestPurgePolicy) *NullableBulkPurgeRollupRequestPurgePolicy { + return &NullableBulkPurgeRollupRequestPurgePolicy{value: val, isSet: true} +} + +func (v NullableBulkPurgeRollupRequestPurgePolicy) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableBulkPurgeRollupRequestPurgePolicy) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_bulk_purge_rollup_request_purge_policy_one_of.go b/generated/slo/model_bulk_purge_rollup_request_purge_policy_one_of.go new file mode 100644 index 000000000..72fbedf9e --- /dev/null +++ b/generated/slo/model_bulk_purge_rollup_request_purge_policy_one_of.go @@ -0,0 +1,162 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the BulkPurgeRollupRequestPurgePolicyOneOf type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &BulkPurgeRollupRequestPurgePolicyOneOf{} + +// BulkPurgeRollupRequestPurgePolicyOneOf struct for BulkPurgeRollupRequestPurgePolicyOneOf +type BulkPurgeRollupRequestPurgePolicyOneOf struct { + // Specifies whether documents will be purged based on a specific age or on a timestamp + PurgeType *string `json:"purgeType,omitempty"` + // The duration to determine which documents to purge, formatted as {duration}{unit}. This value should be greater than or equal to the time window of every SLO provided. 
+ Age *string `json:"age,omitempty"` +} + +// NewBulkPurgeRollupRequestPurgePolicyOneOf instantiates a new BulkPurgeRollupRequestPurgePolicyOneOf object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewBulkPurgeRollupRequestPurgePolicyOneOf() *BulkPurgeRollupRequestPurgePolicyOneOf { + this := BulkPurgeRollupRequestPurgePolicyOneOf{} + return &this +} + +// NewBulkPurgeRollupRequestPurgePolicyOneOfWithDefaults instantiates a new BulkPurgeRollupRequestPurgePolicyOneOf object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewBulkPurgeRollupRequestPurgePolicyOneOfWithDefaults() *BulkPurgeRollupRequestPurgePolicyOneOf { + this := BulkPurgeRollupRequestPurgePolicyOneOf{} + return &this +} + +// GetPurgeType returns the PurgeType field value if set, zero value otherwise. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf) GetPurgeType() string { + if o == nil || IsNil(o.PurgeType) { + var ret string + return ret + } + return *o.PurgeType +} + +// GetPurgeTypeOk returns a tuple with the PurgeType field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf) GetPurgeTypeOk() (*string, bool) { + if o == nil || IsNil(o.PurgeType) { + return nil, false + } + return o.PurgeType, true +} + +// HasPurgeType returns a boolean if a field has been set. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf) HasPurgeType() bool { + if o != nil && !IsNil(o.PurgeType) { + return true + } + + return false +} + +// SetPurgeType gets a reference to the given string and assigns it to the PurgeType field. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf) SetPurgeType(v string) { + o.PurgeType = &v +} + +// GetAge returns the Age field value if set, zero value otherwise. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf) GetAge() string { + if o == nil || IsNil(o.Age) { + var ret string + return ret + } + return *o.Age +} + +// GetAgeOk returns a tuple with the Age field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf) GetAgeOk() (*string, bool) { + if o == nil || IsNil(o.Age) { + return nil, false + } + return o.Age, true +} + +// HasAge returns a boolean if a field has been set. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf) HasAge() bool { + if o != nil && !IsNil(o.Age) { + return true + } + + return false +} + +// SetAge gets a reference to the given string and assigns it to the Age field. 
+func (o *BulkPurgeRollupRequestPurgePolicyOneOf) SetAge(v string) { + o.Age = &v +} + +func (o BulkPurgeRollupRequestPurgePolicyOneOf) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o BulkPurgeRollupRequestPurgePolicyOneOf) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.PurgeType) { + toSerialize["purgeType"] = o.PurgeType + } + if !IsNil(o.Age) { + toSerialize["age"] = o.Age + } + return toSerialize, nil +} + +type NullableBulkPurgeRollupRequestPurgePolicyOneOf struct { + value *BulkPurgeRollupRequestPurgePolicyOneOf + isSet bool +} + +func (v NullableBulkPurgeRollupRequestPurgePolicyOneOf) Get() *BulkPurgeRollupRequestPurgePolicyOneOf { + return v.value +} + +func (v *NullableBulkPurgeRollupRequestPurgePolicyOneOf) Set(val *BulkPurgeRollupRequestPurgePolicyOneOf) { + v.value = val + v.isSet = true +} + +func (v NullableBulkPurgeRollupRequestPurgePolicyOneOf) IsSet() bool { + return v.isSet +} + +func (v *NullableBulkPurgeRollupRequestPurgePolicyOneOf) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableBulkPurgeRollupRequestPurgePolicyOneOf(val *BulkPurgeRollupRequestPurgePolicyOneOf) *NullableBulkPurgeRollupRequestPurgePolicyOneOf { + return &NullableBulkPurgeRollupRequestPurgePolicyOneOf{value: val, isSet: true} +} + +func (v NullableBulkPurgeRollupRequestPurgePolicyOneOf) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableBulkPurgeRollupRequestPurgePolicyOneOf) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_bulk_purge_rollup_request_purge_policy_one_of_1.go b/generated/slo/model_bulk_purge_rollup_request_purge_policy_one_of_1.go new file mode 100644 index 000000000..a64138364 --- /dev/null +++ b/generated/slo/model_bulk_purge_rollup_request_purge_policy_one_of_1.go @@ -0,0 +1,162 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the BulkPurgeRollupRequestPurgePolicyOneOf1 type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &BulkPurgeRollupRequestPurgePolicyOneOf1{} + +// BulkPurgeRollupRequestPurgePolicyOneOf1 struct for BulkPurgeRollupRequestPurgePolicyOneOf1 +type BulkPurgeRollupRequestPurgePolicyOneOf1 struct { + // Specifies whether documents will be purged based on a specific age or on a timestamp + PurgeType *string `json:"purgeType,omitempty"` + // The timestamp to determine which documents to purge, formatted in ISO. This value should be older than the applicable time window of every SLO provided. 
+ Timestamp *string `json:"timestamp,omitempty"` +} + +// NewBulkPurgeRollupRequestPurgePolicyOneOf1 instantiates a new BulkPurgeRollupRequestPurgePolicyOneOf1 object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewBulkPurgeRollupRequestPurgePolicyOneOf1() *BulkPurgeRollupRequestPurgePolicyOneOf1 { + this := BulkPurgeRollupRequestPurgePolicyOneOf1{} + return &this +} + +// NewBulkPurgeRollupRequestPurgePolicyOneOf1WithDefaults instantiates a new BulkPurgeRollupRequestPurgePolicyOneOf1 object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewBulkPurgeRollupRequestPurgePolicyOneOf1WithDefaults() *BulkPurgeRollupRequestPurgePolicyOneOf1 { + this := BulkPurgeRollupRequestPurgePolicyOneOf1{} + return &this +} + +// GetPurgeType returns the PurgeType field value if set, zero value otherwise. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) GetPurgeType() string { + if o == nil || IsNil(o.PurgeType) { + var ret string + return ret + } + return *o.PurgeType +} + +// GetPurgeTypeOk returns a tuple with the PurgeType field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) GetPurgeTypeOk() (*string, bool) { + if o == nil || IsNil(o.PurgeType) { + return nil, false + } + return o.PurgeType, true +} + +// HasPurgeType returns a boolean if a field has been set. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) HasPurgeType() bool { + if o != nil && !IsNil(o.PurgeType) { + return true + } + + return false +} + +// SetPurgeType gets a reference to the given string and assigns it to the PurgeType field. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) SetPurgeType(v string) { + o.PurgeType = &v +} + +// GetTimestamp returns the Timestamp field value if set, zero value otherwise. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) GetTimestamp() string { + if o == nil || IsNil(o.Timestamp) { + var ret string + return ret + } + return *o.Timestamp +} + +// GetTimestampOk returns a tuple with the Timestamp field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) GetTimestampOk() (*string, bool) { + if o == nil || IsNil(o.Timestamp) { + return nil, false + } + return o.Timestamp, true +} + +// HasTimestamp returns a boolean if a field has been set. +func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) HasTimestamp() bool { + if o != nil && !IsNil(o.Timestamp) { + return true + } + + return false +} + +// SetTimestamp gets a reference to the given string and assigns it to the Timestamp field. 
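+//
+// Illustrative sketch only, not generator output: the timestamp variant of the
+// purge policy could be built the same way. The purgeType value "timestamp" and
+// the ISO date below are assumptions, not values confirmed by this schema.
+//
+//	p := NewBulkPurgeRollupRequestPurgePolicyOneOf1()
+//	p.SetPurgeType("timestamp")
+//	p.SetTimestamp("2024-01-01T00:00:00.000Z")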
+func (o *BulkPurgeRollupRequestPurgePolicyOneOf1) SetTimestamp(v string) { + o.Timestamp = &v +} + +func (o BulkPurgeRollupRequestPurgePolicyOneOf1) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o BulkPurgeRollupRequestPurgePolicyOneOf1) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.PurgeType) { + toSerialize["purgeType"] = o.PurgeType + } + if !IsNil(o.Timestamp) { + toSerialize["timestamp"] = o.Timestamp + } + return toSerialize, nil +} + +type NullableBulkPurgeRollupRequestPurgePolicyOneOf1 struct { + value *BulkPurgeRollupRequestPurgePolicyOneOf1 + isSet bool +} + +func (v NullableBulkPurgeRollupRequestPurgePolicyOneOf1) Get() *BulkPurgeRollupRequestPurgePolicyOneOf1 { + return v.value +} + +func (v *NullableBulkPurgeRollupRequestPurgePolicyOneOf1) Set(val *BulkPurgeRollupRequestPurgePolicyOneOf1) { + v.value = val + v.isSet = true +} + +func (v NullableBulkPurgeRollupRequestPurgePolicyOneOf1) IsSet() bool { + return v.isSet +} + +func (v *NullableBulkPurgeRollupRequestPurgePolicyOneOf1) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableBulkPurgeRollupRequestPurgePolicyOneOf1(val *BulkPurgeRollupRequestPurgePolicyOneOf1) *NullableBulkPurgeRollupRequestPurgePolicyOneOf1 { + return &NullableBulkPurgeRollupRequestPurgePolicyOneOf1{value: val, isSet: true} +} + +func (v NullableBulkPurgeRollupRequestPurgePolicyOneOf1) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableBulkPurgeRollupRequestPurgePolicyOneOf1) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_bulk_purge_rollup_response.go b/generated/slo/model_bulk_purge_rollup_response.go new file mode 100644 index 000000000..91661acb3 --- /dev/null +++ b/generated/slo/model_bulk_purge_rollup_response.go @@ -0,0 +1,125 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the BulkPurgeRollupResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &BulkPurgeRollupResponse{} + +// BulkPurgeRollupResponse The bulk purge rollup data response returns a task id from the elasticsearch deleteByQuery response. +type BulkPurgeRollupResponse struct { + // The task id of the purge operation + TaskId *string `json:"taskId,omitempty"` +} + +// NewBulkPurgeRollupResponse instantiates a new BulkPurgeRollupResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewBulkPurgeRollupResponse() *BulkPurgeRollupResponse { + this := BulkPurgeRollupResponse{} + return &this +} + +// NewBulkPurgeRollupResponseWithDefaults instantiates a new BulkPurgeRollupResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewBulkPurgeRollupResponseWithDefaults() *BulkPurgeRollupResponse { + this := BulkPurgeRollupResponse{} + return &this +} + +// GetTaskId returns the TaskId field value if set, zero value otherwise. 
+func (o *BulkPurgeRollupResponse) GetTaskId() string { + if o == nil || IsNil(o.TaskId) { + var ret string + return ret + } + return *o.TaskId +} + +// GetTaskIdOk returns a tuple with the TaskId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *BulkPurgeRollupResponse) GetTaskIdOk() (*string, bool) { + if o == nil || IsNil(o.TaskId) { + return nil, false + } + return o.TaskId, true +} + +// HasTaskId returns a boolean if a field has been set. +func (o *BulkPurgeRollupResponse) HasTaskId() bool { + if o != nil && !IsNil(o.TaskId) { + return true + } + + return false +} + +// SetTaskId gets a reference to the given string and assigns it to the TaskId field. +func (o *BulkPurgeRollupResponse) SetTaskId(v string) { + o.TaskId = &v +} + +func (o BulkPurgeRollupResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o BulkPurgeRollupResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.TaskId) { + toSerialize["taskId"] = o.TaskId + } + return toSerialize, nil +} + +type NullableBulkPurgeRollupResponse struct { + value *BulkPurgeRollupResponse + isSet bool +} + +func (v NullableBulkPurgeRollupResponse) Get() *BulkPurgeRollupResponse { + return v.value +} + +func (v *NullableBulkPurgeRollupResponse) Set(val *BulkPurgeRollupResponse) { + v.value = val + v.isSet = true +} + +func (v NullableBulkPurgeRollupResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableBulkPurgeRollupResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableBulkPurgeRollupResponse(val *BulkPurgeRollupResponse) *NullableBulkPurgeRollupResponse { + return &NullableBulkPurgeRollupResponse{value: val, isSet: true} +} + +func (v NullableBulkPurgeRollupResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableBulkPurgeRollupResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_create_slo_request.go b/generated/slo/model_create_slo_request.go index 02210deb0..7c2add5b3 100644 --- a/generated/slo/model_create_slo_request.go +++ b/generated/slo/model_create_slo_request.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -30,7 +30,7 @@ type CreateSloRequest struct { BudgetingMethod BudgetingMethod `json:"budgetingMethod"` Objective Objective `json:"objective"` Settings *Settings `json:"settings,omitempty"` - GroupBy *SloResponseGroupBy `json:"groupBy,omitempty"` + GroupBy *GroupBy `json:"groupBy,omitempty"` // List of tags Tags []string `json:"tags,omitempty"` } @@ -267,9 +267,9 @@ func (o *CreateSloRequest) SetSettings(v Settings) { } // GetGroupBy returns the GroupBy field value if set, zero value otherwise. -func (o *CreateSloRequest) GetGroupBy() SloResponseGroupBy { +func (o *CreateSloRequest) GetGroupBy() GroupBy { if o == nil || IsNil(o.GroupBy) { - var ret SloResponseGroupBy + var ret GroupBy return ret } return *o.GroupBy @@ -277,7 +277,7 @@ func (o *CreateSloRequest) GetGroupBy() SloResponseGroupBy { // GetGroupByOk returns a tuple with the GroupBy field value if set, nil otherwise // and a boolean to check if the value has been set. 
-func (o *CreateSloRequest) GetGroupByOk() (*SloResponseGroupBy, bool) { +func (o *CreateSloRequest) GetGroupByOk() (*GroupBy, bool) { if o == nil || IsNil(o.GroupBy) { return nil, false } @@ -293,8 +293,8 @@ func (o *CreateSloRequest) HasGroupBy() bool { return false } -// SetGroupBy gets a reference to the given SloResponseGroupBy and assigns it to the GroupBy field. -func (o *CreateSloRequest) SetGroupBy(v SloResponseGroupBy) { +// SetGroupBy gets a reference to the given GroupBy and assigns it to the GroupBy field. +func (o *CreateSloRequest) SetGroupBy(v GroupBy) { o.GroupBy = &v } diff --git a/generated/slo/model_create_slo_request_indicator.go b/generated/slo/model_create_slo_request_indicator.go index 56c4975b2..8c46f30ed 100644 --- a/generated/slo/model_create_slo_request_indicator.go +++ b/generated/slo/model_create_slo_request_indicator.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_create_slo_response.go b/generated/slo/model_create_slo_response.go index 53d6c95db..793f7a05c 100644 --- a/generated/slo/model_create_slo_response.go +++ b/generated/slo/model_create_slo_response.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_delete_slo_instances_request.go b/generated/slo/model_delete_slo_instances_request.go index a8a4be95f..c9e381a7e 100644 --- a/generated/slo/model_delete_slo_instances_request.go +++ b/generated/slo/model_delete_slo_instances_request.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_delete_slo_instances_request_list_inner.go b/generated/slo/model_delete_slo_instances_request_list_inner.go index 22694c856..1dcc1042a 100644 --- a/generated/slo/model_delete_slo_instances_request_list_inner.go +++ b/generated/slo/model_delete_slo_instances_request_list_inner.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_error_budget.go b/generated/slo/model_error_budget.go index d7d8e6b6d..ff30efc8a 100644 --- a/generated/slo/model_error_budget.go +++ b/generated/slo/model_error_budget.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_filter.go b/generated/slo/model_filter.go new file mode 100644 index 000000000..df99bdb07 --- /dev/null +++ b/generated/slo/model_filter.go @@ -0,0 +1,160 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package slo + +import ( + "encoding/json" +) + +// checks if the Filter type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &Filter{} + +// Filter Defines properties for a filter +type Filter struct { + Query map[string]interface{} `json:"query,omitempty"` + Meta *FilterMeta `json:"meta,omitempty"` +} + +// NewFilter instantiates a new Filter object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewFilter() *Filter { + this := Filter{} + return &this +} + +// NewFilterWithDefaults instantiates a new Filter object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewFilterWithDefaults() *Filter { + this := Filter{} + return &this +} + +// GetQuery returns the Query field value if set, zero value otherwise. +func (o *Filter) GetQuery() map[string]interface{} { + if o == nil || IsNil(o.Query) { + var ret map[string]interface{} + return ret + } + return o.Query +} + +// GetQueryOk returns a tuple with the Query field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *Filter) GetQueryOk() (map[string]interface{}, bool) { + if o == nil || IsNil(o.Query) { + return map[string]interface{}{}, false + } + return o.Query, true +} + +// HasQuery returns a boolean if a field has been set. +func (o *Filter) HasQuery() bool { + if o != nil && !IsNil(o.Query) { + return true + } + + return false +} + +// SetQuery gets a reference to the given map[string]interface{} and assigns it to the Query field. +func (o *Filter) SetQuery(v map[string]interface{}) { + o.Query = v +} + +// GetMeta returns the Meta field value if set, zero value otherwise. +func (o *Filter) GetMeta() FilterMeta { + if o == nil || IsNil(o.Meta) { + var ret FilterMeta + return ret + } + return *o.Meta +} + +// GetMetaOk returns a tuple with the Meta field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *Filter) GetMetaOk() (*FilterMeta, bool) { + if o == nil || IsNil(o.Meta) { + return nil, false + } + return o.Meta, true +} + +// HasMeta returns a boolean if a field has been set. +func (o *Filter) HasMeta() bool { + if o != nil && !IsNil(o.Meta) { + return true + } + + return false +} + +// SetMeta gets a reference to the given FilterMeta and assigns it to the Meta field. 
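+//
+// Illustrative sketch only, not generator output: a Filter could be populated
+// with a raw query map plus its metadata. The "match_phrase" clause and the
+// "service.name" field are assumptions about the Elasticsearch query DSL, not
+// part of this schema.
+//
+//	f := NewFilter()
+//	f.SetQuery(map[string]interface{}{
+//		"match_phrase": map[string]interface{}{"service.name": "my-service"},
+//	})
+//	meta := NewFilterMeta()
+//	meta.SetNegate(false)
+//	f.SetMeta(*meta)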
+func (o *Filter) SetMeta(v FilterMeta) { + o.Meta = &v +} + +func (o Filter) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o Filter) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Query) { + toSerialize["query"] = o.Query + } + if !IsNil(o.Meta) { + toSerialize["meta"] = o.Meta + } + return toSerialize, nil +} + +type NullableFilter struct { + value *Filter + isSet bool +} + +func (v NullableFilter) Get() *Filter { + return v.value +} + +func (v *NullableFilter) Set(val *Filter) { + v.value = val + v.isSet = true +} + +func (v NullableFilter) IsSet() bool { + return v.isSet +} + +func (v *NullableFilter) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableFilter(val *Filter) *NullableFilter { + return &NullableFilter{value: val, isSet: true} +} + +func (v NullableFilter) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableFilter) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_filter_meta.go b/generated/slo/model_filter_meta.go new file mode 100644 index 000000000..8565f7ddd --- /dev/null +++ b/generated/slo/model_filter_meta.go @@ -0,0 +1,531 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the FilterMeta type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &FilterMeta{} + +// FilterMeta Defines properties for a filter +type FilterMeta struct { + Alias NullableString `json:"alias,omitempty"` + Disabled *bool `json:"disabled,omitempty"` + Negate *bool `json:"negate,omitempty"` + ControlledBy *string `json:"controlledBy,omitempty"` + Group *string `json:"group,omitempty"` + Index *string `json:"index,omitempty"` + IsMultiIndex *bool `json:"isMultiIndex,omitempty"` + Type *string `json:"type,omitempty"` + Key *string `json:"key,omitempty"` + Params map[string]interface{} `json:"params,omitempty"` + Value *string `json:"value,omitempty"` + Field *string `json:"field,omitempty"` +} + +// NewFilterMeta instantiates a new FilterMeta object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewFilterMeta() *FilterMeta { + this := FilterMeta{} + return &this +} + +// NewFilterMetaWithDefaults instantiates a new FilterMeta object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewFilterMetaWithDefaults() *FilterMeta { + this := FilterMeta{} + return &this +} + +// GetAlias returns the Alias field value if set, zero value otherwise (both if not set or set to explicit null). +func (o *FilterMeta) GetAlias() string { + if o == nil || IsNil(o.Alias.Get()) { + var ret string + return ret + } + return *o.Alias.Get() +} + +// GetAliasOk returns a tuple with the Alias field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+// NOTE: If the value is an explicit nil, `nil, true` will be returned +func (o *FilterMeta) GetAliasOk() (*string, bool) { + if o == nil { + return nil, false + } + return o.Alias.Get(), o.Alias.IsSet() +} + +// HasAlias returns a boolean if a field has been set. +func (o *FilterMeta) HasAlias() bool { + if o != nil && o.Alias.IsSet() { + return true + } + + return false +} + +// SetAlias gets a reference to the given NullableString and assigns it to the Alias field. +func (o *FilterMeta) SetAlias(v string) { + o.Alias.Set(&v) +} + +// SetAliasNil sets the value for Alias to be an explicit nil +func (o *FilterMeta) SetAliasNil() { + o.Alias.Set(nil) +} + +// UnsetAlias ensures that no value is present for Alias, not even an explicit nil +func (o *FilterMeta) UnsetAlias() { + o.Alias.Unset() +} + +// GetDisabled returns the Disabled field value if set, zero value otherwise. +func (o *FilterMeta) GetDisabled() bool { + if o == nil || IsNil(o.Disabled) { + var ret bool + return ret + } + return *o.Disabled +} + +// GetDisabledOk returns a tuple with the Disabled field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterMeta) GetDisabledOk() (*bool, bool) { + if o == nil || IsNil(o.Disabled) { + return nil, false + } + return o.Disabled, true +} + +// HasDisabled returns a boolean if a field has been set. +func (o *FilterMeta) HasDisabled() bool { + if o != nil && !IsNil(o.Disabled) { + return true + } + + return false +} + +// SetDisabled gets a reference to the given bool and assigns it to the Disabled field. +func (o *FilterMeta) SetDisabled(v bool) { + o.Disabled = &v +} + +// GetNegate returns the Negate field value if set, zero value otherwise. +func (o *FilterMeta) GetNegate() bool { + if o == nil || IsNil(o.Negate) { + var ret bool + return ret + } + return *o.Negate +} + +// GetNegateOk returns a tuple with the Negate field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterMeta) GetNegateOk() (*bool, bool) { + if o == nil || IsNil(o.Negate) { + return nil, false + } + return o.Negate, true +} + +// HasNegate returns a boolean if a field has been set. +func (o *FilterMeta) HasNegate() bool { + if o != nil && !IsNil(o.Negate) { + return true + } + + return false +} + +// SetNegate gets a reference to the given bool and assigns it to the Negate field. +func (o *FilterMeta) SetNegate(v bool) { + o.Negate = &v +} + +// GetControlledBy returns the ControlledBy field value if set, zero value otherwise. +func (o *FilterMeta) GetControlledBy() string { + if o == nil || IsNil(o.ControlledBy) { + var ret string + return ret + } + return *o.ControlledBy +} + +// GetControlledByOk returns a tuple with the ControlledBy field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterMeta) GetControlledByOk() (*string, bool) { + if o == nil || IsNil(o.ControlledBy) { + return nil, false + } + return o.ControlledBy, true +} + +// HasControlledBy returns a boolean if a field has been set. +func (o *FilterMeta) HasControlledBy() bool { + if o != nil && !IsNil(o.ControlledBy) { + return true + } + + return false +} + +// SetControlledBy gets a reference to the given string and assigns it to the ControlledBy field. +func (o *FilterMeta) SetControlledBy(v string) { + o.ControlledBy = &v +} + +// GetGroup returns the Group field value if set, zero value otherwise. 
+func (o *FilterMeta) GetGroup() string { + if o == nil || IsNil(o.Group) { + var ret string + return ret + } + return *o.Group +} + +// GetGroupOk returns a tuple with the Group field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterMeta) GetGroupOk() (*string, bool) { + if o == nil || IsNil(o.Group) { + return nil, false + } + return o.Group, true +} + +// HasGroup returns a boolean if a field has been set. +func (o *FilterMeta) HasGroup() bool { + if o != nil && !IsNil(o.Group) { + return true + } + + return false +} + +// SetGroup gets a reference to the given string and assigns it to the Group field. +func (o *FilterMeta) SetGroup(v string) { + o.Group = &v +} + +// GetIndex returns the Index field value if set, zero value otherwise. +func (o *FilterMeta) GetIndex() string { + if o == nil || IsNil(o.Index) { + var ret string + return ret + } + return *o.Index +} + +// GetIndexOk returns a tuple with the Index field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterMeta) GetIndexOk() (*string, bool) { + if o == nil || IsNil(o.Index) { + return nil, false + } + return o.Index, true +} + +// HasIndex returns a boolean if a field has been set. +func (o *FilterMeta) HasIndex() bool { + if o != nil && !IsNil(o.Index) { + return true + } + + return false +} + +// SetIndex gets a reference to the given string and assigns it to the Index field. +func (o *FilterMeta) SetIndex(v string) { + o.Index = &v +} + +// GetIsMultiIndex returns the IsMultiIndex field value if set, zero value otherwise. +func (o *FilterMeta) GetIsMultiIndex() bool { + if o == nil || IsNil(o.IsMultiIndex) { + var ret bool + return ret + } + return *o.IsMultiIndex +} + +// GetIsMultiIndexOk returns a tuple with the IsMultiIndex field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterMeta) GetIsMultiIndexOk() (*bool, bool) { + if o == nil || IsNil(o.IsMultiIndex) { + return nil, false + } + return o.IsMultiIndex, true +} + +// HasIsMultiIndex returns a boolean if a field has been set. +func (o *FilterMeta) HasIsMultiIndex() bool { + if o != nil && !IsNil(o.IsMultiIndex) { + return true + } + + return false +} + +// SetIsMultiIndex gets a reference to the given bool and assigns it to the IsMultiIndex field. +func (o *FilterMeta) SetIsMultiIndex(v bool) { + o.IsMultiIndex = &v +} + +// GetType returns the Type field value if set, zero value otherwise. +func (o *FilterMeta) GetType() string { + if o == nil || IsNil(o.Type) { + var ret string + return ret + } + return *o.Type +} + +// GetTypeOk returns a tuple with the Type field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterMeta) GetTypeOk() (*string, bool) { + if o == nil || IsNil(o.Type) { + return nil, false + } + return o.Type, true +} + +// HasType returns a boolean if a field has been set. +func (o *FilterMeta) HasType() bool { + if o != nil && !IsNil(o.Type) { + return true + } + + return false +} + +// SetType gets a reference to the given string and assigns it to the Type field. +func (o *FilterMeta) SetType(v string) { + o.Type = &v +} + +// GetKey returns the Key field value if set, zero value otherwise. +func (o *FilterMeta) GetKey() string { + if o == nil || IsNil(o.Key) { + var ret string + return ret + } + return *o.Key +} + +// GetKeyOk returns a tuple with the Key field value if set, nil otherwise +// and a boolean to check if the value has been set. 
+func (o *FilterMeta) GetKeyOk() (*string, bool) { + if o == nil || IsNil(o.Key) { + return nil, false + } + return o.Key, true +} + +// HasKey returns a boolean if a field has been set. +func (o *FilterMeta) HasKey() bool { + if o != nil && !IsNil(o.Key) { + return true + } + + return false +} + +// SetKey gets a reference to the given string and assigns it to the Key field. +func (o *FilterMeta) SetKey(v string) { + o.Key = &v +} + +// GetParams returns the Params field value if set, zero value otherwise. +func (o *FilterMeta) GetParams() map[string]interface{} { + if o == nil || IsNil(o.Params) { + var ret map[string]interface{} + return ret + } + return o.Params +} + +// GetParamsOk returns a tuple with the Params field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterMeta) GetParamsOk() (map[string]interface{}, bool) { + if o == nil || IsNil(o.Params) { + return map[string]interface{}{}, false + } + return o.Params, true +} + +// HasParams returns a boolean if a field has been set. +func (o *FilterMeta) HasParams() bool { + if o != nil && !IsNil(o.Params) { + return true + } + + return false +} + +// SetParams gets a reference to the given map[string]interface{} and assigns it to the Params field. +func (o *FilterMeta) SetParams(v map[string]interface{}) { + o.Params = v +} + +// GetValue returns the Value field value if set, zero value otherwise. +func (o *FilterMeta) GetValue() string { + if o == nil || IsNil(o.Value) { + var ret string + return ret + } + return *o.Value +} + +// GetValueOk returns a tuple with the Value field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterMeta) GetValueOk() (*string, bool) { + if o == nil || IsNil(o.Value) { + return nil, false + } + return o.Value, true +} + +// HasValue returns a boolean if a field has been set. +func (o *FilterMeta) HasValue() bool { + if o != nil && !IsNil(o.Value) { + return true + } + + return false +} + +// SetValue gets a reference to the given string and assigns it to the Value field. +func (o *FilterMeta) SetValue(v string) { + o.Value = &v +} + +// GetField returns the Field field value if set, zero value otherwise. +func (o *FilterMeta) GetField() string { + if o == nil || IsNil(o.Field) { + var ret string + return ret + } + return *o.Field +} + +// GetFieldOk returns a tuple with the Field field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FilterMeta) GetFieldOk() (*string, bool) { + if o == nil || IsNil(o.Field) { + return nil, false + } + return o.Field, true +} + +// HasField returns a boolean if a field has been set. +func (o *FilterMeta) HasField() bool { + if o != nil && !IsNil(o.Field) { + return true + } + + return false +} + +// SetField gets a reference to the given string and assigns it to the Field field. 
+func (o *FilterMeta) SetField(v string) { + o.Field = &v +} + +func (o FilterMeta) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o FilterMeta) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if o.Alias.IsSet() { + toSerialize["alias"] = o.Alias.Get() + } + if !IsNil(o.Disabled) { + toSerialize["disabled"] = o.Disabled + } + if !IsNil(o.Negate) { + toSerialize["negate"] = o.Negate + } + if !IsNil(o.ControlledBy) { + toSerialize["controlledBy"] = o.ControlledBy + } + if !IsNil(o.Group) { + toSerialize["group"] = o.Group + } + if !IsNil(o.Index) { + toSerialize["index"] = o.Index + } + if !IsNil(o.IsMultiIndex) { + toSerialize["isMultiIndex"] = o.IsMultiIndex + } + if !IsNil(o.Type) { + toSerialize["type"] = o.Type + } + if !IsNil(o.Key) { + toSerialize["key"] = o.Key + } + if !IsNil(o.Params) { + toSerialize["params"] = o.Params + } + if !IsNil(o.Value) { + toSerialize["value"] = o.Value + } + if !IsNil(o.Field) { + toSerialize["field"] = o.Field + } + return toSerialize, nil +} + +type NullableFilterMeta struct { + value *FilterMeta + isSet bool +} + +func (v NullableFilterMeta) Get() *FilterMeta { + return v.value +} + +func (v *NullableFilterMeta) Set(val *FilterMeta) { + v.value = val + v.isSet = true +} + +func (v NullableFilterMeta) IsSet() bool { + return v.isSet +} + +func (v *NullableFilterMeta) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableFilterMeta(val *FilterMeta) *NullableFilterMeta { + return &NullableFilterMeta{value: val, isSet: true} +} + +func (v NullableFilterMeta) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableFilterMeta) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_find_slo_definitions_response.go b/generated/slo/model_find_slo_definitions_response.go new file mode 100644 index 000000000..7f10a786b --- /dev/null +++ b/generated/slo/model_find_slo_definitions_response.go @@ -0,0 +1,145 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" + "fmt" +) + +// FindSloDefinitionsResponse - A paginated response of SLO definitions matching the query. 
+type FindSloDefinitionsResponse struct { + FindSloDefinitionsResponseOneOf *FindSloDefinitionsResponseOneOf + FindSloDefinitionsResponseOneOf1 *FindSloDefinitionsResponseOneOf1 +} + +// FindSloDefinitionsResponseOneOfAsFindSloDefinitionsResponse is a convenience function that returns FindSloDefinitionsResponseOneOf wrapped in FindSloDefinitionsResponse +func FindSloDefinitionsResponseOneOfAsFindSloDefinitionsResponse(v *FindSloDefinitionsResponseOneOf) FindSloDefinitionsResponse { + return FindSloDefinitionsResponse{ + FindSloDefinitionsResponseOneOf: v, + } +} + +// FindSloDefinitionsResponseOneOf1AsFindSloDefinitionsResponse is a convenience function that returns FindSloDefinitionsResponseOneOf1 wrapped in FindSloDefinitionsResponse +func FindSloDefinitionsResponseOneOf1AsFindSloDefinitionsResponse(v *FindSloDefinitionsResponseOneOf1) FindSloDefinitionsResponse { + return FindSloDefinitionsResponse{ + FindSloDefinitionsResponseOneOf1: v, + } +} + +// Unmarshal JSON data into one of the pointers in the struct +func (dst *FindSloDefinitionsResponse) UnmarshalJSON(data []byte) error { + var err error + match := 0 + // try to unmarshal data into FindSloDefinitionsResponseOneOf + err = json.Unmarshal(data, &dst.FindSloDefinitionsResponseOneOf) + if err == nil { + jsonFindSloDefinitionsResponseOneOf, _ := json.Marshal(dst.FindSloDefinitionsResponseOneOf) + if string(jsonFindSloDefinitionsResponseOneOf) == "{}" { // empty struct + dst.FindSloDefinitionsResponseOneOf = nil + } else { + match++ + } + } else { + dst.FindSloDefinitionsResponseOneOf = nil + } + + // try to unmarshal data into FindSloDefinitionsResponseOneOf1 + err = json.Unmarshal(data, &dst.FindSloDefinitionsResponseOneOf1) + if err == nil { + jsonFindSloDefinitionsResponseOneOf1, _ := json.Marshal(dst.FindSloDefinitionsResponseOneOf1) + if string(jsonFindSloDefinitionsResponseOneOf1) == "{}" { // empty struct + dst.FindSloDefinitionsResponseOneOf1 = nil + } else { + match++ + } + } else { + dst.FindSloDefinitionsResponseOneOf1 = nil + } + + if match > 1 { // more than 1 match + // reset to nil + dst.FindSloDefinitionsResponseOneOf = nil + dst.FindSloDefinitionsResponseOneOf1 = nil + + return fmt.Errorf("data matches more than one schema in oneOf(FindSloDefinitionsResponse)") + } else if match == 1 { + return nil // exactly one match + } else { // no match + return fmt.Errorf("data failed to match schemas in oneOf(FindSloDefinitionsResponse)") + } +} + +// Marshal data from the first non-nil pointers in the struct to JSON +func (src FindSloDefinitionsResponse) MarshalJSON() ([]byte, error) { + if src.FindSloDefinitionsResponseOneOf != nil { + return json.Marshal(&src.FindSloDefinitionsResponseOneOf) + } + + if src.FindSloDefinitionsResponseOneOf1 != nil { + return json.Marshal(&src.FindSloDefinitionsResponseOneOf1) + } + + return nil, nil // no data in oneOf schemas +} + +// Get the actual instance +func (obj *FindSloDefinitionsResponse) GetActualInstance() interface{} { + if obj == nil { + return nil + } + if obj.FindSloDefinitionsResponseOneOf != nil { + return obj.FindSloDefinitionsResponseOneOf + } + + if obj.FindSloDefinitionsResponseOneOf1 != nil { + return obj.FindSloDefinitionsResponseOneOf1 + } + + // all schemas are nil + return nil +} + +type NullableFindSloDefinitionsResponse struct { + value *FindSloDefinitionsResponse + isSet bool +} + +func (v NullableFindSloDefinitionsResponse) Get() *FindSloDefinitionsResponse { + return v.value +} + +func (v *NullableFindSloDefinitionsResponse) Set(val 
*FindSloDefinitionsResponse) { + v.value = val + v.isSet = true +} + +func (v NullableFindSloDefinitionsResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableFindSloDefinitionsResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableFindSloDefinitionsResponse(val *FindSloDefinitionsResponse) *NullableFindSloDefinitionsResponse { + return &NullableFindSloDefinitionsResponse{value: val, isSet: true} +} + +func (v NullableFindSloDefinitionsResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableFindSloDefinitionsResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_find_slo_definitions_response_one_of.go b/generated/slo/model_find_slo_definitions_response_one_of.go new file mode 100644 index 000000000..67ff9551d --- /dev/null +++ b/generated/slo/model_find_slo_definitions_response_one_of.go @@ -0,0 +1,232 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the FindSloDefinitionsResponseOneOf type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &FindSloDefinitionsResponseOneOf{} + +// FindSloDefinitionsResponseOneOf struct for FindSloDefinitionsResponseOneOf +type FindSloDefinitionsResponseOneOf struct { + Page *float64 `json:"page,omitempty"` + PerPage *float64 `json:"perPage,omitempty"` + Total *float64 `json:"total,omitempty"` + Results []SloWithSummaryResponse `json:"results,omitempty"` +} + +// NewFindSloDefinitionsResponseOneOf instantiates a new FindSloDefinitionsResponseOneOf object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewFindSloDefinitionsResponseOneOf() *FindSloDefinitionsResponseOneOf { + this := FindSloDefinitionsResponseOneOf{} + return &this +} + +// NewFindSloDefinitionsResponseOneOfWithDefaults instantiates a new FindSloDefinitionsResponseOneOf object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewFindSloDefinitionsResponseOneOfWithDefaults() *FindSloDefinitionsResponseOneOf { + this := FindSloDefinitionsResponseOneOf{} + return &this +} + +// GetPage returns the Page field value if set, zero value otherwise. +func (o *FindSloDefinitionsResponseOneOf) GetPage() float64 { + if o == nil || IsNil(o.Page) { + var ret float64 + return ret + } + return *o.Page +} + +// GetPageOk returns a tuple with the Page field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FindSloDefinitionsResponseOneOf) GetPageOk() (*float64, bool) { + if o == nil || IsNil(o.Page) { + return nil, false + } + return o.Page, true +} + +// HasPage returns a boolean if a field has been set. +func (o *FindSloDefinitionsResponseOneOf) HasPage() bool { + if o != nil && !IsNil(o.Page) { + return true + } + + return false +} + +// SetPage gets a reference to the given float64 and assigns it to the Page field. +func (o *FindSloDefinitionsResponseOneOf) SetPage(v float64) { + o.Page = &v +} + +// GetPerPage returns the PerPage field value if set, zero value otherwise. 
+func (o *FindSloDefinitionsResponseOneOf) GetPerPage() float64 { + if o == nil || IsNil(o.PerPage) { + var ret float64 + return ret + } + return *o.PerPage +} + +// GetPerPageOk returns a tuple with the PerPage field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FindSloDefinitionsResponseOneOf) GetPerPageOk() (*float64, bool) { + if o == nil || IsNil(o.PerPage) { + return nil, false + } + return o.PerPage, true +} + +// HasPerPage returns a boolean if a field has been set. +func (o *FindSloDefinitionsResponseOneOf) HasPerPage() bool { + if o != nil && !IsNil(o.PerPage) { + return true + } + + return false +} + +// SetPerPage gets a reference to the given float64 and assigns it to the PerPage field. +func (o *FindSloDefinitionsResponseOneOf) SetPerPage(v float64) { + o.PerPage = &v +} + +// GetTotal returns the Total field value if set, zero value otherwise. +func (o *FindSloDefinitionsResponseOneOf) GetTotal() float64 { + if o == nil || IsNil(o.Total) { + var ret float64 + return ret + } + return *o.Total +} + +// GetTotalOk returns a tuple with the Total field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FindSloDefinitionsResponseOneOf) GetTotalOk() (*float64, bool) { + if o == nil || IsNil(o.Total) { + return nil, false + } + return o.Total, true +} + +// HasTotal returns a boolean if a field has been set. +func (o *FindSloDefinitionsResponseOneOf) HasTotal() bool { + if o != nil && !IsNil(o.Total) { + return true + } + + return false +} + +// SetTotal gets a reference to the given float64 and assigns it to the Total field. +func (o *FindSloDefinitionsResponseOneOf) SetTotal(v float64) { + o.Total = &v +} + +// GetResults returns the Results field value if set, zero value otherwise. +func (o *FindSloDefinitionsResponseOneOf) GetResults() []SloWithSummaryResponse { + if o == nil || IsNil(o.Results) { + var ret []SloWithSummaryResponse + return ret + } + return o.Results +} + +// GetResultsOk returns a tuple with the Results field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FindSloDefinitionsResponseOneOf) GetResultsOk() ([]SloWithSummaryResponse, bool) { + if o == nil || IsNil(o.Results) { + return nil, false + } + return o.Results, true +} + +// HasResults returns a boolean if a field has been set. +func (o *FindSloDefinitionsResponseOneOf) HasResults() bool { + if o != nil && !IsNil(o.Results) { + return true + } + + return false +} + +// SetResults gets a reference to the given []SloWithSummaryResponse and assigns it to the Results field. 
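+//
+// Illustrative sketch only, not generator output: callers would typically guard
+// reads with the generated Has*/Get* accessors; "resp" below is a hypothetical
+// *FindSloDefinitionsResponseOneOf value.
+//
+//	if resp.HasResults() {
+//		for _, definition := range resp.GetResults() {
+//			_ = definition // each entry is a SloWithSummaryResponse
+//		}
+//	}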
+func (o *FindSloDefinitionsResponseOneOf) SetResults(v []SloWithSummaryResponse) { + o.Results = v +} + +func (o FindSloDefinitionsResponseOneOf) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o FindSloDefinitionsResponseOneOf) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Page) { + toSerialize["page"] = o.Page + } + if !IsNil(o.PerPage) { + toSerialize["perPage"] = o.PerPage + } + if !IsNil(o.Total) { + toSerialize["total"] = o.Total + } + if !IsNil(o.Results) { + toSerialize["results"] = o.Results + } + return toSerialize, nil +} + +type NullableFindSloDefinitionsResponseOneOf struct { + value *FindSloDefinitionsResponseOneOf + isSet bool +} + +func (v NullableFindSloDefinitionsResponseOneOf) Get() *FindSloDefinitionsResponseOneOf { + return v.value +} + +func (v *NullableFindSloDefinitionsResponseOneOf) Set(val *FindSloDefinitionsResponseOneOf) { + v.value = val + v.isSet = true +} + +func (v NullableFindSloDefinitionsResponseOneOf) IsSet() bool { + return v.isSet +} + +func (v *NullableFindSloDefinitionsResponseOneOf) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableFindSloDefinitionsResponseOneOf(val *FindSloDefinitionsResponseOneOf) *NullableFindSloDefinitionsResponseOneOf { + return &NullableFindSloDefinitionsResponseOneOf{value: val, isSet: true} +} + +func (v NullableFindSloDefinitionsResponseOneOf) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableFindSloDefinitionsResponseOneOf) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_find_slo_definitions_response_one_of_1.go b/generated/slo/model_find_slo_definitions_response_one_of_1.go new file mode 100644 index 000000000..9476c49af --- /dev/null +++ b/generated/slo/model_find_slo_definitions_response_one_of_1.go @@ -0,0 +1,311 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+
+package slo
+
+import (
+	"encoding/json"
+)
+
+// checks if the FindSloDefinitionsResponseOneOf1 type satisfies the MappedNullable interface at compile time
+var _ MappedNullable = &FindSloDefinitionsResponseOneOf1{}
+
+// FindSloDefinitionsResponseOneOf1 struct for FindSloDefinitionsResponseOneOf1
+type FindSloDefinitionsResponseOneOf1 struct {
+	// for backward compatibility
+	Page *float64 `json:"page,omitempty"`
+	// for backward compatibility
+	PerPage *float64 `json:"perPage,omitempty"`
+	Size *float64 `json:"size,omitempty"`
+	// the cursor to provide to get the next paged results
+	SearchAfter []string `json:"searchAfter,omitempty"`
+	Total *float64 `json:"total,omitempty"`
+	Results []SloWithSummaryResponse `json:"results,omitempty"`
+}
+
+// NewFindSloDefinitionsResponseOneOf1 instantiates a new FindSloDefinitionsResponseOneOf1 object
+// This constructor will assign default values to properties that have it defined,
+// and makes sure properties required by API are set, but the set of arguments
+// will change when the set of required properties is changed
+func NewFindSloDefinitionsResponseOneOf1() *FindSloDefinitionsResponseOneOf1 {
+	this := FindSloDefinitionsResponseOneOf1{}
+	var page float64 = 1
+	this.Page = &page
+	return &this
+}
+
+// NewFindSloDefinitionsResponseOneOf1WithDefaults instantiates a new FindSloDefinitionsResponseOneOf1 object
+// This constructor will only assign default values to properties that have it defined,
+// but it doesn't guarantee that properties required by API are set
+func NewFindSloDefinitionsResponseOneOf1WithDefaults() *FindSloDefinitionsResponseOneOf1 {
+	this := FindSloDefinitionsResponseOneOf1{}
+	var page float64 = 1
+	this.Page = &page
+	return &this
+}
+
+// GetPage returns the Page field value if set, zero value otherwise.
+func (o *FindSloDefinitionsResponseOneOf1) GetPage() float64 {
+	if o == nil || IsNil(o.Page) {
+		var ret float64
+		return ret
+	}
+	return *o.Page
+}
+
+// GetPageOk returns a tuple with the Page field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *FindSloDefinitionsResponseOneOf1) GetPageOk() (*float64, bool) {
+	if o == nil || IsNil(o.Page) {
+		return nil, false
+	}
+	return o.Page, true
+}
+
+// HasPage returns a boolean if a field has been set.
+func (o *FindSloDefinitionsResponseOneOf1) HasPage() bool {
+	if o != nil && !IsNil(o.Page) {
+		return true
+	}
+
+	return false
+}
+
+// SetPage gets a reference to the given float64 and assigns it to the Page field.
+func (o *FindSloDefinitionsResponseOneOf1) SetPage(v float64) {
+	o.Page = &v
+}
+
+// GetPerPage returns the PerPage field value if set, zero value otherwise.
+func (o *FindSloDefinitionsResponseOneOf1) GetPerPage() float64 {
+	if o == nil || IsNil(o.PerPage) {
+		var ret float64
+		return ret
+	}
+	return *o.PerPage
+}
+
+// GetPerPageOk returns a tuple with the PerPage field value if set, nil otherwise
+// and a boolean to check if the value has been set.
+func (o *FindSloDefinitionsResponseOneOf1) GetPerPageOk() (*float64, bool) {
+	if o == nil || IsNil(o.PerPage) {
+		return nil, false
+	}
+	return o.PerPage, true
+}
+
+// HasPerPage returns a boolean if a field has been set.
+func (o *FindSloDefinitionsResponseOneOf1) HasPerPage() bool {
+	if o != nil && !IsNil(o.PerPage) {
+		return true
+	}
+
+	return false
+}
+
+// SetPerPage gets a reference to the given float64 and assigns it to the PerPage field.
+func (o *FindSloDefinitionsResponseOneOf1) SetPerPage(v float64) { + o.PerPage = &v +} + +// GetSize returns the Size field value if set, zero value otherwise. +func (o *FindSloDefinitionsResponseOneOf1) GetSize() float64 { + if o == nil || IsNil(o.Size) { + var ret float64 + return ret + } + return *o.Size +} + +// GetSizeOk returns a tuple with the Size field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FindSloDefinitionsResponseOneOf1) GetSizeOk() (*float64, bool) { + if o == nil || IsNil(o.Size) { + return nil, false + } + return o.Size, true +} + +// HasSize returns a boolean if a field has been set. +func (o *FindSloDefinitionsResponseOneOf1) HasSize() bool { + if o != nil && !IsNil(o.Size) { + return true + } + + return false +} + +// SetSize gets a reference to the given float64 and assigns it to the Size field. +func (o *FindSloDefinitionsResponseOneOf1) SetSize(v float64) { + o.Size = &v +} + +// GetSearchAfter returns the SearchAfter field value if set, zero value otherwise. +func (o *FindSloDefinitionsResponseOneOf1) GetSearchAfter() []string { + if o == nil || IsNil(o.SearchAfter) { + var ret []string + return ret + } + return o.SearchAfter +} + +// GetSearchAfterOk returns a tuple with the SearchAfter field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FindSloDefinitionsResponseOneOf1) GetSearchAfterOk() ([]string, bool) { + if o == nil || IsNil(o.SearchAfter) { + return nil, false + } + return o.SearchAfter, true +} + +// HasSearchAfter returns a boolean if a field has been set. +func (o *FindSloDefinitionsResponseOneOf1) HasSearchAfter() bool { + if o != nil && !IsNil(o.SearchAfter) { + return true + } + + return false +} + +// SetSearchAfter gets a reference to the given []string and assigns it to the SearchAfter field. +func (o *FindSloDefinitionsResponseOneOf1) SetSearchAfter(v []string) { + o.SearchAfter = v +} + +// GetTotal returns the Total field value if set, zero value otherwise. +func (o *FindSloDefinitionsResponseOneOf1) GetTotal() float64 { + if o == nil || IsNil(o.Total) { + var ret float64 + return ret + } + return *o.Total +} + +// GetTotalOk returns a tuple with the Total field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FindSloDefinitionsResponseOneOf1) GetTotalOk() (*float64, bool) { + if o == nil || IsNil(o.Total) { + return nil, false + } + return o.Total, true +} + +// HasTotal returns a boolean if a field has been set. +func (o *FindSloDefinitionsResponseOneOf1) HasTotal() bool { + if o != nil && !IsNil(o.Total) { + return true + } + + return false +} + +// SetTotal gets a reference to the given float64 and assigns it to the Total field. +func (o *FindSloDefinitionsResponseOneOf1) SetTotal(v float64) { + o.Total = &v +} + +// GetResults returns the Results field value if set, zero value otherwise. +func (o *FindSloDefinitionsResponseOneOf1) GetResults() []SloWithSummaryResponse { + if o == nil || IsNil(o.Results) { + var ret []SloWithSummaryResponse + return ret + } + return o.Results +} + +// GetResultsOk returns a tuple with the Results field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FindSloDefinitionsResponseOneOf1) GetResultsOk() ([]SloWithSummaryResponse, bool) { + if o == nil || IsNil(o.Results) { + return nil, false + } + return o.Results, true +} + +// HasResults returns a boolean if a field has been set. 
+func (o *FindSloDefinitionsResponseOneOf1) HasResults() bool { + if o != nil && !IsNil(o.Results) { + return true + } + + return false +} + +// SetResults gets a reference to the given []SloWithSummaryResponse and assigns it to the Results field. +func (o *FindSloDefinitionsResponseOneOf1) SetResults(v []SloWithSummaryResponse) { + o.Results = v +} + +func (o FindSloDefinitionsResponseOneOf1) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o FindSloDefinitionsResponseOneOf1) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.Page) { + toSerialize["page"] = o.Page + } + if !IsNil(o.PerPage) { + toSerialize["perPage"] = o.PerPage + } + if !IsNil(o.Size) { + toSerialize["size"] = o.Size + } + if !IsNil(o.SearchAfter) { + toSerialize["searchAfter"] = o.SearchAfter + } + if !IsNil(o.Total) { + toSerialize["total"] = o.Total + } + if !IsNil(o.Results) { + toSerialize["results"] = o.Results + } + return toSerialize, nil +} + +type NullableFindSloDefinitionsResponseOneOf1 struct { + value *FindSloDefinitionsResponseOneOf1 + isSet bool +} + +func (v NullableFindSloDefinitionsResponseOneOf1) Get() *FindSloDefinitionsResponseOneOf1 { + return v.value +} + +func (v *NullableFindSloDefinitionsResponseOneOf1) Set(val *FindSloDefinitionsResponseOneOf1) { + v.value = val + v.isSet = true +} + +func (v NullableFindSloDefinitionsResponseOneOf1) IsSet() bool { + return v.isSet +} + +func (v *NullableFindSloDefinitionsResponseOneOf1) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableFindSloDefinitionsResponseOneOf1(val *FindSloDefinitionsResponseOneOf1) *NullableFindSloDefinitionsResponseOneOf1 { + return &NullableFindSloDefinitionsResponseOneOf1{value: val, isSet: true} +} + +func (v NullableFindSloDefinitionsResponseOneOf1) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableFindSloDefinitionsResponseOneOf1) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_find_slo_response.go b/generated/slo/model_find_slo_response.go index fe90673e2..8937d77c8 100644 --- a/generated/slo/model_find_slo_response.go +++ b/generated/slo/model_find_slo_response.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -19,10 +19,13 @@ var _ MappedNullable = &FindSloResponse{} // FindSloResponse A paginated response of SLOs matching the query. type FindSloResponse struct { - Page *float64 `json:"page,omitempty"` - PerPage *float64 `json:"perPage,omitempty"` - Total *float64 `json:"total,omitempty"` - Results []SloResponse `json:"results,omitempty"` + // Size provided for cursor based pagination + Size *float64 `json:"size,omitempty"` + SearchAfter *string `json:"searchAfter,omitempty"` + Page *float64 `json:"page,omitempty"` + PerPage *float64 `json:"perPage,omitempty"` + Total *float64 `json:"total,omitempty"` + Results []SloWithSummaryResponse `json:"results,omitempty"` } // NewFindSloResponse instantiates a new FindSloResponse object @@ -42,6 +45,70 @@ func NewFindSloResponseWithDefaults() *FindSloResponse { return &this } +// GetSize returns the Size field value if set, zero value otherwise. 
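+//
+// Illustrative sketch only, not generator output: Size and SearchAfter are the
+// new cursor-based pagination fields; "resp" below is a hypothetical
+// *FindSloResponse value.
+//
+//	if resp.HasSearchAfter() {
+//		next := resp.GetSearchAfter() // cursor for the next find request
+//		_ = next
+//	}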
+func (o *FindSloResponse) GetSize() float64 { + if o == nil || IsNil(o.Size) { + var ret float64 + return ret + } + return *o.Size +} + +// GetSizeOk returns a tuple with the Size field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FindSloResponse) GetSizeOk() (*float64, bool) { + if o == nil || IsNil(o.Size) { + return nil, false + } + return o.Size, true +} + +// HasSize returns a boolean if a field has been set. +func (o *FindSloResponse) HasSize() bool { + if o != nil && !IsNil(o.Size) { + return true + } + + return false +} + +// SetSize gets a reference to the given float64 and assigns it to the Size field. +func (o *FindSloResponse) SetSize(v float64) { + o.Size = &v +} + +// GetSearchAfter returns the SearchAfter field value if set, zero value otherwise. +func (o *FindSloResponse) GetSearchAfter() string { + if o == nil || IsNil(o.SearchAfter) { + var ret string + return ret + } + return *o.SearchAfter +} + +// GetSearchAfterOk returns a tuple with the SearchAfter field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *FindSloResponse) GetSearchAfterOk() (*string, bool) { + if o == nil || IsNil(o.SearchAfter) { + return nil, false + } + return o.SearchAfter, true +} + +// HasSearchAfter returns a boolean if a field has been set. +func (o *FindSloResponse) HasSearchAfter() bool { + if o != nil && !IsNil(o.SearchAfter) { + return true + } + + return false +} + +// SetSearchAfter gets a reference to the given string and assigns it to the SearchAfter field. +func (o *FindSloResponse) SetSearchAfter(v string) { + o.SearchAfter = &v +} + // GetPage returns the Page field value if set, zero value otherwise. func (o *FindSloResponse) GetPage() float64 { if o == nil || IsNil(o.Page) { @@ -139,9 +206,9 @@ func (o *FindSloResponse) SetTotal(v float64) { } // GetResults returns the Results field value if set, zero value otherwise. -func (o *FindSloResponse) GetResults() []SloResponse { +func (o *FindSloResponse) GetResults() []SloWithSummaryResponse { if o == nil || IsNil(o.Results) { - var ret []SloResponse + var ret []SloWithSummaryResponse return ret } return o.Results @@ -149,7 +216,7 @@ func (o *FindSloResponse) GetResults() []SloResponse { // GetResultsOk returns a tuple with the Results field value if set, nil otherwise // and a boolean to check if the value has been set. -func (o *FindSloResponse) GetResultsOk() ([]SloResponse, bool) { +func (o *FindSloResponse) GetResultsOk() ([]SloWithSummaryResponse, bool) { if o == nil || IsNil(o.Results) { return nil, false } @@ -165,8 +232,8 @@ func (o *FindSloResponse) HasResults() bool { return false } -// SetResults gets a reference to the given []SloResponse and assigns it to the Results field. -func (o *FindSloResponse) SetResults(v []SloResponse) { +// SetResults gets a reference to the given []SloWithSummaryResponse and assigns it to the Results field. 
+func (o *FindSloResponse) SetResults(v []SloWithSummaryResponse) { o.Results = v } @@ -180,6 +247,12 @@ func (o FindSloResponse) MarshalJSON() ([]byte, error) { func (o FindSloResponse) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} + if !IsNil(o.Size) { + toSerialize["size"] = o.Size + } + if !IsNil(o.SearchAfter) { + toSerialize["searchAfter"] = o.SearchAfter + } if !IsNil(o.Page) { toSerialize["page"] = o.Page } diff --git a/generated/slo/model_slo_response_group_by.go b/generated/slo/model_group_by.go similarity index 53% rename from generated/slo/model_slo_response_group_by.go rename to generated/slo/model_group_by.go index 1c7fd3f9c..c6a486938 100644 --- a/generated/slo/model_slo_response_group_by.go +++ b/generated/slo/model_group_by.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -15,35 +15,35 @@ import ( "fmt" ) -// SloResponseGroupBy - optional group by field to use to generate an SLO per distinct value -type SloResponseGroupBy struct { +// GroupBy - optional group by field or fields to use to generate an SLO per distinct value +type GroupBy struct { ArrayOfString *[]string String *string } -// []stringAsSloResponseGroupBy is a convenience function that returns []string wrapped in SloResponseGroupBy -func ArrayOfStringAsSloResponseGroupBy(v *[]string) SloResponseGroupBy { - return SloResponseGroupBy{ +// []stringAsGroupBy is a convenience function that returns []string wrapped in GroupBy +func ArrayOfStringAsGroupBy(v *[]string) GroupBy { + return GroupBy{ ArrayOfString: v, } } -// stringAsSloResponseGroupBy is a convenience function that returns string wrapped in SloResponseGroupBy -func StringAsSloResponseGroupBy(v *string) SloResponseGroupBy { - return SloResponseGroupBy{ +// stringAsGroupBy is a convenience function that returns string wrapped in GroupBy +func StringAsGroupBy(v *string) GroupBy { + return GroupBy{ String: v, } } // Unmarshal JSON data into one of the pointers in the struct -func (dst *SloResponseGroupBy) UnmarshalJSON(data []byte) error { +func (dst *GroupBy) UnmarshalJSON(data []byte) error { var err error match := 0 // try to unmarshal data into ArrayOfString err = json.Unmarshal(data, &dst.ArrayOfString) if err == nil { - jsonArraystring, _ := json.Marshal(dst.ArrayOfString) - if string(jsonArraystring) == "{}" { // empty struct + jsonArray, _ := json.Marshal(dst.ArrayOfString) + if string(jsonArray) == "{}" { // empty struct dst.ArrayOfString = nil } else { match++ @@ -70,16 +70,16 @@ func (dst *SloResponseGroupBy) UnmarshalJSON(data []byte) error { dst.ArrayOfString = nil dst.String = nil - return fmt.Errorf("data matches more than one schema in oneOf(SloResponseGroupBy)") + return fmt.Errorf("data matches more than one schema in oneOf(GroupBy)") } else if match == 1 { return nil // exactly one match } else { // no match - return fmt.Errorf("data failed to match schemas in oneOf(SloResponseGroupBy)") + return fmt.Errorf("data failed to match schemas in oneOf(GroupBy)") } } // Marshal data from the first non-nil pointers in the struct to JSON -func (src SloResponseGroupBy) MarshalJSON() ([]byte, error) { +func (src GroupBy) MarshalJSON() ([]byte, error) { if src.ArrayOfString != nil { return json.Marshal(&src.ArrayOfString) } @@ -92,7 +92,7 @@ func (src SloResponseGroupBy) MarshalJSON() ([]byte, error) { } // Get the actual instance -func (obj *SloResponseGroupBy) GetActualInstance() 
interface{} { +func (obj *GroupBy) GetActualInstance() interface{} { if obj == nil { return nil } @@ -108,38 +108,38 @@ func (obj *SloResponseGroupBy) GetActualInstance() interface{} { return nil } -type NullableSloResponseGroupBy struct { - value *SloResponseGroupBy +type NullableGroupBy struct { + value *GroupBy isSet bool } -func (v NullableSloResponseGroupBy) Get() *SloResponseGroupBy { +func (v NullableGroupBy) Get() *GroupBy { return v.value } -func (v *NullableSloResponseGroupBy) Set(val *SloResponseGroupBy) { +func (v *NullableGroupBy) Set(val *GroupBy) { v.value = val v.isSet = true } -func (v NullableSloResponseGroupBy) IsSet() bool { +func (v NullableGroupBy) IsSet() bool { return v.isSet } -func (v *NullableSloResponseGroupBy) Unset() { +func (v *NullableGroupBy) Unset() { v.value = nil v.isSet = false } -func NewNullableSloResponseGroupBy(val *SloResponseGroupBy) *NullableSloResponseGroupBy { - return &NullableSloResponseGroupBy{value: val, isSet: true} +func NewNullableGroupBy(val *GroupBy) *NullableGroupBy { + return &NullableGroupBy{value: val, isSet: true} } -func (v NullableSloResponseGroupBy) MarshalJSON() ([]byte, error) { +func (v NullableGroupBy) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } -func (v *NullableSloResponseGroupBy) UnmarshalJSON(src []byte) error { +func (v *NullableGroupBy) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } diff --git a/generated/slo/model_historical_summary_request.go b/generated/slo/model_historical_summary_request.go deleted file mode 100644 index a06576baa..000000000 --- a/generated/slo/model_historical_summary_request.go +++ /dev/null @@ -1,116 +0,0 @@ -/* -SLOs - -OpenAPI schema for SLOs endpoints - -API version: 1.0 -*/ - -// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. - -package slo - -import ( - "encoding/json" -) - -// checks if the HistoricalSummaryRequest type satisfies the MappedNullable interface at compile time -var _ MappedNullable = &HistoricalSummaryRequest{} - -// HistoricalSummaryRequest struct for HistoricalSummaryRequest -type HistoricalSummaryRequest struct { - // The list of SLO identifiers to get the historical summary for - List []string `json:"list"` -} - -// NewHistoricalSummaryRequest instantiates a new HistoricalSummaryRequest object -// This constructor will assign default values to properties that have it defined, -// and makes sure properties required by API are set, but the set of arguments -// will change when the set of required properties is changed -func NewHistoricalSummaryRequest(list []string) *HistoricalSummaryRequest { - this := HistoricalSummaryRequest{} - this.List = list - return &this -} - -// NewHistoricalSummaryRequestWithDefaults instantiates a new HistoricalSummaryRequest object -// This constructor will only assign default values to properties that have it defined, -// but it doesn't guarantee that properties required by API are set -func NewHistoricalSummaryRequestWithDefaults() *HistoricalSummaryRequest { - this := HistoricalSummaryRequest{} - return &this -} - -// GetList returns the List field value -func (o *HistoricalSummaryRequest) GetList() []string { - if o == nil { - var ret []string - return ret - } - - return o.List -} - -// GetListOk returns a tuple with the List field value -// and a boolean to check if the value has been set. 
-func (o *HistoricalSummaryRequest) GetListOk() ([]string, bool) { - if o == nil { - return nil, false - } - return o.List, true -} - -// SetList sets field value -func (o *HistoricalSummaryRequest) SetList(v []string) { - o.List = v -} - -func (o HistoricalSummaryRequest) MarshalJSON() ([]byte, error) { - toSerialize, err := o.ToMap() - if err != nil { - return []byte{}, err - } - return json.Marshal(toSerialize) -} - -func (o HistoricalSummaryRequest) ToMap() (map[string]interface{}, error) { - toSerialize := map[string]interface{}{} - toSerialize["list"] = o.List - return toSerialize, nil -} - -type NullableHistoricalSummaryRequest struct { - value *HistoricalSummaryRequest - isSet bool -} - -func (v NullableHistoricalSummaryRequest) Get() *HistoricalSummaryRequest { - return v.value -} - -func (v *NullableHistoricalSummaryRequest) Set(val *HistoricalSummaryRequest) { - v.value = val - v.isSet = true -} - -func (v NullableHistoricalSummaryRequest) IsSet() bool { - return v.isSet -} - -func (v *NullableHistoricalSummaryRequest) Unset() { - v.value = nil - v.isSet = false -} - -func NewNullableHistoricalSummaryRequest(val *HistoricalSummaryRequest) *NullableHistoricalSummaryRequest { - return &NullableHistoricalSummaryRequest{value: val, isSet: true} -} - -func (v NullableHistoricalSummaryRequest) MarshalJSON() ([]byte, error) { - return json.Marshal(v.value) -} - -func (v *NullableHistoricalSummaryRequest) UnmarshalJSON(src []byte) error { - v.isSet = true - return json.Unmarshal(src, &v.value) -} diff --git a/generated/slo/model_historical_summary_response_inner.go b/generated/slo/model_historical_summary_response_inner.go deleted file mode 100644 index 6a87116d2..000000000 --- a/generated/slo/model_historical_summary_response_inner.go +++ /dev/null @@ -1,232 +0,0 @@ -/* -SLOs - -OpenAPI schema for SLOs endpoints - -API version: 1.0 -*/ - -// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. - -package slo - -import ( - "encoding/json" -) - -// checks if the HistoricalSummaryResponseInner type satisfies the MappedNullable interface at compile time -var _ MappedNullable = &HistoricalSummaryResponseInner{} - -// HistoricalSummaryResponseInner struct for HistoricalSummaryResponseInner -type HistoricalSummaryResponseInner struct { - Date *string `json:"date,omitempty"` - Status *SummaryStatus `json:"status,omitempty"` - SliValue *float64 `json:"sliValue,omitempty"` - ErrorBudget *ErrorBudget `json:"errorBudget,omitempty"` -} - -// NewHistoricalSummaryResponseInner instantiates a new HistoricalSummaryResponseInner object -// This constructor will assign default values to properties that have it defined, -// and makes sure properties required by API are set, but the set of arguments -// will change when the set of required properties is changed -func NewHistoricalSummaryResponseInner() *HistoricalSummaryResponseInner { - this := HistoricalSummaryResponseInner{} - return &this -} - -// NewHistoricalSummaryResponseInnerWithDefaults instantiates a new HistoricalSummaryResponseInner object -// This constructor will only assign default values to properties that have it defined, -// but it doesn't guarantee that properties required by API are set -func NewHistoricalSummaryResponseInnerWithDefaults() *HistoricalSummaryResponseInner { - this := HistoricalSummaryResponseInner{} - return &this -} - -// GetDate returns the Date field value if set, zero value otherwise. 
-func (o *HistoricalSummaryResponseInner) GetDate() string { - if o == nil || IsNil(o.Date) { - var ret string - return ret - } - return *o.Date -} - -// GetDateOk returns a tuple with the Date field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *HistoricalSummaryResponseInner) GetDateOk() (*string, bool) { - if o == nil || IsNil(o.Date) { - return nil, false - } - return o.Date, true -} - -// HasDate returns a boolean if a field has been set. -func (o *HistoricalSummaryResponseInner) HasDate() bool { - if o != nil && !IsNil(o.Date) { - return true - } - - return false -} - -// SetDate gets a reference to the given string and assigns it to the Date field. -func (o *HistoricalSummaryResponseInner) SetDate(v string) { - o.Date = &v -} - -// GetStatus returns the Status field value if set, zero value otherwise. -func (o *HistoricalSummaryResponseInner) GetStatus() SummaryStatus { - if o == nil || IsNil(o.Status) { - var ret SummaryStatus - return ret - } - return *o.Status -} - -// GetStatusOk returns a tuple with the Status field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *HistoricalSummaryResponseInner) GetStatusOk() (*SummaryStatus, bool) { - if o == nil || IsNil(o.Status) { - return nil, false - } - return o.Status, true -} - -// HasStatus returns a boolean if a field has been set. -func (o *HistoricalSummaryResponseInner) HasStatus() bool { - if o != nil && !IsNil(o.Status) { - return true - } - - return false -} - -// SetStatus gets a reference to the given SummaryStatus and assigns it to the Status field. -func (o *HistoricalSummaryResponseInner) SetStatus(v SummaryStatus) { - o.Status = &v -} - -// GetSliValue returns the SliValue field value if set, zero value otherwise. -func (o *HistoricalSummaryResponseInner) GetSliValue() float64 { - if o == nil || IsNil(o.SliValue) { - var ret float64 - return ret - } - return *o.SliValue -} - -// GetSliValueOk returns a tuple with the SliValue field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *HistoricalSummaryResponseInner) GetSliValueOk() (*float64, bool) { - if o == nil || IsNil(o.SliValue) { - return nil, false - } - return o.SliValue, true -} - -// HasSliValue returns a boolean if a field has been set. -func (o *HistoricalSummaryResponseInner) HasSliValue() bool { - if o != nil && !IsNil(o.SliValue) { - return true - } - - return false -} - -// SetSliValue gets a reference to the given float64 and assigns it to the SliValue field. -func (o *HistoricalSummaryResponseInner) SetSliValue(v float64) { - o.SliValue = &v -} - -// GetErrorBudget returns the ErrorBudget field value if set, zero value otherwise. -func (o *HistoricalSummaryResponseInner) GetErrorBudget() ErrorBudget { - if o == nil || IsNil(o.ErrorBudget) { - var ret ErrorBudget - return ret - } - return *o.ErrorBudget -} - -// GetErrorBudgetOk returns a tuple with the ErrorBudget field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *HistoricalSummaryResponseInner) GetErrorBudgetOk() (*ErrorBudget, bool) { - if o == nil || IsNil(o.ErrorBudget) { - return nil, false - } - return o.ErrorBudget, true -} - -// HasErrorBudget returns a boolean if a field has been set. 
-func (o *HistoricalSummaryResponseInner) HasErrorBudget() bool { - if o != nil && !IsNil(o.ErrorBudget) { - return true - } - - return false -} - -// SetErrorBudget gets a reference to the given ErrorBudget and assigns it to the ErrorBudget field. -func (o *HistoricalSummaryResponseInner) SetErrorBudget(v ErrorBudget) { - o.ErrorBudget = &v -} - -func (o HistoricalSummaryResponseInner) MarshalJSON() ([]byte, error) { - toSerialize, err := o.ToMap() - if err != nil { - return []byte{}, err - } - return json.Marshal(toSerialize) -} - -func (o HistoricalSummaryResponseInner) ToMap() (map[string]interface{}, error) { - toSerialize := map[string]interface{}{} - if !IsNil(o.Date) { - toSerialize["date"] = o.Date - } - if !IsNil(o.Status) { - toSerialize["status"] = o.Status - } - if !IsNil(o.SliValue) { - toSerialize["sliValue"] = o.SliValue - } - if !IsNil(o.ErrorBudget) { - toSerialize["errorBudget"] = o.ErrorBudget - } - return toSerialize, nil -} - -type NullableHistoricalSummaryResponseInner struct { - value *HistoricalSummaryResponseInner - isSet bool -} - -func (v NullableHistoricalSummaryResponseInner) Get() *HistoricalSummaryResponseInner { - return v.value -} - -func (v *NullableHistoricalSummaryResponseInner) Set(val *HistoricalSummaryResponseInner) { - v.value = val - v.isSet = true -} - -func (v NullableHistoricalSummaryResponseInner) IsSet() bool { - return v.isSet -} - -func (v *NullableHistoricalSummaryResponseInner) Unset() { - v.value = nil - v.isSet = false -} - -func NewNullableHistoricalSummaryResponseInner(val *HistoricalSummaryResponseInner) *NullableHistoricalSummaryResponseInner { - return &NullableHistoricalSummaryResponseInner{value: val, isSet: true} -} - -func (v NullableHistoricalSummaryResponseInner) MarshalJSON() ([]byte, error) { - return json.Marshal(v.value) -} - -func (v *NullableHistoricalSummaryResponseInner) UnmarshalJSON(src []byte) error { - v.isSet = true - return json.Unmarshal(src, &v.value) -} diff --git a/generated/slo/model_indicator_properties_apm_availability.go b/generated/slo/model_indicator_properties_apm_availability.go index a449183cc..376e00abb 100644 --- a/generated/slo/model_indicator_properties_apm_availability.go +++ b/generated/slo/model_indicator_properties_apm_availability.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_indicator_properties_apm_availability_params.go b/generated/slo/model_indicator_properties_apm_availability_params.go index 5872135c1..7f6b63959 100644 --- a/generated/slo/model_indicator_properties_apm_availability_params.go +++ b/generated/slo/model_indicator_properties_apm_availability_params.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_indicator_properties_apm_latency.go b/generated/slo/model_indicator_properties_apm_latency.go index 5876d056a..29fe5f4b6 100644 --- a/generated/slo/model_indicator_properties_apm_latency.go +++ b/generated/slo/model_indicator_properties_apm_latency.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
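As context for the model_slo_response_group_by.go -> model_group_by.go rename earlier in this patch, below is a minimal sketch of how the GroupBy oneOf wrapper can hold either a single field or a list of fields and how each form serializes. It assumes the generated package is imported from github.com/elastic/terraform-provider-elasticstack/generated/slo; the group-by field names are illustrative only.

package main

import (
	"fmt"

	"github.com/elastic/terraform-provider-elasticstack/generated/slo"
)

func main() {
	// Wrap a single group-by field; this serializes as a JSON string.
	field := "host.name"
	single := slo.StringAsGroupBy(&field)

	// Wrap multiple group-by fields; this serializes as a JSON array.
	fields := []string{"host.name", "service.name"}
	multi := slo.ArrayOfStringAsGroupBy(&fields)

	for _, g := range []slo.GroupBy{single, multi} {
		b, err := g.MarshalJSON()
		if err != nil {
			panic(err)
		}
		fmt.Println(string(b)) // "host.name" and ["host.name","service.name"]
	}
}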
diff --git a/generated/slo/model_indicator_properties_apm_latency_params.go b/generated/slo/model_indicator_properties_apm_latency_params.go index 74c484f39..00afb00bd 100644 --- a/generated/slo/model_indicator_properties_apm_latency_params.go +++ b/generated/slo/model_indicator_properties_apm_latency_params.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_indicator_properties_custom_kql.go b/generated/slo/model_indicator_properties_custom_kql.go index 657856d3f..c2ca95e32 100644 --- a/generated/slo/model_indicator_properties_custom_kql.go +++ b/generated/slo/model_indicator_properties_custom_kql.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -17,7 +17,7 @@ import ( // checks if the IndicatorPropertiesCustomKql type satisfies the MappedNullable interface at compile time var _ MappedNullable = &IndicatorPropertiesCustomKql{} -// IndicatorPropertiesCustomKql Defines properties for a custom KQL indicator type +// IndicatorPropertiesCustomKql Defines properties for a custom query indicator type type IndicatorPropertiesCustomKql struct { Params IndicatorPropertiesCustomKqlParams `json:"params"` // The type of indicator. diff --git a/generated/slo/model_indicator_properties_custom_kql_params.go b/generated/slo/model_indicator_properties_custom_kql_params.go index e84d0b41d..8920b91b8 100644 --- a/generated/slo/model_indicator_properties_custom_kql_params.go +++ b/generated/slo/model_indicator_properties_custom_kql_params.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -21,12 +21,11 @@ var _ MappedNullable = &IndicatorPropertiesCustomKqlParams{} type IndicatorPropertiesCustomKqlParams struct { // The index or index pattern to use Index string `json:"index"` - // the KQL query to filter the documents with. - Filter *string `json:"filter,omitempty"` - // the KQL query used to define the good events. - Good string `json:"good"` - // the KQL query used to define all events. - Total string `json:"total"` + // The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. + DataViewId *string `json:"dataViewId,omitempty"` + Filter *KqlWithFilters `json:"filter,omitempty"` + Good KqlWithFiltersGood `json:"good"` + Total KqlWithFiltersTotal `json:"total"` // The timestamp field used in the source indice. 
TimestampField string `json:"timestampField"` } @@ -35,7 +34,7 @@ type IndicatorPropertiesCustomKqlParams struct { // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed -func NewIndicatorPropertiesCustomKqlParams(index string, good string, total string, timestampField string) *IndicatorPropertiesCustomKqlParams { +func NewIndicatorPropertiesCustomKqlParams(index string, good KqlWithFiltersGood, total KqlWithFiltersTotal, timestampField string) *IndicatorPropertiesCustomKqlParams { this := IndicatorPropertiesCustomKqlParams{} this.Index = index this.Good = good @@ -76,10 +75,42 @@ func (o *IndicatorPropertiesCustomKqlParams) SetIndex(v string) { o.Index = v } +// GetDataViewId returns the DataViewId field value if set, zero value otherwise. +func (o *IndicatorPropertiesCustomKqlParams) GetDataViewId() string { + if o == nil || IsNil(o.DataViewId) { + var ret string + return ret + } + return *o.DataViewId +} + +// GetDataViewIdOk returns a tuple with the DataViewId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *IndicatorPropertiesCustomKqlParams) GetDataViewIdOk() (*string, bool) { + if o == nil || IsNil(o.DataViewId) { + return nil, false + } + return o.DataViewId, true +} + +// HasDataViewId returns a boolean if a field has been set. +func (o *IndicatorPropertiesCustomKqlParams) HasDataViewId() bool { + if o != nil && !IsNil(o.DataViewId) { + return true + } + + return false +} + +// SetDataViewId gets a reference to the given string and assigns it to the DataViewId field. +func (o *IndicatorPropertiesCustomKqlParams) SetDataViewId(v string) { + o.DataViewId = &v +} + // GetFilter returns the Filter field value if set, zero value otherwise. -func (o *IndicatorPropertiesCustomKqlParams) GetFilter() string { +func (o *IndicatorPropertiesCustomKqlParams) GetFilter() KqlWithFilters { if o == nil || IsNil(o.Filter) { - var ret string + var ret KqlWithFilters return ret } return *o.Filter @@ -87,7 +118,7 @@ func (o *IndicatorPropertiesCustomKqlParams) GetFilter() string { // GetFilterOk returns a tuple with the Filter field value if set, nil otherwise // and a boolean to check if the value has been set. -func (o *IndicatorPropertiesCustomKqlParams) GetFilterOk() (*string, bool) { +func (o *IndicatorPropertiesCustomKqlParams) GetFilterOk() (*KqlWithFilters, bool) { if o == nil || IsNil(o.Filter) { return nil, false } @@ -103,15 +134,15 @@ func (o *IndicatorPropertiesCustomKqlParams) HasFilter() bool { return false } -// SetFilter gets a reference to the given string and assigns it to the Filter field. -func (o *IndicatorPropertiesCustomKqlParams) SetFilter(v string) { +// SetFilter gets a reference to the given KqlWithFilters and assigns it to the Filter field. +func (o *IndicatorPropertiesCustomKqlParams) SetFilter(v KqlWithFilters) { o.Filter = &v } // GetGood returns the Good field value -func (o *IndicatorPropertiesCustomKqlParams) GetGood() string { +func (o *IndicatorPropertiesCustomKqlParams) GetGood() KqlWithFiltersGood { if o == nil { - var ret string + var ret KqlWithFiltersGood return ret } @@ -120,7 +151,7 @@ func (o *IndicatorPropertiesCustomKqlParams) GetGood() string { // GetGoodOk returns a tuple with the Good field value // and a boolean to check if the value has been set. 
-func (o *IndicatorPropertiesCustomKqlParams) GetGoodOk() (*string, bool) { +func (o *IndicatorPropertiesCustomKqlParams) GetGoodOk() (*KqlWithFiltersGood, bool) { if o == nil { return nil, false } @@ -128,14 +159,14 @@ func (o *IndicatorPropertiesCustomKqlParams) GetGoodOk() (*string, bool) { } // SetGood sets field value -func (o *IndicatorPropertiesCustomKqlParams) SetGood(v string) { +func (o *IndicatorPropertiesCustomKqlParams) SetGood(v KqlWithFiltersGood) { o.Good = v } // GetTotal returns the Total field value -func (o *IndicatorPropertiesCustomKqlParams) GetTotal() string { +func (o *IndicatorPropertiesCustomKqlParams) GetTotal() KqlWithFiltersTotal { if o == nil { - var ret string + var ret KqlWithFiltersTotal return ret } @@ -144,7 +175,7 @@ func (o *IndicatorPropertiesCustomKqlParams) GetTotal() string { // GetTotalOk returns a tuple with the Total field value // and a boolean to check if the value has been set. -func (o *IndicatorPropertiesCustomKqlParams) GetTotalOk() (*string, bool) { +func (o *IndicatorPropertiesCustomKqlParams) GetTotalOk() (*KqlWithFiltersTotal, bool) { if o == nil { return nil, false } @@ -152,7 +183,7 @@ func (o *IndicatorPropertiesCustomKqlParams) GetTotalOk() (*string, bool) { } // SetTotal sets field value -func (o *IndicatorPropertiesCustomKqlParams) SetTotal(v string) { +func (o *IndicatorPropertiesCustomKqlParams) SetTotal(v KqlWithFiltersTotal) { o.Total = v } @@ -191,6 +222,9 @@ func (o IndicatorPropertiesCustomKqlParams) MarshalJSON() ([]byte, error) { func (o IndicatorPropertiesCustomKqlParams) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} toSerialize["index"] = o.Index + if !IsNil(o.DataViewId) { + toSerialize["dataViewId"] = o.DataViewId + } if !IsNil(o.Filter) { toSerialize["filter"] = o.Filter } diff --git a/generated/slo/model_indicator_properties_custom_metric.go b/generated/slo/model_indicator_properties_custom_metric.go index 80b194643..0c611620e 100644 --- a/generated/slo/model_indicator_properties_custom_metric.go +++ b/generated/slo/model_indicator_properties_custom_metric.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_indicator_properties_custom_metric_params.go b/generated/slo/model_indicator_properties_custom_metric_params.go index 88f7cc8db..d866a92db 100644 --- a/generated/slo/model_indicator_properties_custom_metric_params.go +++ b/generated/slo/model_indicator_properties_custom_metric_params.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -21,6 +21,8 @@ var _ MappedNullable = &IndicatorPropertiesCustomMetricParams{} type IndicatorPropertiesCustomMetricParams struct { // The index or index pattern to use Index string `json:"index"` + // The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. + DataViewId *string `json:"dataViewId,omitempty"` // the KQL query to filter the documents with. Filter *string `json:"filter,omitempty"` // The timestamp field used in the source indice. 
@@ -74,6 +76,38 @@ func (o *IndicatorPropertiesCustomMetricParams) SetIndex(v string) { o.Index = v } +// GetDataViewId returns the DataViewId field value if set, zero value otherwise. +func (o *IndicatorPropertiesCustomMetricParams) GetDataViewId() string { + if o == nil || IsNil(o.DataViewId) { + var ret string + return ret + } + return *o.DataViewId +} + +// GetDataViewIdOk returns a tuple with the DataViewId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *IndicatorPropertiesCustomMetricParams) GetDataViewIdOk() (*string, bool) { + if o == nil || IsNil(o.DataViewId) { + return nil, false + } + return o.DataViewId, true +} + +// HasDataViewId returns a boolean if a field has been set. +func (o *IndicatorPropertiesCustomMetricParams) HasDataViewId() bool { + if o != nil && !IsNil(o.DataViewId) { + return true + } + + return false +} + +// SetDataViewId gets a reference to the given string and assigns it to the DataViewId field. +func (o *IndicatorPropertiesCustomMetricParams) SetDataViewId(v string) { + o.DataViewId = &v +} + // GetFilter returns the Filter field value if set, zero value otherwise. func (o *IndicatorPropertiesCustomMetricParams) GetFilter() string { if o == nil || IsNil(o.Filter) { @@ -189,6 +223,9 @@ func (o IndicatorPropertiesCustomMetricParams) MarshalJSON() ([]byte, error) { func (o IndicatorPropertiesCustomMetricParams) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} toSerialize["index"] = o.Index + if !IsNil(o.DataViewId) { + toSerialize["dataViewId"] = o.DataViewId + } if !IsNil(o.Filter) { toSerialize["filter"] = o.Filter } diff --git a/generated/slo/model_indicator_properties_custom_metric_params_good.go b/generated/slo/model_indicator_properties_custom_metric_params_good.go index ada18b405..a3ae2db5f 100644 --- a/generated/slo/model_indicator_properties_custom_metric_params_good.go +++ b/generated/slo/model_indicator_properties_custom_metric_params_good.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_indicator_properties_custom_metric_params_good_metrics_inner.go b/generated/slo/model_indicator_properties_custom_metric_params_good_metrics_inner.go index 59c91d2fb..cb477e125 100644 --- a/generated/slo/model_indicator_properties_custom_metric_params_good_metrics_inner.go +++ b/generated/slo/model_indicator_properties_custom_metric_params_good_metrics_inner.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -21,7 +21,7 @@ var _ MappedNullable = &IndicatorPropertiesCustomMetricParamsGoodMetricsInner{} type IndicatorPropertiesCustomMetricParamsGoodMetricsInner struct { // The name of the metric. Only valid options are A-Z Name string `json:"name"` - // The aggregation type of the metric. Only valid option is \"sum\" + // The aggregation type of the metric. Aggregation string `json:"aggregation"` // The field of the metric. 
Field string `json:"field"` diff --git a/generated/slo/model_indicator_properties_custom_metric_params_total.go b/generated/slo/model_indicator_properties_custom_metric_params_total.go index 58d6d255b..5fa26b8ec 100644 --- a/generated/slo/model_indicator_properties_custom_metric_params_total.go +++ b/generated/slo/model_indicator_properties_custom_metric_params_total.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -20,7 +20,7 @@ var _ MappedNullable = &IndicatorPropertiesCustomMetricParamsTotal{} // IndicatorPropertiesCustomMetricParamsTotal An object defining the \"total\" metrics and equation type IndicatorPropertiesCustomMetricParamsTotal struct { // List of metrics with their name, aggregation type, and field. - Metrics []IndicatorPropertiesCustomMetricParamsTotalMetricsInner `json:"metrics"` + Metrics []IndicatorPropertiesCustomMetricParamsGoodMetricsInner `json:"metrics"` // The equation to calculate the \"total\" metric. Equation string `json:"equation"` } @@ -29,7 +29,7 @@ type IndicatorPropertiesCustomMetricParamsTotal struct { // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed -func NewIndicatorPropertiesCustomMetricParamsTotal(metrics []IndicatorPropertiesCustomMetricParamsTotalMetricsInner, equation string) *IndicatorPropertiesCustomMetricParamsTotal { +func NewIndicatorPropertiesCustomMetricParamsTotal(metrics []IndicatorPropertiesCustomMetricParamsGoodMetricsInner, equation string) *IndicatorPropertiesCustomMetricParamsTotal { this := IndicatorPropertiesCustomMetricParamsTotal{} this.Metrics = metrics this.Equation = equation @@ -45,9 +45,9 @@ func NewIndicatorPropertiesCustomMetricParamsTotalWithDefaults() *IndicatorPrope } // GetMetrics returns the Metrics field value -func (o *IndicatorPropertiesCustomMetricParamsTotal) GetMetrics() []IndicatorPropertiesCustomMetricParamsTotalMetricsInner { +func (o *IndicatorPropertiesCustomMetricParamsTotal) GetMetrics() []IndicatorPropertiesCustomMetricParamsGoodMetricsInner { if o == nil { - var ret []IndicatorPropertiesCustomMetricParamsTotalMetricsInner + var ret []IndicatorPropertiesCustomMetricParamsGoodMetricsInner return ret } @@ -56,7 +56,7 @@ func (o *IndicatorPropertiesCustomMetricParamsTotal) GetMetrics() []IndicatorPro // GetMetricsOk returns a tuple with the Metrics field value // and a boolean to check if the value has been set. 
-func (o *IndicatorPropertiesCustomMetricParamsTotal) GetMetricsOk() ([]IndicatorPropertiesCustomMetricParamsTotalMetricsInner, bool) { +func (o *IndicatorPropertiesCustomMetricParamsTotal) GetMetricsOk() ([]IndicatorPropertiesCustomMetricParamsGoodMetricsInner, bool) { if o == nil { return nil, false } @@ -64,7 +64,7 @@ func (o *IndicatorPropertiesCustomMetricParamsTotal) GetMetricsOk() ([]Indicator } // SetMetrics sets field value -func (o *IndicatorPropertiesCustomMetricParamsTotal) SetMetrics(v []IndicatorPropertiesCustomMetricParamsTotalMetricsInner) { +func (o *IndicatorPropertiesCustomMetricParamsTotal) SetMetrics(v []IndicatorPropertiesCustomMetricParamsGoodMetricsInner) { o.Metrics = v } diff --git a/generated/slo/model_indicator_properties_custom_metric_params_total_metrics_inner.go b/generated/slo/model_indicator_properties_custom_metric_params_total_metrics_inner.go deleted file mode 100644 index 59a0aa676..000000000 --- a/generated/slo/model_indicator_properties_custom_metric_params_total_metrics_inner.go +++ /dev/null @@ -1,209 +0,0 @@ -/* -SLOs - -OpenAPI schema for SLOs endpoints - -API version: 1.0 -*/ - -// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. - -package slo - -import ( - "encoding/json" -) - -// checks if the IndicatorPropertiesCustomMetricParamsTotalMetricsInner type satisfies the MappedNullable interface at compile time -var _ MappedNullable = &IndicatorPropertiesCustomMetricParamsTotalMetricsInner{} - -// IndicatorPropertiesCustomMetricParamsTotalMetricsInner struct for IndicatorPropertiesCustomMetricParamsTotalMetricsInner -type IndicatorPropertiesCustomMetricParamsTotalMetricsInner struct { - // The name of the metric. Only valid options are A-Z - Name string `json:"name"` - // The aggregation type of the metric. Only valid option is \"sum\" - Aggregation string `json:"aggregation"` - // The field of the metric. - Field string `json:"field"` - // The filter to apply to the metric. 
- Filter *string `json:"filter,omitempty"` -} - -// NewIndicatorPropertiesCustomMetricParamsTotalMetricsInner instantiates a new IndicatorPropertiesCustomMetricParamsTotalMetricsInner object -// This constructor will assign default values to properties that have it defined, -// and makes sure properties required by API are set, but the set of arguments -// will change when the set of required properties is changed -func NewIndicatorPropertiesCustomMetricParamsTotalMetricsInner(name string, aggregation string, field string) *IndicatorPropertiesCustomMetricParamsTotalMetricsInner { - this := IndicatorPropertiesCustomMetricParamsTotalMetricsInner{} - this.Name = name - this.Aggregation = aggregation - this.Field = field - return &this -} - -// NewIndicatorPropertiesCustomMetricParamsTotalMetricsInnerWithDefaults instantiates a new IndicatorPropertiesCustomMetricParamsTotalMetricsInner object -// This constructor will only assign default values to properties that have it defined, -// but it doesn't guarantee that properties required by API are set -func NewIndicatorPropertiesCustomMetricParamsTotalMetricsInnerWithDefaults() *IndicatorPropertiesCustomMetricParamsTotalMetricsInner { - this := IndicatorPropertiesCustomMetricParamsTotalMetricsInner{} - return &this -} - -// GetName returns the Name field value -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetName() string { - if o == nil { - var ret string - return ret - } - - return o.Name -} - -// GetNameOk returns a tuple with the Name field value -// and a boolean to check if the value has been set. -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetNameOk() (*string, bool) { - if o == nil { - return nil, false - } - return &o.Name, true -} - -// SetName sets field value -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) SetName(v string) { - o.Name = v -} - -// GetAggregation returns the Aggregation field value -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetAggregation() string { - if o == nil { - var ret string - return ret - } - - return o.Aggregation -} - -// GetAggregationOk returns a tuple with the Aggregation field value -// and a boolean to check if the value has been set. -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetAggregationOk() (*string, bool) { - if o == nil { - return nil, false - } - return &o.Aggregation, true -} - -// SetAggregation sets field value -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) SetAggregation(v string) { - o.Aggregation = v -} - -// GetField returns the Field field value -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetField() string { - if o == nil { - var ret string - return ret - } - - return o.Field -} - -// GetFieldOk returns a tuple with the Field field value -// and a boolean to check if the value has been set. -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetFieldOk() (*string, bool) { - if o == nil { - return nil, false - } - return &o.Field, true -} - -// SetField sets field value -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) SetField(v string) { - o.Field = v -} - -// GetFilter returns the Filter field value if set, zero value otherwise. 
-func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetFilter() string { - if o == nil || IsNil(o.Filter) { - var ret string - return ret - } - return *o.Filter -} - -// GetFilterOk returns a tuple with the Filter field value if set, nil otherwise -// and a boolean to check if the value has been set. -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) GetFilterOk() (*string, bool) { - if o == nil || IsNil(o.Filter) { - return nil, false - } - return o.Filter, true -} - -// HasFilter returns a boolean if a field has been set. -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) HasFilter() bool { - if o != nil && !IsNil(o.Filter) { - return true - } - - return false -} - -// SetFilter gets a reference to the given string and assigns it to the Filter field. -func (o *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) SetFilter(v string) { - o.Filter = &v -} - -func (o IndicatorPropertiesCustomMetricParamsTotalMetricsInner) MarshalJSON() ([]byte, error) { - toSerialize, err := o.ToMap() - if err != nil { - return []byte{}, err - } - return json.Marshal(toSerialize) -} - -func (o IndicatorPropertiesCustomMetricParamsTotalMetricsInner) ToMap() (map[string]interface{}, error) { - toSerialize := map[string]interface{}{} - toSerialize["name"] = o.Name - toSerialize["aggregation"] = o.Aggregation - toSerialize["field"] = o.Field - if !IsNil(o.Filter) { - toSerialize["filter"] = o.Filter - } - return toSerialize, nil -} - -type NullableIndicatorPropertiesCustomMetricParamsTotalMetricsInner struct { - value *IndicatorPropertiesCustomMetricParamsTotalMetricsInner - isSet bool -} - -func (v NullableIndicatorPropertiesCustomMetricParamsTotalMetricsInner) Get() *IndicatorPropertiesCustomMetricParamsTotalMetricsInner { - return v.value -} - -func (v *NullableIndicatorPropertiesCustomMetricParamsTotalMetricsInner) Set(val *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) { - v.value = val - v.isSet = true -} - -func (v NullableIndicatorPropertiesCustomMetricParamsTotalMetricsInner) IsSet() bool { - return v.isSet -} - -func (v *NullableIndicatorPropertiesCustomMetricParamsTotalMetricsInner) Unset() { - v.value = nil - v.isSet = false -} - -func NewNullableIndicatorPropertiesCustomMetricParamsTotalMetricsInner(val *IndicatorPropertiesCustomMetricParamsTotalMetricsInner) *NullableIndicatorPropertiesCustomMetricParamsTotalMetricsInner { - return &NullableIndicatorPropertiesCustomMetricParamsTotalMetricsInner{value: val, isSet: true} -} - -func (v NullableIndicatorPropertiesCustomMetricParamsTotalMetricsInner) MarshalJSON() ([]byte, error) { - return json.Marshal(v.value) -} - -func (v *NullableIndicatorPropertiesCustomMetricParamsTotalMetricsInner) UnmarshalJSON(src []byte) error { - v.isSet = true - return json.Unmarshal(src, &v.value) -} diff --git a/generated/slo/model_indicator_properties_histogram.go b/generated/slo/model_indicator_properties_histogram.go index 1ac522fc6..e04cc8e51 100644 --- a/generated/slo/model_indicator_properties_histogram.go +++ b/generated/slo/model_indicator_properties_histogram.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
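With model_indicator_properties_custom_metric_params_total_metrics_inner.go deleted above, the "total" metrics list is now built from IndicatorPropertiesCustomMetricParamsGoodMetricsInner values, matching the type change in IndicatorPropertiesCustomMetricParamsTotal. A minimal sketch of constructing the total side under that change follows; the metric name, aggregation, and field values are illustrative, and the import path assumes the module's generated/slo package.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/terraform-provider-elasticstack/generated/slo"
)

func main() {
	// Both the "good" and "total" sides now share the same metric item type.
	metrics := []slo.IndicatorPropertiesCustomMetricParamsGoodMetricsInner{
		{Name: "A", Aggregation: "sum", Field: "processor.processed"}, // illustrative field name
	}
	total := slo.NewIndicatorPropertiesCustomMetricParamsTotal(metrics, "A")

	b, err := json.Marshal(total)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // metrics plus the "total" equation
}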
diff --git a/generated/slo/model_indicator_properties_histogram_params.go b/generated/slo/model_indicator_properties_histogram_params.go index 5d9348681..9c2ca7d15 100644 --- a/generated/slo/model_indicator_properties_histogram_params.go +++ b/generated/slo/model_indicator_properties_histogram_params.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -21,6 +21,8 @@ var _ MappedNullable = &IndicatorPropertiesHistogramParams{} type IndicatorPropertiesHistogramParams struct { // The index or index pattern to use Index string `json:"index"` + // The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. + DataViewId *string `json:"dataViewId,omitempty"` // the KQL query to filter the documents with. Filter *string `json:"filter,omitempty"` // The timestamp field used in the source indice. @@ -74,6 +76,38 @@ func (o *IndicatorPropertiesHistogramParams) SetIndex(v string) { o.Index = v } +// GetDataViewId returns the DataViewId field value if set, zero value otherwise. +func (o *IndicatorPropertiesHistogramParams) GetDataViewId() string { + if o == nil || IsNil(o.DataViewId) { + var ret string + return ret + } + return *o.DataViewId +} + +// GetDataViewIdOk returns a tuple with the DataViewId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *IndicatorPropertiesHistogramParams) GetDataViewIdOk() (*string, bool) { + if o == nil || IsNil(o.DataViewId) { + return nil, false + } + return o.DataViewId, true +} + +// HasDataViewId returns a boolean if a field has been set. +func (o *IndicatorPropertiesHistogramParams) HasDataViewId() bool { + if o != nil && !IsNil(o.DataViewId) { + return true + } + + return false +} + +// SetDataViewId gets a reference to the given string and assigns it to the DataViewId field. +func (o *IndicatorPropertiesHistogramParams) SetDataViewId(v string) { + o.DataViewId = &v +} + // GetFilter returns the Filter field value if set, zero value otherwise. func (o *IndicatorPropertiesHistogramParams) GetFilter() string { if o == nil || IsNil(o.Filter) { @@ -189,6 +223,9 @@ func (o IndicatorPropertiesHistogramParams) MarshalJSON() ([]byte, error) { func (o IndicatorPropertiesHistogramParams) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} toSerialize["index"] = o.Index + if !IsNil(o.DataViewId) { + toSerialize["dataViewId"] = o.DataViewId + } if !IsNil(o.Filter) { toSerialize["filter"] = o.Filter } diff --git a/generated/slo/model_indicator_properties_histogram_params_good.go b/generated/slo/model_indicator_properties_histogram_params_good.go index 2d4f845f6..8a74efd41 100644 --- a/generated/slo/model_indicator_properties_histogram_params_good.go +++ b/generated/slo/model_indicator_properties_histogram_params_good.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
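The hunk above adds the optional dataViewId parameter to IndicatorPropertiesHistogramParams. A minimal sketch of setting and reading it through the generated accessors follows; the data view id and index pattern are illustrative, other required params are left at their zero values, and the import path assumes the module's generated/slo package.

package main

import (
	"fmt"

	"github.com/elastic/terraform-provider-elasticstack/generated/slo"
)

func main() {
	// Only the index is set here; the remaining required histogram params are omitted in this sketch.
	params := slo.IndicatorPropertiesHistogramParams{Index: "logs-*"}

	// dataViewId is optional and is only serialized when set.
	params.SetDataViewId("my-data-view-id") // hypothetical Kibana data view id
	if params.HasDataViewId() {
		fmt.Println("data view:", params.GetDataViewId())
	}
}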
diff --git a/generated/slo/model_indicator_properties_histogram_params_total.go b/generated/slo/model_indicator_properties_histogram_params_total.go index 7581231f4..acdc4fbc8 100644 --- a/generated/slo/model_indicator_properties_histogram_params_total.go +++ b/generated/slo/model_indicator_properties_histogram_params_total.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_indicator_properties_timeslice_metric.go b/generated/slo/model_indicator_properties_timeslice_metric.go index bd5e6fde0..0b5815036 100644 --- a/generated/slo/model_indicator_properties_timeslice_metric.go +++ b/generated/slo/model_indicator_properties_timeslice_metric.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_indicator_properties_timeslice_metric_params.go b/generated/slo/model_indicator_properties_timeslice_metric_params.go index d702e10b5..686ea523e 100644 --- a/generated/slo/model_indicator_properties_timeslice_metric_params.go +++ b/generated/slo/model_indicator_properties_timeslice_metric_params.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -21,6 +21,8 @@ var _ MappedNullable = &IndicatorPropertiesTimesliceMetricParams{} type IndicatorPropertiesTimesliceMetricParams struct { // The index or index pattern to use Index string `json:"index"` + // The kibana data view id to use, primarily used to include data view runtime mappings. Make sure to save SLO again if you add/update run time fields to the data view and if those fields are being used in slo queries. + DataViewId *string `json:"dataViewId,omitempty"` // the KQL query to filter the documents with. Filter *string `json:"filter,omitempty"` // The timestamp field used in the source indice. @@ -72,6 +74,38 @@ func (o *IndicatorPropertiesTimesliceMetricParams) SetIndex(v string) { o.Index = v } +// GetDataViewId returns the DataViewId field value if set, zero value otherwise. +func (o *IndicatorPropertiesTimesliceMetricParams) GetDataViewId() string { + if o == nil || IsNil(o.DataViewId) { + var ret string + return ret + } + return *o.DataViewId +} + +// GetDataViewIdOk returns a tuple with the DataViewId field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *IndicatorPropertiesTimesliceMetricParams) GetDataViewIdOk() (*string, bool) { + if o == nil || IsNil(o.DataViewId) { + return nil, false + } + return o.DataViewId, true +} + +// HasDataViewId returns a boolean if a field has been set. +func (o *IndicatorPropertiesTimesliceMetricParams) HasDataViewId() bool { + if o != nil && !IsNil(o.DataViewId) { + return true + } + + return false +} + +// SetDataViewId gets a reference to the given string and assigns it to the DataViewId field. +func (o *IndicatorPropertiesTimesliceMetricParams) SetDataViewId(v string) { + o.DataViewId = &v +} + // GetFilter returns the Filter field value if set, zero value otherwise. 
func (o *IndicatorPropertiesTimesliceMetricParams) GetFilter() string { if o == nil || IsNil(o.Filter) { @@ -163,6 +197,9 @@ func (o IndicatorPropertiesTimesliceMetricParams) MarshalJSON() ([]byte, error) func (o IndicatorPropertiesTimesliceMetricParams) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} toSerialize["index"] = o.Index + if !IsNil(o.DataViewId) { + toSerialize["dataViewId"] = o.DataViewId + } if !IsNil(o.Filter) { toSerialize["filter"] = o.Filter } diff --git a/generated/slo/model_indicator_properties_timeslice_metric_params_metric.go b/generated/slo/model_indicator_properties_timeslice_metric_params_metric.go index 89435056d..da0c37817 100644 --- a/generated/slo/model_indicator_properties_timeslice_metric_params_metric.go +++ b/generated/slo/model_indicator_properties_timeslice_metric_params_metric.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_indicator_properties_timeslice_metric_params_metric_metrics_inner.go b/generated/slo/model_indicator_properties_timeslice_metric_params_metric_metrics_inner.go index afbce10b1..0d1154589 100644 --- a/generated/slo/model_indicator_properties_timeslice_metric_params_metric_metrics_inner.go +++ b/generated/slo/model_indicator_properties_timeslice_metric_params_metric_metrics_inner.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_kql_with_filters.go b/generated/slo/model_kql_with_filters.go new file mode 100644 index 000000000..b8c17d0f0 --- /dev/null +++ b/generated/slo/model_kql_with_filters.go @@ -0,0 +1,145 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
+ +package slo + +import ( + "encoding/json" + "fmt" +) + +// KqlWithFilters - Defines properties for a filter +type KqlWithFilters struct { + KqlWithFiltersOneOf *KqlWithFiltersOneOf + String *string +} + +// KqlWithFiltersOneOfAsKqlWithFilters is a convenience function that returns KqlWithFiltersOneOf wrapped in KqlWithFilters +func KqlWithFiltersOneOfAsKqlWithFilters(v *KqlWithFiltersOneOf) KqlWithFilters { + return KqlWithFilters{ + KqlWithFiltersOneOf: v, + } +} + +// stringAsKqlWithFilters is a convenience function that returns string wrapped in KqlWithFilters +func StringAsKqlWithFilters(v *string) KqlWithFilters { + return KqlWithFilters{ + String: v, + } +} + +// Unmarshal JSON data into one of the pointers in the struct +func (dst *KqlWithFilters) UnmarshalJSON(data []byte) error { + var err error + match := 0 + // try to unmarshal data into KqlWithFiltersOneOf + err = json.Unmarshal(data, &dst.KqlWithFiltersOneOf) + if err == nil { + jsonKqlWithFiltersOneOf, _ := json.Marshal(dst.KqlWithFiltersOneOf) + if string(jsonKqlWithFiltersOneOf) == "{}" { // empty struct + dst.KqlWithFiltersOneOf = nil + } else { + match++ + } + } else { + dst.KqlWithFiltersOneOf = nil + } + + // try to unmarshal data into String + err = json.Unmarshal(data, &dst.String) + if err == nil { + jsonstring, _ := json.Marshal(dst.String) + if string(jsonstring) == "{}" { // empty struct + dst.String = nil + } else { + match++ + } + } else { + dst.String = nil + } + + if match > 1 { // more than 1 match + // reset to nil + dst.KqlWithFiltersOneOf = nil + dst.String = nil + + return fmt.Errorf("data matches more than one schema in oneOf(KqlWithFilters)") + } else if match == 1 { + return nil // exactly one match + } else { // no match + return fmt.Errorf("data failed to match schemas in oneOf(KqlWithFilters)") + } +} + +// Marshal data from the first non-nil pointers in the struct to JSON +func (src KqlWithFilters) MarshalJSON() ([]byte, error) { + if src.KqlWithFiltersOneOf != nil { + return json.Marshal(&src.KqlWithFiltersOneOf) + } + + if src.String != nil { + return json.Marshal(&src.String) + } + + return nil, nil // no data in oneOf schemas +} + +// Get the actual instance +func (obj *KqlWithFilters) GetActualInstance() interface{} { + if obj == nil { + return nil + } + if obj.KqlWithFiltersOneOf != nil { + return obj.KqlWithFiltersOneOf + } + + if obj.String != nil { + return obj.String + } + + // all schemas are nil + return nil +} + +type NullableKqlWithFilters struct { + value *KqlWithFilters + isSet bool +} + +func (v NullableKqlWithFilters) Get() *KqlWithFilters { + return v.value +} + +func (v *NullableKqlWithFilters) Set(val *KqlWithFilters) { + v.value = val + v.isSet = true +} + +func (v NullableKqlWithFilters) IsSet() bool { + return v.isSet +} + +func (v *NullableKqlWithFilters) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableKqlWithFilters(val *KqlWithFilters) *NullableKqlWithFilters { + return &NullableKqlWithFilters{value: val, isSet: true} +} + +func (v NullableKqlWithFilters) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableKqlWithFilters) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_kql_with_filters_good.go b/generated/slo/model_kql_with_filters_good.go new file mode 100644 index 000000000..1f9cda190 --- /dev/null +++ b/generated/slo/model_kql_with_filters_good.go @@ -0,0 +1,145 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + 
+// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" + "fmt" +) + +// KqlWithFiltersGood - The KQL query used to define the good events. +type KqlWithFiltersGood struct { + KqlWithFiltersOneOf *KqlWithFiltersOneOf + String *string +} + +// KqlWithFiltersOneOfAsKqlWithFiltersGood is a convenience function that returns KqlWithFiltersOneOf wrapped in KqlWithFiltersGood +func KqlWithFiltersOneOfAsKqlWithFiltersGood(v *KqlWithFiltersOneOf) KqlWithFiltersGood { + return KqlWithFiltersGood{ + KqlWithFiltersOneOf: v, + } +} + +// stringAsKqlWithFiltersGood is a convenience function that returns string wrapped in KqlWithFiltersGood +func StringAsKqlWithFiltersGood(v *string) KqlWithFiltersGood { + return KqlWithFiltersGood{ + String: v, + } +} + +// Unmarshal JSON data into one of the pointers in the struct +func (dst *KqlWithFiltersGood) UnmarshalJSON(data []byte) error { + var err error + match := 0 + // try to unmarshal data into KqlWithFiltersOneOf + err = json.Unmarshal(data, &dst.KqlWithFiltersOneOf) + if err == nil { + jsonKqlWithFiltersOneOf, _ := json.Marshal(dst.KqlWithFiltersOneOf) + if string(jsonKqlWithFiltersOneOf) == "{}" { // empty struct + dst.KqlWithFiltersOneOf = nil + } else { + match++ + } + } else { + dst.KqlWithFiltersOneOf = nil + } + + // try to unmarshal data into String + err = json.Unmarshal(data, &dst.String) + if err == nil { + jsonstring, _ := json.Marshal(dst.String) + if string(jsonstring) == "{}" { // empty struct + dst.String = nil + } else { + match++ + } + } else { + dst.String = nil + } + + if match > 1 { // more than 1 match + // reset to nil + dst.KqlWithFiltersOneOf = nil + dst.String = nil + + return fmt.Errorf("data matches more than one schema in oneOf(KqlWithFiltersGood)") + } else if match == 1 { + return nil // exactly one match + } else { // no match + return fmt.Errorf("data failed to match schemas in oneOf(KqlWithFiltersGood)") + } +} + +// Marshal data from the first non-nil pointers in the struct to JSON +func (src KqlWithFiltersGood) MarshalJSON() ([]byte, error) { + if src.KqlWithFiltersOneOf != nil { + return json.Marshal(&src.KqlWithFiltersOneOf) + } + + if src.String != nil { + return json.Marshal(&src.String) + } + + return nil, nil // no data in oneOf schemas +} + +// Get the actual instance +func (obj *KqlWithFiltersGood) GetActualInstance() interface{} { + if obj == nil { + return nil + } + if obj.KqlWithFiltersOneOf != nil { + return obj.KqlWithFiltersOneOf + } + + if obj.String != nil { + return obj.String + } + + // all schemas are nil + return nil +} + +type NullableKqlWithFiltersGood struct { + value *KqlWithFiltersGood + isSet bool +} + +func (v NullableKqlWithFiltersGood) Get() *KqlWithFiltersGood { + return v.value +} + +func (v *NullableKqlWithFiltersGood) Set(val *KqlWithFiltersGood) { + v.value = val + v.isSet = true +} + +func (v NullableKqlWithFiltersGood) IsSet() bool { + return v.isSet +} + +func (v *NullableKqlWithFiltersGood) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableKqlWithFiltersGood(val *KqlWithFiltersGood) *NullableKqlWithFiltersGood { + return &NullableKqlWithFiltersGood{value: val, isSet: true} +} + +func (v NullableKqlWithFiltersGood) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableKqlWithFiltersGood) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_kql_with_filters_one_of.go 
b/generated/slo/model_kql_with_filters_one_of.go new file mode 100644 index 000000000..177edfc0c --- /dev/null +++ b/generated/slo/model_kql_with_filters_one_of.go @@ -0,0 +1,160 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the KqlWithFiltersOneOf type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &KqlWithFiltersOneOf{} + +// KqlWithFiltersOneOf struct for KqlWithFiltersOneOf +type KqlWithFiltersOneOf struct { + KqlQuery *string `json:"kqlQuery,omitempty"` + Filters []Filter `json:"filters,omitempty"` +} + +// NewKqlWithFiltersOneOf instantiates a new KqlWithFiltersOneOf object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewKqlWithFiltersOneOf() *KqlWithFiltersOneOf { + this := KqlWithFiltersOneOf{} + return &this +} + +// NewKqlWithFiltersOneOfWithDefaults instantiates a new KqlWithFiltersOneOf object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewKqlWithFiltersOneOfWithDefaults() *KqlWithFiltersOneOf { + this := KqlWithFiltersOneOf{} + return &this +} + +// GetKqlQuery returns the KqlQuery field value if set, zero value otherwise. +func (o *KqlWithFiltersOneOf) GetKqlQuery() string { + if o == nil || IsNil(o.KqlQuery) { + var ret string + return ret + } + return *o.KqlQuery +} + +// GetKqlQueryOk returns a tuple with the KqlQuery field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *KqlWithFiltersOneOf) GetKqlQueryOk() (*string, bool) { + if o == nil || IsNil(o.KqlQuery) { + return nil, false + } + return o.KqlQuery, true +} + +// HasKqlQuery returns a boolean if a field has been set. +func (o *KqlWithFiltersOneOf) HasKqlQuery() bool { + if o != nil && !IsNil(o.KqlQuery) { + return true + } + + return false +} + +// SetKqlQuery gets a reference to the given string and assigns it to the KqlQuery field. +func (o *KqlWithFiltersOneOf) SetKqlQuery(v string) { + o.KqlQuery = &v +} + +// GetFilters returns the Filters field value if set, zero value otherwise. +func (o *KqlWithFiltersOneOf) GetFilters() []Filter { + if o == nil || IsNil(o.Filters) { + var ret []Filter + return ret + } + return o.Filters +} + +// GetFiltersOk returns a tuple with the Filters field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *KqlWithFiltersOneOf) GetFiltersOk() ([]Filter, bool) { + if o == nil || IsNil(o.Filters) { + return nil, false + } + return o.Filters, true +} + +// HasFilters returns a boolean if a field has been set. +func (o *KqlWithFiltersOneOf) HasFilters() bool { + if o != nil && !IsNil(o.Filters) { + return true + } + + return false +} + +// SetFilters gets a reference to the given []Filter and assigns it to the Filters field. 
+func (o *KqlWithFiltersOneOf) SetFilters(v []Filter) { + o.Filters = v +} + +func (o KqlWithFiltersOneOf) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o KqlWithFiltersOneOf) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + if !IsNil(o.KqlQuery) { + toSerialize["kqlQuery"] = o.KqlQuery + } + if !IsNil(o.Filters) { + toSerialize["filters"] = o.Filters + } + return toSerialize, nil +} + +type NullableKqlWithFiltersOneOf struct { + value *KqlWithFiltersOneOf + isSet bool +} + +func (v NullableKqlWithFiltersOneOf) Get() *KqlWithFiltersOneOf { + return v.value +} + +func (v *NullableKqlWithFiltersOneOf) Set(val *KqlWithFiltersOneOf) { + v.value = val + v.isSet = true +} + +func (v NullableKqlWithFiltersOneOf) IsSet() bool { + return v.isSet +} + +func (v *NullableKqlWithFiltersOneOf) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableKqlWithFiltersOneOf(val *KqlWithFiltersOneOf) *NullableKqlWithFiltersOneOf { + return &NullableKqlWithFiltersOneOf{value: val, isSet: true} +} + +func (v NullableKqlWithFiltersOneOf) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableKqlWithFiltersOneOf) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_kql_with_filters_total.go b/generated/slo/model_kql_with_filters_total.go new file mode 100644 index 000000000..c33cac612 --- /dev/null +++ b/generated/slo/model_kql_with_filters_total.go @@ -0,0 +1,145 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" + "fmt" +) + +// KqlWithFiltersTotal - The KQL query used to define all events. 
+type KqlWithFiltersTotal struct { + KqlWithFiltersOneOf *KqlWithFiltersOneOf + String *string +} + +// KqlWithFiltersOneOfAsKqlWithFiltersTotal is a convenience function that returns KqlWithFiltersOneOf wrapped in KqlWithFiltersTotal +func KqlWithFiltersOneOfAsKqlWithFiltersTotal(v *KqlWithFiltersOneOf) KqlWithFiltersTotal { + return KqlWithFiltersTotal{ + KqlWithFiltersOneOf: v, + } +} + +// stringAsKqlWithFiltersTotal is a convenience function that returns string wrapped in KqlWithFiltersTotal +func StringAsKqlWithFiltersTotal(v *string) KqlWithFiltersTotal { + return KqlWithFiltersTotal{ + String: v, + } +} + +// Unmarshal JSON data into one of the pointers in the struct +func (dst *KqlWithFiltersTotal) UnmarshalJSON(data []byte) error { + var err error + match := 0 + // try to unmarshal data into KqlWithFiltersOneOf + err = json.Unmarshal(data, &dst.KqlWithFiltersOneOf) + if err == nil { + jsonKqlWithFiltersOneOf, _ := json.Marshal(dst.KqlWithFiltersOneOf) + if string(jsonKqlWithFiltersOneOf) == "{}" { // empty struct + dst.KqlWithFiltersOneOf = nil + } else { + match++ + } + } else { + dst.KqlWithFiltersOneOf = nil + } + + // try to unmarshal data into String + err = json.Unmarshal(data, &dst.String) + if err == nil { + jsonstring, _ := json.Marshal(dst.String) + if string(jsonstring) == "{}" { // empty struct + dst.String = nil + } else { + match++ + } + } else { + dst.String = nil + } + + if match > 1 { // more than 1 match + // reset to nil + dst.KqlWithFiltersOneOf = nil + dst.String = nil + + return fmt.Errorf("data matches more than one schema in oneOf(KqlWithFiltersTotal)") + } else if match == 1 { + return nil // exactly one match + } else { // no match + return fmt.Errorf("data failed to match schemas in oneOf(KqlWithFiltersTotal)") + } +} + +// Marshal data from the first non-nil pointers in the struct to JSON +func (src KqlWithFiltersTotal) MarshalJSON() ([]byte, error) { + if src.KqlWithFiltersOneOf != nil { + return json.Marshal(&src.KqlWithFiltersOneOf) + } + + if src.String != nil { + return json.Marshal(&src.String) + } + + return nil, nil // no data in oneOf schemas +} + +// Get the actual instance +func (obj *KqlWithFiltersTotal) GetActualInstance() interface{} { + if obj == nil { + return nil + } + if obj.KqlWithFiltersOneOf != nil { + return obj.KqlWithFiltersOneOf + } + + if obj.String != nil { + return obj.String + } + + // all schemas are nil + return nil +} + +type NullableKqlWithFiltersTotal struct { + value *KqlWithFiltersTotal + isSet bool +} + +func (v NullableKqlWithFiltersTotal) Get() *KqlWithFiltersTotal { + return v.value +} + +func (v *NullableKqlWithFiltersTotal) Set(val *KqlWithFiltersTotal) { + v.value = val + v.isSet = true +} + +func (v NullableKqlWithFiltersTotal) IsSet() bool { + return v.isSet +} + +func (v *NullableKqlWithFiltersTotal) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableKqlWithFiltersTotal(val *KqlWithFiltersTotal) *NullableKqlWithFiltersTotal { + return &NullableKqlWithFiltersTotal{value: val, isSet: true} +} + +func (v NullableKqlWithFiltersTotal) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableKqlWithFiltersTotal) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_objective.go b/generated/slo/model_objective.go index cad95abd2..b9f51f1cd 100644 --- a/generated/slo/model_objective.go +++ b/generated/slo/model_objective.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API 
version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_settings.go b/generated/slo/model_settings.go index f7cc30ba7..1d8b88ccb 100644 --- a/generated/slo/model_settings.go +++ b/generated/slo/model_settings.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -19,10 +19,14 @@ var _ MappedNullable = &Settings{} // Settings Defines properties for SLO settings. type Settings struct { - // The synch delay to apply to the transform. Default 1m + // The date field that is used to identify new documents in the source. It is strongly recommended to use a field that contains the ingest timestamp. If you use a different field, you might need to set the delay such that it accounts for data transmission delays. When unspecified, we use the indicator timestamp field. + SyncField *string `json:"syncField,omitempty"` + // The time delay in minutes between the current time and the latest source data time. Increasing the value will delay any alerting. The default value is 1 minute. The minimum value is 1m and the maximum is 359m. It should always be greater then source index refresh interval. SyncDelay *string `json:"syncDelay,omitempty"` - // Configure how often the transform runs, default 1m + // The interval between checks for changes in the source data. The minimum value is 1m and the maximum is 59m. The default value is 1 minute. Frequency *string `json:"frequency,omitempty"` + // Start aggregating data from the time the SLO is created, instead of backfilling data from the beginning of the time window. + PreventInitialBackfill *bool `json:"preventInitialBackfill,omitempty"` } // NewSettings instantiates a new Settings object @@ -31,6 +35,12 @@ type Settings struct { // will change when the set of required properties is changed func NewSettings() *Settings { this := Settings{} + var syncDelay string = "1m" + this.SyncDelay = &syncDelay + var frequency string = "1m" + this.Frequency = &frequency + var preventInitialBackfill bool = false + this.PreventInitialBackfill = &preventInitialBackfill return &this } @@ -39,9 +49,47 @@ func NewSettings() *Settings { // but it doesn't guarantee that properties required by API are set func NewSettingsWithDefaults() *Settings { this := Settings{} + var syncDelay string = "1m" + this.SyncDelay = &syncDelay + var frequency string = "1m" + this.Frequency = &frequency + var preventInitialBackfill bool = false + this.PreventInitialBackfill = &preventInitialBackfill return &this } +// GetSyncField returns the SyncField field value if set, zero value otherwise. +func (o *Settings) GetSyncField() string { + if o == nil || IsNil(o.SyncField) { + var ret string + return ret + } + return *o.SyncField +} + +// GetSyncFieldOk returns a tuple with the SyncField field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *Settings) GetSyncFieldOk() (*string, bool) { + if o == nil || IsNil(o.SyncField) { + return nil, false + } + return o.SyncField, true +} + +// HasSyncField returns a boolean if a field has been set. +func (o *Settings) HasSyncField() bool { + if o != nil && !IsNil(o.SyncField) { + return true + } + + return false +} + +// SetSyncField gets a reference to the given string and assigns it to the SyncField field. 
+func (o *Settings) SetSyncField(v string) { + o.SyncField = &v +} + // GetSyncDelay returns the SyncDelay field value if set, zero value otherwise. func (o *Settings) GetSyncDelay() string { if o == nil || IsNil(o.SyncDelay) { @@ -106,6 +154,38 @@ func (o *Settings) SetFrequency(v string) { o.Frequency = &v } +// GetPreventInitialBackfill returns the PreventInitialBackfill field value if set, zero value otherwise. +func (o *Settings) GetPreventInitialBackfill() bool { + if o == nil || IsNil(o.PreventInitialBackfill) { + var ret bool + return ret + } + return *o.PreventInitialBackfill +} + +// GetPreventInitialBackfillOk returns a tuple with the PreventInitialBackfill field value if set, nil otherwise +// and a boolean to check if the value has been set. +func (o *Settings) GetPreventInitialBackfillOk() (*bool, bool) { + if o == nil || IsNil(o.PreventInitialBackfill) { + return nil, false + } + return o.PreventInitialBackfill, true +} + +// HasPreventInitialBackfill returns a boolean if a field has been set. +func (o *Settings) HasPreventInitialBackfill() bool { + if o != nil && !IsNil(o.PreventInitialBackfill) { + return true + } + + return false +} + +// SetPreventInitialBackfill gets a reference to the given bool and assigns it to the PreventInitialBackfill field. +func (o *Settings) SetPreventInitialBackfill(v bool) { + o.PreventInitialBackfill = &v +} + func (o Settings) MarshalJSON() ([]byte, error) { toSerialize, err := o.ToMap() if err != nil { @@ -116,12 +196,18 @@ func (o Settings) MarshalJSON() ([]byte, error) { func (o Settings) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} + if !IsNil(o.SyncField) { + toSerialize["syncField"] = o.SyncField + } if !IsNil(o.SyncDelay) { toSerialize["syncDelay"] = o.SyncDelay } if !IsNil(o.Frequency) { toSerialize["frequency"] = o.Frequency } + if !IsNil(o.PreventInitialBackfill) { + toSerialize["preventInitialBackfill"] = o.PreventInitialBackfill + } return toSerialize, nil } diff --git a/generated/slo/model_slo_response.go b/generated/slo/model_slo_definition_response.go similarity index 58% rename from generated/slo/model_slo_response.go rename to generated/slo/model_slo_definition_response.go index 6bb82ce1e..dd9e2aa3d 100644 --- a/generated/slo/model_slo_response.go +++ b/generated/slo/model_slo_definition_response.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -14,44 +14,43 @@ import ( "encoding/json" ) -// checks if the SloResponse type satisfies the MappedNullable interface at compile time -var _ MappedNullable = &SloResponse{} +// checks if the SloDefinitionResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &SloDefinitionResponse{} -// SloResponse struct for SloResponse -type SloResponse struct { +// SloDefinitionResponse struct for SloDefinitionResponse +type SloDefinitionResponse struct { // The identifier of the SLO. Id string `json:"id"` // The name of the SLO. Name string `json:"name"` // The description of the SLO. 
- Description string `json:"description"` - Indicator SloResponseIndicator `json:"indicator"` - TimeWindow TimeWindow `json:"timeWindow"` - BudgetingMethod BudgetingMethod `json:"budgetingMethod"` - Objective Objective `json:"objective"` - Settings Settings `json:"settings"` + Description string `json:"description"` + Indicator SloWithSummaryResponseIndicator `json:"indicator"` + TimeWindow TimeWindow `json:"timeWindow"` + BudgetingMethod BudgetingMethod `json:"budgetingMethod"` + Objective Objective `json:"objective"` + Settings Settings `json:"settings"` // The SLO revision Revision float64 `json:"revision"` - Summary Summary `json:"summary"` // Indicate if the SLO is enabled - Enabled bool `json:"enabled"` - GroupBy SloResponseGroupBy `json:"groupBy"` - // the value derived from the groupBy field, if present, otherwise '*' - InstanceId string `json:"instanceId"` + Enabled bool `json:"enabled"` + GroupBy GroupBy `json:"groupBy"` // List of tags Tags []string `json:"tags"` // The creation date CreatedAt string `json:"createdAt"` // The last update date UpdatedAt string `json:"updatedAt"` + // The internal SLO version + Version float64 `json:"version"` } -// NewSloResponse instantiates a new SloResponse object +// NewSloDefinitionResponse instantiates a new SloDefinitionResponse object // This constructor will assign default values to properties that have it defined, // and makes sure properties required by API are set, but the set of arguments // will change when the set of required properties is changed -func NewSloResponse(id string, name string, description string, indicator SloResponseIndicator, timeWindow TimeWindow, budgetingMethod BudgetingMethod, objective Objective, settings Settings, revision float64, summary Summary, enabled bool, groupBy SloResponseGroupBy, instanceId string, tags []string, createdAt string, updatedAt string) *SloResponse { - this := SloResponse{} +func NewSloDefinitionResponse(id string, name string, description string, indicator SloWithSummaryResponseIndicator, timeWindow TimeWindow, budgetingMethod BudgetingMethod, objective Objective, settings Settings, revision float64, enabled bool, groupBy GroupBy, tags []string, createdAt string, updatedAt string, version float64) *SloDefinitionResponse { + this := SloDefinitionResponse{} this.Id = id this.Name = name this.Description = description @@ -61,26 +60,25 @@ func NewSloResponse(id string, name string, description string, indicator SloRes this.Objective = objective this.Settings = settings this.Revision = revision - this.Summary = summary this.Enabled = enabled this.GroupBy = groupBy - this.InstanceId = instanceId this.Tags = tags this.CreatedAt = createdAt this.UpdatedAt = updatedAt + this.Version = version return &this } -// NewSloResponseWithDefaults instantiates a new SloResponse object +// NewSloDefinitionResponseWithDefaults instantiates a new SloDefinitionResponse object // This constructor will only assign default values to properties that have it defined, // but it doesn't guarantee that properties required by API are set -func NewSloResponseWithDefaults() *SloResponse { - this := SloResponse{} +func NewSloDefinitionResponseWithDefaults() *SloDefinitionResponse { + this := SloDefinitionResponse{} return &this } // GetId returns the Id field value -func (o *SloResponse) GetId() string { +func (o *SloDefinitionResponse) GetId() string { if o == nil { var ret string return ret @@ -91,7 +89,7 @@ func (o *SloResponse) GetId() string { // GetIdOk returns a tuple with the Id field value // and a boolean to 
check if the value has been set. -func (o *SloResponse) GetIdOk() (*string, bool) { +func (o *SloDefinitionResponse) GetIdOk() (*string, bool) { if o == nil { return nil, false } @@ -99,12 +97,12 @@ func (o *SloResponse) GetIdOk() (*string, bool) { } // SetId sets field value -func (o *SloResponse) SetId(v string) { +func (o *SloDefinitionResponse) SetId(v string) { o.Id = v } // GetName returns the Name field value -func (o *SloResponse) GetName() string { +func (o *SloDefinitionResponse) GetName() string { if o == nil { var ret string return ret @@ -115,7 +113,7 @@ func (o *SloResponse) GetName() string { // GetNameOk returns a tuple with the Name field value // and a boolean to check if the value has been set. -func (o *SloResponse) GetNameOk() (*string, bool) { +func (o *SloDefinitionResponse) GetNameOk() (*string, bool) { if o == nil { return nil, false } @@ -123,12 +121,12 @@ func (o *SloResponse) GetNameOk() (*string, bool) { } // SetName sets field value -func (o *SloResponse) SetName(v string) { +func (o *SloDefinitionResponse) SetName(v string) { o.Name = v } // GetDescription returns the Description field value -func (o *SloResponse) GetDescription() string { +func (o *SloDefinitionResponse) GetDescription() string { if o == nil { var ret string return ret @@ -139,7 +137,7 @@ func (o *SloResponse) GetDescription() string { // GetDescriptionOk returns a tuple with the Description field value // and a boolean to check if the value has been set. -func (o *SloResponse) GetDescriptionOk() (*string, bool) { +func (o *SloDefinitionResponse) GetDescriptionOk() (*string, bool) { if o == nil { return nil, false } @@ -147,14 +145,14 @@ func (o *SloResponse) GetDescriptionOk() (*string, bool) { } // SetDescription sets field value -func (o *SloResponse) SetDescription(v string) { +func (o *SloDefinitionResponse) SetDescription(v string) { o.Description = v } // GetIndicator returns the Indicator field value -func (o *SloResponse) GetIndicator() SloResponseIndicator { +func (o *SloDefinitionResponse) GetIndicator() SloWithSummaryResponseIndicator { if o == nil { - var ret SloResponseIndicator + var ret SloWithSummaryResponseIndicator return ret } @@ -163,7 +161,7 @@ func (o *SloResponse) GetIndicator() SloResponseIndicator { // GetIndicatorOk returns a tuple with the Indicator field value // and a boolean to check if the value has been set. -func (o *SloResponse) GetIndicatorOk() (*SloResponseIndicator, bool) { +func (o *SloDefinitionResponse) GetIndicatorOk() (*SloWithSummaryResponseIndicator, bool) { if o == nil { return nil, false } @@ -171,12 +169,12 @@ func (o *SloResponse) GetIndicatorOk() (*SloResponseIndicator, bool) { } // SetIndicator sets field value -func (o *SloResponse) SetIndicator(v SloResponseIndicator) { +func (o *SloDefinitionResponse) SetIndicator(v SloWithSummaryResponseIndicator) { o.Indicator = v } // GetTimeWindow returns the TimeWindow field value -func (o *SloResponse) GetTimeWindow() TimeWindow { +func (o *SloDefinitionResponse) GetTimeWindow() TimeWindow { if o == nil { var ret TimeWindow return ret @@ -187,7 +185,7 @@ func (o *SloResponse) GetTimeWindow() TimeWindow { // GetTimeWindowOk returns a tuple with the TimeWindow field value // and a boolean to check if the value has been set. 
-func (o *SloResponse) GetTimeWindowOk() (*TimeWindow, bool) { +func (o *SloDefinitionResponse) GetTimeWindowOk() (*TimeWindow, bool) { if o == nil { return nil, false } @@ -195,12 +193,12 @@ func (o *SloResponse) GetTimeWindowOk() (*TimeWindow, bool) { } // SetTimeWindow sets field value -func (o *SloResponse) SetTimeWindow(v TimeWindow) { +func (o *SloDefinitionResponse) SetTimeWindow(v TimeWindow) { o.TimeWindow = v } // GetBudgetingMethod returns the BudgetingMethod field value -func (o *SloResponse) GetBudgetingMethod() BudgetingMethod { +func (o *SloDefinitionResponse) GetBudgetingMethod() BudgetingMethod { if o == nil { var ret BudgetingMethod return ret @@ -211,7 +209,7 @@ func (o *SloResponse) GetBudgetingMethod() BudgetingMethod { // GetBudgetingMethodOk returns a tuple with the BudgetingMethod field value // and a boolean to check if the value has been set. -func (o *SloResponse) GetBudgetingMethodOk() (*BudgetingMethod, bool) { +func (o *SloDefinitionResponse) GetBudgetingMethodOk() (*BudgetingMethod, bool) { if o == nil { return nil, false } @@ -219,12 +217,12 @@ func (o *SloResponse) GetBudgetingMethodOk() (*BudgetingMethod, bool) { } // SetBudgetingMethod sets field value -func (o *SloResponse) SetBudgetingMethod(v BudgetingMethod) { +func (o *SloDefinitionResponse) SetBudgetingMethod(v BudgetingMethod) { o.BudgetingMethod = v } // GetObjective returns the Objective field value -func (o *SloResponse) GetObjective() Objective { +func (o *SloDefinitionResponse) GetObjective() Objective { if o == nil { var ret Objective return ret @@ -235,7 +233,7 @@ func (o *SloResponse) GetObjective() Objective { // GetObjectiveOk returns a tuple with the Objective field value // and a boolean to check if the value has been set. -func (o *SloResponse) GetObjectiveOk() (*Objective, bool) { +func (o *SloDefinitionResponse) GetObjectiveOk() (*Objective, bool) { if o == nil { return nil, false } @@ -243,12 +241,12 @@ func (o *SloResponse) GetObjectiveOk() (*Objective, bool) { } // SetObjective sets field value -func (o *SloResponse) SetObjective(v Objective) { +func (o *SloDefinitionResponse) SetObjective(v Objective) { o.Objective = v } // GetSettings returns the Settings field value -func (o *SloResponse) GetSettings() Settings { +func (o *SloDefinitionResponse) GetSettings() Settings { if o == nil { var ret Settings return ret @@ -259,7 +257,7 @@ func (o *SloResponse) GetSettings() Settings { // GetSettingsOk returns a tuple with the Settings field value // and a boolean to check if the value has been set. -func (o *SloResponse) GetSettingsOk() (*Settings, bool) { +func (o *SloDefinitionResponse) GetSettingsOk() (*Settings, bool) { if o == nil { return nil, false } @@ -267,12 +265,12 @@ func (o *SloResponse) GetSettingsOk() (*Settings, bool) { } // SetSettings sets field value -func (o *SloResponse) SetSettings(v Settings) { +func (o *SloDefinitionResponse) SetSettings(v Settings) { o.Settings = v } // GetRevision returns the Revision field value -func (o *SloResponse) GetRevision() float64 { +func (o *SloDefinitionResponse) GetRevision() float64 { if o == nil { var ret float64 return ret @@ -283,7 +281,7 @@ func (o *SloResponse) GetRevision() float64 { // GetRevisionOk returns a tuple with the Revision field value // and a boolean to check if the value has been set. 
-func (o *SloResponse) GetRevisionOk() (*float64, bool) { +func (o *SloDefinitionResponse) GetRevisionOk() (*float64, bool) { if o == nil { return nil, false } @@ -291,36 +289,12 @@ func (o *SloResponse) GetRevisionOk() (*float64, bool) { } // SetRevision sets field value -func (o *SloResponse) SetRevision(v float64) { +func (o *SloDefinitionResponse) SetRevision(v float64) { o.Revision = v } -// GetSummary returns the Summary field value -func (o *SloResponse) GetSummary() Summary { - if o == nil { - var ret Summary - return ret - } - - return o.Summary -} - -// GetSummaryOk returns a tuple with the Summary field value -// and a boolean to check if the value has been set. -func (o *SloResponse) GetSummaryOk() (*Summary, bool) { - if o == nil { - return nil, false - } - return &o.Summary, true -} - -// SetSummary sets field value -func (o *SloResponse) SetSummary(v Summary) { - o.Summary = v -} - // GetEnabled returns the Enabled field value -func (o *SloResponse) GetEnabled() bool { +func (o *SloDefinitionResponse) GetEnabled() bool { if o == nil { var ret bool return ret @@ -331,7 +305,7 @@ func (o *SloResponse) GetEnabled() bool { // GetEnabledOk returns a tuple with the Enabled field value // and a boolean to check if the value has been set. -func (o *SloResponse) GetEnabledOk() (*bool, bool) { +func (o *SloDefinitionResponse) GetEnabledOk() (*bool, bool) { if o == nil { return nil, false } @@ -339,14 +313,14 @@ func (o *SloResponse) GetEnabledOk() (*bool, bool) { } // SetEnabled sets field value -func (o *SloResponse) SetEnabled(v bool) { +func (o *SloDefinitionResponse) SetEnabled(v bool) { o.Enabled = v } // GetGroupBy returns the GroupBy field value -func (o *SloResponse) GetGroupBy() SloResponseGroupBy { +func (o *SloDefinitionResponse) GetGroupBy() GroupBy { if o == nil { - var ret SloResponseGroupBy + var ret GroupBy return ret } @@ -355,7 +329,7 @@ func (o *SloResponse) GetGroupBy() SloResponseGroupBy { // GetGroupByOk returns a tuple with the GroupBy field value // and a boolean to check if the value has been set. -func (o *SloResponse) GetGroupByOk() (*SloResponseGroupBy, bool) { +func (o *SloDefinitionResponse) GetGroupByOk() (*GroupBy, bool) { if o == nil { return nil, false } @@ -363,36 +337,12 @@ func (o *SloResponse) GetGroupByOk() (*SloResponseGroupBy, bool) { } // SetGroupBy sets field value -func (o *SloResponse) SetGroupBy(v SloResponseGroupBy) { +func (o *SloDefinitionResponse) SetGroupBy(v GroupBy) { o.GroupBy = v } -// GetInstanceId returns the InstanceId field value -func (o *SloResponse) GetInstanceId() string { - if o == nil { - var ret string - return ret - } - - return o.InstanceId -} - -// GetInstanceIdOk returns a tuple with the InstanceId field value -// and a boolean to check if the value has been set. -func (o *SloResponse) GetInstanceIdOk() (*string, bool) { - if o == nil { - return nil, false - } - return &o.InstanceId, true -} - -// SetInstanceId sets field value -func (o *SloResponse) SetInstanceId(v string) { - o.InstanceId = v -} - // GetTags returns the Tags field value -func (o *SloResponse) GetTags() []string { +func (o *SloDefinitionResponse) GetTags() []string { if o == nil { var ret []string return ret @@ -403,7 +353,7 @@ func (o *SloResponse) GetTags() []string { // GetTagsOk returns a tuple with the Tags field value // and a boolean to check if the value has been set. 
-func (o *SloResponse) GetTagsOk() ([]string, bool) { +func (o *SloDefinitionResponse) GetTagsOk() ([]string, bool) { if o == nil { return nil, false } @@ -411,12 +361,12 @@ func (o *SloResponse) GetTagsOk() ([]string, bool) { } // SetTags sets field value -func (o *SloResponse) SetTags(v []string) { +func (o *SloDefinitionResponse) SetTags(v []string) { o.Tags = v } // GetCreatedAt returns the CreatedAt field value -func (o *SloResponse) GetCreatedAt() string { +func (o *SloDefinitionResponse) GetCreatedAt() string { if o == nil { var ret string return ret @@ -427,7 +377,7 @@ func (o *SloResponse) GetCreatedAt() string { // GetCreatedAtOk returns a tuple with the CreatedAt field value // and a boolean to check if the value has been set. -func (o *SloResponse) GetCreatedAtOk() (*string, bool) { +func (o *SloDefinitionResponse) GetCreatedAtOk() (*string, bool) { if o == nil { return nil, false } @@ -435,12 +385,12 @@ func (o *SloResponse) GetCreatedAtOk() (*string, bool) { } // SetCreatedAt sets field value -func (o *SloResponse) SetCreatedAt(v string) { +func (o *SloDefinitionResponse) SetCreatedAt(v string) { o.CreatedAt = v } // GetUpdatedAt returns the UpdatedAt field value -func (o *SloResponse) GetUpdatedAt() string { +func (o *SloDefinitionResponse) GetUpdatedAt() string { if o == nil { var ret string return ret @@ -451,7 +401,7 @@ func (o *SloResponse) GetUpdatedAt() string { // GetUpdatedAtOk returns a tuple with the UpdatedAt field value // and a boolean to check if the value has been set. -func (o *SloResponse) GetUpdatedAtOk() (*string, bool) { +func (o *SloDefinitionResponse) GetUpdatedAtOk() (*string, bool) { if o == nil { return nil, false } @@ -459,11 +409,35 @@ func (o *SloResponse) GetUpdatedAtOk() (*string, bool) { } // SetUpdatedAt sets field value -func (o *SloResponse) SetUpdatedAt(v string) { +func (o *SloDefinitionResponse) SetUpdatedAt(v string) { o.UpdatedAt = v } -func (o SloResponse) MarshalJSON() ([]byte, error) { +// GetVersion returns the Version field value +func (o *SloDefinitionResponse) GetVersion() float64 { + if o == nil { + var ret float64 + return ret + } + + return o.Version +} + +// GetVersionOk returns a tuple with the Version field value +// and a boolean to check if the value has been set. 
+func (o *SloDefinitionResponse) GetVersionOk() (*float64, bool) { + if o == nil { + return nil, false + } + return &o.Version, true +} + +// SetVersion sets field value +func (o *SloDefinitionResponse) SetVersion(v float64) { + o.Version = v +} + +func (o SloDefinitionResponse) MarshalJSON() ([]byte, error) { toSerialize, err := o.ToMap() if err != nil { return []byte{}, err @@ -471,7 +445,7 @@ func (o SloResponse) MarshalJSON() ([]byte, error) { return json.Marshal(toSerialize) } -func (o SloResponse) ToMap() (map[string]interface{}, error) { +func (o SloDefinitionResponse) ToMap() (map[string]interface{}, error) { toSerialize := map[string]interface{}{} toSerialize["id"] = o.Id toSerialize["name"] = o.Name @@ -482,48 +456,47 @@ func (o SloResponse) ToMap() (map[string]interface{}, error) { toSerialize["objective"] = o.Objective toSerialize["settings"] = o.Settings toSerialize["revision"] = o.Revision - toSerialize["summary"] = o.Summary toSerialize["enabled"] = o.Enabled toSerialize["groupBy"] = o.GroupBy - toSerialize["instanceId"] = o.InstanceId toSerialize["tags"] = o.Tags toSerialize["createdAt"] = o.CreatedAt toSerialize["updatedAt"] = o.UpdatedAt + toSerialize["version"] = o.Version return toSerialize, nil } -type NullableSloResponse struct { - value *SloResponse +type NullableSloDefinitionResponse struct { + value *SloDefinitionResponse isSet bool } -func (v NullableSloResponse) Get() *SloResponse { +func (v NullableSloDefinitionResponse) Get() *SloDefinitionResponse { return v.value } -func (v *NullableSloResponse) Set(val *SloResponse) { +func (v *NullableSloDefinitionResponse) Set(val *SloDefinitionResponse) { v.value = val v.isSet = true } -func (v NullableSloResponse) IsSet() bool { +func (v NullableSloDefinitionResponse) IsSet() bool { return v.isSet } -func (v *NullableSloResponse) Unset() { +func (v *NullableSloDefinitionResponse) Unset() { v.value = nil v.isSet = false } -func NewNullableSloResponse(val *SloResponse) *NullableSloResponse { - return &NullableSloResponse{value: val, isSet: true} +func NewNullableSloDefinitionResponse(val *SloDefinitionResponse) *NullableSloDefinitionResponse { + return &NullableSloDefinitionResponse{value: val, isSet: true} } -func (v NullableSloResponse) MarshalJSON() ([]byte, error) { +func (v NullableSloDefinitionResponse) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } -func (v *NullableSloResponse) UnmarshalJSON(src []byte) error { +func (v *NullableSloDefinitionResponse) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } diff --git a/generated/slo/model_slo_with_summary_response.go b/generated/slo/model_slo_with_summary_response.go new file mode 100644 index 000000000..c498dee0f --- /dev/null +++ b/generated/slo/model_slo_with_summary_response.go @@ -0,0 +1,557 @@ +/* +SLOs + +OpenAPI schema for SLOs endpoints + +API version: 1.1 +*/ + +// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. + +package slo + +import ( + "encoding/json" +) + +// checks if the SloWithSummaryResponse type satisfies the MappedNullable interface at compile time +var _ MappedNullable = &SloWithSummaryResponse{} + +// SloWithSummaryResponse struct for SloWithSummaryResponse +type SloWithSummaryResponse struct { + // The identifier of the SLO. + Id string `json:"id"` + // The name of the SLO. + Name string `json:"name"` + // The description of the SLO. 
+ Description string `json:"description"` + Indicator SloWithSummaryResponseIndicator `json:"indicator"` + TimeWindow TimeWindow `json:"timeWindow"` + BudgetingMethod BudgetingMethod `json:"budgetingMethod"` + Objective Objective `json:"objective"` + Settings Settings `json:"settings"` + // The SLO revision + Revision float64 `json:"revision"` + Summary Summary `json:"summary"` + // Indicate if the SLO is enabled + Enabled bool `json:"enabled"` + GroupBy GroupBy `json:"groupBy"` + // the value derived from the groupBy field, if present, otherwise '*' + InstanceId string `json:"instanceId"` + // List of tags + Tags []string `json:"tags"` + // The creation date + CreatedAt string `json:"createdAt"` + // The last update date + UpdatedAt string `json:"updatedAt"` + // The internal SLO version + Version float64 `json:"version"` +} + +// NewSloWithSummaryResponse instantiates a new SloWithSummaryResponse object +// This constructor will assign default values to properties that have it defined, +// and makes sure properties required by API are set, but the set of arguments +// will change when the set of required properties is changed +func NewSloWithSummaryResponse(id string, name string, description string, indicator SloWithSummaryResponseIndicator, timeWindow TimeWindow, budgetingMethod BudgetingMethod, objective Objective, settings Settings, revision float64, summary Summary, enabled bool, groupBy GroupBy, instanceId string, tags []string, createdAt string, updatedAt string, version float64) *SloWithSummaryResponse { + this := SloWithSummaryResponse{} + this.Id = id + this.Name = name + this.Description = description + this.Indicator = indicator + this.TimeWindow = timeWindow + this.BudgetingMethod = budgetingMethod + this.Objective = objective + this.Settings = settings + this.Revision = revision + this.Summary = summary + this.Enabled = enabled + this.GroupBy = groupBy + this.InstanceId = instanceId + this.Tags = tags + this.CreatedAt = createdAt + this.UpdatedAt = updatedAt + this.Version = version + return &this +} + +// NewSloWithSummaryResponseWithDefaults instantiates a new SloWithSummaryResponse object +// This constructor will only assign default values to properties that have it defined, +// but it doesn't guarantee that properties required by API are set +func NewSloWithSummaryResponseWithDefaults() *SloWithSummaryResponse { + this := SloWithSummaryResponse{} + return &this +} + +// GetId returns the Id field value +func (o *SloWithSummaryResponse) GetId() string { + if o == nil { + var ret string + return ret + } + + return o.Id +} + +// GetIdOk returns a tuple with the Id field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetIdOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Id, true +} + +// SetId sets field value +func (o *SloWithSummaryResponse) SetId(v string) { + o.Id = v +} + +// GetName returns the Name field value +func (o *SloWithSummaryResponse) GetName() string { + if o == nil { + var ret string + return ret + } + + return o.Name +} + +// GetNameOk returns a tuple with the Name field value +// and a boolean to check if the value has been set. 
+func (o *SloWithSummaryResponse) GetNameOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Name, true +} + +// SetName sets field value +func (o *SloWithSummaryResponse) SetName(v string) { + o.Name = v +} + +// GetDescription returns the Description field value +func (o *SloWithSummaryResponse) GetDescription() string { + if o == nil { + var ret string + return ret + } + + return o.Description +} + +// GetDescriptionOk returns a tuple with the Description field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetDescriptionOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.Description, true +} + +// SetDescription sets field value +func (o *SloWithSummaryResponse) SetDescription(v string) { + o.Description = v +} + +// GetIndicator returns the Indicator field value +func (o *SloWithSummaryResponse) GetIndicator() SloWithSummaryResponseIndicator { + if o == nil { + var ret SloWithSummaryResponseIndicator + return ret + } + + return o.Indicator +} + +// GetIndicatorOk returns a tuple with the Indicator field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetIndicatorOk() (*SloWithSummaryResponseIndicator, bool) { + if o == nil { + return nil, false + } + return &o.Indicator, true +} + +// SetIndicator sets field value +func (o *SloWithSummaryResponse) SetIndicator(v SloWithSummaryResponseIndicator) { + o.Indicator = v +} + +// GetTimeWindow returns the TimeWindow field value +func (o *SloWithSummaryResponse) GetTimeWindow() TimeWindow { + if o == nil { + var ret TimeWindow + return ret + } + + return o.TimeWindow +} + +// GetTimeWindowOk returns a tuple with the TimeWindow field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetTimeWindowOk() (*TimeWindow, bool) { + if o == nil { + return nil, false + } + return &o.TimeWindow, true +} + +// SetTimeWindow sets field value +func (o *SloWithSummaryResponse) SetTimeWindow(v TimeWindow) { + o.TimeWindow = v +} + +// GetBudgetingMethod returns the BudgetingMethod field value +func (o *SloWithSummaryResponse) GetBudgetingMethod() BudgetingMethod { + if o == nil { + var ret BudgetingMethod + return ret + } + + return o.BudgetingMethod +} + +// GetBudgetingMethodOk returns a tuple with the BudgetingMethod field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetBudgetingMethodOk() (*BudgetingMethod, bool) { + if o == nil { + return nil, false + } + return &o.BudgetingMethod, true +} + +// SetBudgetingMethod sets field value +func (o *SloWithSummaryResponse) SetBudgetingMethod(v BudgetingMethod) { + o.BudgetingMethod = v +} + +// GetObjective returns the Objective field value +func (o *SloWithSummaryResponse) GetObjective() Objective { + if o == nil { + var ret Objective + return ret + } + + return o.Objective +} + +// GetObjectiveOk returns a tuple with the Objective field value +// and a boolean to check if the value has been set. 
+func (o *SloWithSummaryResponse) GetObjectiveOk() (*Objective, bool) { + if o == nil { + return nil, false + } + return &o.Objective, true +} + +// SetObjective sets field value +func (o *SloWithSummaryResponse) SetObjective(v Objective) { + o.Objective = v +} + +// GetSettings returns the Settings field value +func (o *SloWithSummaryResponse) GetSettings() Settings { + if o == nil { + var ret Settings + return ret + } + + return o.Settings +} + +// GetSettingsOk returns a tuple with the Settings field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetSettingsOk() (*Settings, bool) { + if o == nil { + return nil, false + } + return &o.Settings, true +} + +// SetSettings sets field value +func (o *SloWithSummaryResponse) SetSettings(v Settings) { + o.Settings = v +} + +// GetRevision returns the Revision field value +func (o *SloWithSummaryResponse) GetRevision() float64 { + if o == nil { + var ret float64 + return ret + } + + return o.Revision +} + +// GetRevisionOk returns a tuple with the Revision field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetRevisionOk() (*float64, bool) { + if o == nil { + return nil, false + } + return &o.Revision, true +} + +// SetRevision sets field value +func (o *SloWithSummaryResponse) SetRevision(v float64) { + o.Revision = v +} + +// GetSummary returns the Summary field value +func (o *SloWithSummaryResponse) GetSummary() Summary { + if o == nil { + var ret Summary + return ret + } + + return o.Summary +} + +// GetSummaryOk returns a tuple with the Summary field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetSummaryOk() (*Summary, bool) { + if o == nil { + return nil, false + } + return &o.Summary, true +} + +// SetSummary sets field value +func (o *SloWithSummaryResponse) SetSummary(v Summary) { + o.Summary = v +} + +// GetEnabled returns the Enabled field value +func (o *SloWithSummaryResponse) GetEnabled() bool { + if o == nil { + var ret bool + return ret + } + + return o.Enabled +} + +// GetEnabledOk returns a tuple with the Enabled field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetEnabledOk() (*bool, bool) { + if o == nil { + return nil, false + } + return &o.Enabled, true +} + +// SetEnabled sets field value +func (o *SloWithSummaryResponse) SetEnabled(v bool) { + o.Enabled = v +} + +// GetGroupBy returns the GroupBy field value +func (o *SloWithSummaryResponse) GetGroupBy() GroupBy { + if o == nil { + var ret GroupBy + return ret + } + + return o.GroupBy +} + +// GetGroupByOk returns a tuple with the GroupBy field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetGroupByOk() (*GroupBy, bool) { + if o == nil { + return nil, false + } + return &o.GroupBy, true +} + +// SetGroupBy sets field value +func (o *SloWithSummaryResponse) SetGroupBy(v GroupBy) { + o.GroupBy = v +} + +// GetInstanceId returns the InstanceId field value +func (o *SloWithSummaryResponse) GetInstanceId() string { + if o == nil { + var ret string + return ret + } + + return o.InstanceId +} + +// GetInstanceIdOk returns a tuple with the InstanceId field value +// and a boolean to check if the value has been set. 
+func (o *SloWithSummaryResponse) GetInstanceIdOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.InstanceId, true +} + +// SetInstanceId sets field value +func (o *SloWithSummaryResponse) SetInstanceId(v string) { + o.InstanceId = v +} + +// GetTags returns the Tags field value +func (o *SloWithSummaryResponse) GetTags() []string { + if o == nil { + var ret []string + return ret + } + + return o.Tags +} + +// GetTagsOk returns a tuple with the Tags field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetTagsOk() ([]string, bool) { + if o == nil { + return nil, false + } + return o.Tags, true +} + +// SetTags sets field value +func (o *SloWithSummaryResponse) SetTags(v []string) { + o.Tags = v +} + +// GetCreatedAt returns the CreatedAt field value +func (o *SloWithSummaryResponse) GetCreatedAt() string { + if o == nil { + var ret string + return ret + } + + return o.CreatedAt +} + +// GetCreatedAtOk returns a tuple with the CreatedAt field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetCreatedAtOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.CreatedAt, true +} + +// SetCreatedAt sets field value +func (o *SloWithSummaryResponse) SetCreatedAt(v string) { + o.CreatedAt = v +} + +// GetUpdatedAt returns the UpdatedAt field value +func (o *SloWithSummaryResponse) GetUpdatedAt() string { + if o == nil { + var ret string + return ret + } + + return o.UpdatedAt +} + +// GetUpdatedAtOk returns a tuple with the UpdatedAt field value +// and a boolean to check if the value has been set. +func (o *SloWithSummaryResponse) GetUpdatedAtOk() (*string, bool) { + if o == nil { + return nil, false + } + return &o.UpdatedAt, true +} + +// SetUpdatedAt sets field value +func (o *SloWithSummaryResponse) SetUpdatedAt(v string) { + o.UpdatedAt = v +} + +// GetVersion returns the Version field value +func (o *SloWithSummaryResponse) GetVersion() float64 { + if o == nil { + var ret float64 + return ret + } + + return o.Version +} + +// GetVersionOk returns a tuple with the Version field value +// and a boolean to check if the value has been set. 
+func (o *SloWithSummaryResponse) GetVersionOk() (*float64, bool) { + if o == nil { + return nil, false + } + return &o.Version, true +} + +// SetVersion sets field value +func (o *SloWithSummaryResponse) SetVersion(v float64) { + o.Version = v +} + +func (o SloWithSummaryResponse) MarshalJSON() ([]byte, error) { + toSerialize, err := o.ToMap() + if err != nil { + return []byte{}, err + } + return json.Marshal(toSerialize) +} + +func (o SloWithSummaryResponse) ToMap() (map[string]interface{}, error) { + toSerialize := map[string]interface{}{} + toSerialize["id"] = o.Id + toSerialize["name"] = o.Name + toSerialize["description"] = o.Description + toSerialize["indicator"] = o.Indicator + toSerialize["timeWindow"] = o.TimeWindow + toSerialize["budgetingMethod"] = o.BudgetingMethod + toSerialize["objective"] = o.Objective + toSerialize["settings"] = o.Settings + toSerialize["revision"] = o.Revision + toSerialize["summary"] = o.Summary + toSerialize["enabled"] = o.Enabled + toSerialize["groupBy"] = o.GroupBy + toSerialize["instanceId"] = o.InstanceId + toSerialize["tags"] = o.Tags + toSerialize["createdAt"] = o.CreatedAt + toSerialize["updatedAt"] = o.UpdatedAt + toSerialize["version"] = o.Version + return toSerialize, nil +} + +type NullableSloWithSummaryResponse struct { + value *SloWithSummaryResponse + isSet bool +} + +func (v NullableSloWithSummaryResponse) Get() *SloWithSummaryResponse { + return v.value +} + +func (v *NullableSloWithSummaryResponse) Set(val *SloWithSummaryResponse) { + v.value = val + v.isSet = true +} + +func (v NullableSloWithSummaryResponse) IsSet() bool { + return v.isSet +} + +func (v *NullableSloWithSummaryResponse) Unset() { + v.value = nil + v.isSet = false +} + +func NewNullableSloWithSummaryResponse(val *SloWithSummaryResponse) *NullableSloWithSummaryResponse { + return &NullableSloWithSummaryResponse{value: val, isSet: true} +} + +func (v NullableSloWithSummaryResponse) MarshalJSON() ([]byte, error) { + return json.Marshal(v.value) +} + +func (v *NullableSloWithSummaryResponse) UnmarshalJSON(src []byte) error { + v.isSet = true + return json.Unmarshal(src, &v.value) +} diff --git a/generated/slo/model_slo_response_indicator.go b/generated/slo/model_slo_with_summary_response_indicator.go similarity index 62% rename from generated/slo/model_slo_response_indicator.go rename to generated/slo/model_slo_with_summary_response_indicator.go index e0147a4dd..4e5fc86e6 100644 --- a/generated/slo/model_slo_response_indicator.go +++ b/generated/slo/model_slo_with_summary_response_indicator.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
@@ -15,8 +15,8 @@ import ( "fmt" ) -// SloResponseIndicator - struct for SloResponseIndicator -type SloResponseIndicator struct { +// SloWithSummaryResponseIndicator - struct for SloWithSummaryResponseIndicator +type SloWithSummaryResponseIndicator struct { IndicatorPropertiesApmAvailability *IndicatorPropertiesApmAvailability IndicatorPropertiesApmLatency *IndicatorPropertiesApmLatency IndicatorPropertiesCustomKql *IndicatorPropertiesCustomKql @@ -25,50 +25,50 @@ type SloResponseIndicator struct { IndicatorPropertiesTimesliceMetric *IndicatorPropertiesTimesliceMetric } -// IndicatorPropertiesApmAvailabilityAsSloResponseIndicator is a convenience function that returns IndicatorPropertiesApmAvailability wrapped in SloResponseIndicator -func IndicatorPropertiesApmAvailabilityAsSloResponseIndicator(v *IndicatorPropertiesApmAvailability) SloResponseIndicator { - return SloResponseIndicator{ +// IndicatorPropertiesApmAvailabilityAsSloWithSummaryResponseIndicator is a convenience function that returns IndicatorPropertiesApmAvailability wrapped in SloWithSummaryResponseIndicator +func IndicatorPropertiesApmAvailabilityAsSloWithSummaryResponseIndicator(v *IndicatorPropertiesApmAvailability) SloWithSummaryResponseIndicator { + return SloWithSummaryResponseIndicator{ IndicatorPropertiesApmAvailability: v, } } -// IndicatorPropertiesApmLatencyAsSloResponseIndicator is a convenience function that returns IndicatorPropertiesApmLatency wrapped in SloResponseIndicator -func IndicatorPropertiesApmLatencyAsSloResponseIndicator(v *IndicatorPropertiesApmLatency) SloResponseIndicator { - return SloResponseIndicator{ +// IndicatorPropertiesApmLatencyAsSloWithSummaryResponseIndicator is a convenience function that returns IndicatorPropertiesApmLatency wrapped in SloWithSummaryResponseIndicator +func IndicatorPropertiesApmLatencyAsSloWithSummaryResponseIndicator(v *IndicatorPropertiesApmLatency) SloWithSummaryResponseIndicator { + return SloWithSummaryResponseIndicator{ IndicatorPropertiesApmLatency: v, } } -// IndicatorPropertiesCustomKqlAsSloResponseIndicator is a convenience function that returns IndicatorPropertiesCustomKql wrapped in SloResponseIndicator -func IndicatorPropertiesCustomKqlAsSloResponseIndicator(v *IndicatorPropertiesCustomKql) SloResponseIndicator { - return SloResponseIndicator{ +// IndicatorPropertiesCustomKqlAsSloWithSummaryResponseIndicator is a convenience function that returns IndicatorPropertiesCustomKql wrapped in SloWithSummaryResponseIndicator +func IndicatorPropertiesCustomKqlAsSloWithSummaryResponseIndicator(v *IndicatorPropertiesCustomKql) SloWithSummaryResponseIndicator { + return SloWithSummaryResponseIndicator{ IndicatorPropertiesCustomKql: v, } } -// IndicatorPropertiesCustomMetricAsSloResponseIndicator is a convenience function that returns IndicatorPropertiesCustomMetric wrapped in SloResponseIndicator -func IndicatorPropertiesCustomMetricAsSloResponseIndicator(v *IndicatorPropertiesCustomMetric) SloResponseIndicator { - return SloResponseIndicator{ +// IndicatorPropertiesCustomMetricAsSloWithSummaryResponseIndicator is a convenience function that returns IndicatorPropertiesCustomMetric wrapped in SloWithSummaryResponseIndicator +func IndicatorPropertiesCustomMetricAsSloWithSummaryResponseIndicator(v *IndicatorPropertiesCustomMetric) SloWithSummaryResponseIndicator { + return SloWithSummaryResponseIndicator{ IndicatorPropertiesCustomMetric: v, } } -// IndicatorPropertiesHistogramAsSloResponseIndicator is a convenience function that returns IndicatorPropertiesHistogram 
wrapped in SloResponseIndicator -func IndicatorPropertiesHistogramAsSloResponseIndicator(v *IndicatorPropertiesHistogram) SloResponseIndicator { - return SloResponseIndicator{ +// IndicatorPropertiesHistogramAsSloWithSummaryResponseIndicator is a convenience function that returns IndicatorPropertiesHistogram wrapped in SloWithSummaryResponseIndicator +func IndicatorPropertiesHistogramAsSloWithSummaryResponseIndicator(v *IndicatorPropertiesHistogram) SloWithSummaryResponseIndicator { + return SloWithSummaryResponseIndicator{ IndicatorPropertiesHistogram: v, } } -// IndicatorPropertiesTimesliceMetricAsSloResponseIndicator is a convenience function that returns IndicatorPropertiesTimesliceMetric wrapped in SloResponseIndicator -func IndicatorPropertiesTimesliceMetricAsSloResponseIndicator(v *IndicatorPropertiesTimesliceMetric) SloResponseIndicator { - return SloResponseIndicator{ +// IndicatorPropertiesTimesliceMetricAsSloWithSummaryResponseIndicator is a convenience function that returns IndicatorPropertiesTimesliceMetric wrapped in SloWithSummaryResponseIndicator +func IndicatorPropertiesTimesliceMetricAsSloWithSummaryResponseIndicator(v *IndicatorPropertiesTimesliceMetric) SloWithSummaryResponseIndicator { + return SloWithSummaryResponseIndicator{ IndicatorPropertiesTimesliceMetric: v, } } // Unmarshal JSON data into one of the pointers in the struct -func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { +func (dst *SloWithSummaryResponseIndicator) UnmarshalJSON(data []byte) error { var err error // use discriminator value to speed up the lookup var jsonDict map[string]interface{} @@ -85,7 +85,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesApmAvailability, return on the first match } else { dst.IndicatorPropertiesApmAvailability = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesApmAvailability: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesApmAvailability: %s", err.Error()) } } @@ -97,7 +97,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesApmLatency, return on the first match } else { dst.IndicatorPropertiesApmLatency = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesApmLatency: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesApmLatency: %s", err.Error()) } } @@ -109,7 +109,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesCustomKql, return on the first match } else { dst.IndicatorPropertiesCustomKql = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesCustomKql: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesCustomKql: %s", err.Error()) } } @@ -121,7 +121,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesCustomMetric, return on the first match } else { dst.IndicatorPropertiesCustomMetric = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesCustomMetric: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesCustomMetric: %s", err.Error()) } } @@ -133,7 +133,7 @@ func (dst 
*SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesHistogram, return on the first match } else { dst.IndicatorPropertiesHistogram = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesHistogram: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesHistogram: %s", err.Error()) } } @@ -145,7 +145,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesTimesliceMetric, return on the first match } else { dst.IndicatorPropertiesTimesliceMetric = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesTimesliceMetric: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesTimesliceMetric: %s", err.Error()) } } @@ -157,7 +157,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesApmLatency, return on the first match } else { dst.IndicatorPropertiesApmLatency = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesApmLatency: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesApmLatency: %s", err.Error()) } } @@ -169,7 +169,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesApmAvailability, return on the first match } else { dst.IndicatorPropertiesApmAvailability = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesApmAvailability: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesApmAvailability: %s", err.Error()) } } @@ -181,7 +181,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesHistogram, return on the first match } else { dst.IndicatorPropertiesHistogram = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesHistogram: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesHistogram: %s", err.Error()) } } @@ -193,7 +193,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesCustomKql, return on the first match } else { dst.IndicatorPropertiesCustomKql = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesCustomKql: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesCustomKql: %s", err.Error()) } } @@ -205,7 +205,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesCustomMetric, return on the first match } else { dst.IndicatorPropertiesCustomMetric = nil - return fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesCustomMetric: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesCustomMetric: %s", err.Error()) } } @@ -217,7 +217,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { return nil // data stored in dst.IndicatorPropertiesTimesliceMetric, return on the first match } else { dst.IndicatorPropertiesTimesliceMetric = nil - return 
fmt.Errorf("failed to unmarshal SloResponseIndicator as IndicatorPropertiesTimesliceMetric: %s", err.Error()) + return fmt.Errorf("failed to unmarshal SloWithSummaryResponseIndicator as IndicatorPropertiesTimesliceMetric: %s", err.Error()) } } @@ -225,7 +225,7 @@ func (dst *SloResponseIndicator) UnmarshalJSON(data []byte) error { } // Marshal data from the first non-nil pointers in the struct to JSON -func (src SloResponseIndicator) MarshalJSON() ([]byte, error) { +func (src SloWithSummaryResponseIndicator) MarshalJSON() ([]byte, error) { if src.IndicatorPropertiesApmAvailability != nil { return json.Marshal(&src.IndicatorPropertiesApmAvailability) } @@ -254,7 +254,7 @@ func (src SloResponseIndicator) MarshalJSON() ([]byte, error) { } // Get the actual instance -func (obj *SloResponseIndicator) GetActualInstance() interface{} { +func (obj *SloWithSummaryResponseIndicator) GetActualInstance() interface{} { if obj == nil { return nil } @@ -286,38 +286,38 @@ func (obj *SloResponseIndicator) GetActualInstance() interface{} { return nil } -type NullableSloResponseIndicator struct { - value *SloResponseIndicator +type NullableSloWithSummaryResponseIndicator struct { + value *SloWithSummaryResponseIndicator isSet bool } -func (v NullableSloResponseIndicator) Get() *SloResponseIndicator { +func (v NullableSloWithSummaryResponseIndicator) Get() *SloWithSummaryResponseIndicator { return v.value } -func (v *NullableSloResponseIndicator) Set(val *SloResponseIndicator) { +func (v *NullableSloWithSummaryResponseIndicator) Set(val *SloWithSummaryResponseIndicator) { v.value = val v.isSet = true } -func (v NullableSloResponseIndicator) IsSet() bool { +func (v NullableSloWithSummaryResponseIndicator) IsSet() bool { return v.isSet } -func (v *NullableSloResponseIndicator) Unset() { +func (v *NullableSloWithSummaryResponseIndicator) Unset() { v.value = nil v.isSet = false } -func NewNullableSloResponseIndicator(val *SloResponseIndicator) *NullableSloResponseIndicator { - return &NullableSloResponseIndicator{value: val, isSet: true} +func NewNullableSloWithSummaryResponseIndicator(val *SloWithSummaryResponseIndicator) *NullableSloWithSummaryResponseIndicator { + return &NullableSloWithSummaryResponseIndicator{value: val, isSet: true} } -func (v NullableSloResponseIndicator) MarshalJSON() ([]byte, error) { +func (v NullableSloWithSummaryResponseIndicator) MarshalJSON() ([]byte, error) { return json.Marshal(v.value) } -func (v *NullableSloResponseIndicator) UnmarshalJSON(src []byte) error { +func (v *NullableSloWithSummaryResponseIndicator) UnmarshalJSON(src []byte) error { v.isSet = true return json.Unmarshal(src, &v.value) } diff --git a/generated/slo/model_summary.go b/generated/slo/model_summary.go index e04aea289..251539a2e 100644 --- a/generated/slo/model_summary.go +++ b/generated/slo/model_summary.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_summary_status.go b/generated/slo/model_summary_status.go index 4ebe3d0f5..a942168d9 100644 --- a/generated/slo/model_summary_status.go +++ b/generated/slo/model_summary_status.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. 
diff --git a/generated/slo/model_time_window.go b/generated/slo/model_time_window.go index e7ca20150..71e713707 100644 --- a/generated/slo/model_time_window.go +++ b/generated/slo/model_time_window.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_timeslice_metric_basic_metric_with_field.go b/generated/slo/model_timeslice_metric_basic_metric_with_field.go index 6f3e23eda..e54d5af2a 100644 --- a/generated/slo/model_timeslice_metric_basic_metric_with_field.go +++ b/generated/slo/model_timeslice_metric_basic_metric_with_field.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_timeslice_metric_doc_count_metric.go b/generated/slo/model_timeslice_metric_doc_count_metric.go index ac35571c3..efcba9abe 100644 --- a/generated/slo/model_timeslice_metric_doc_count_metric.go +++ b/generated/slo/model_timeslice_metric_doc_count_metric.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_timeslice_metric_percentile_metric.go b/generated/slo/model_timeslice_metric_percentile_metric.go index ada902804..0d2844b2c 100644 --- a/generated/slo/model_timeslice_metric_percentile_metric.go +++ b/generated/slo/model_timeslice_metric_percentile_metric.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/model_update_slo_request.go b/generated/slo/model_update_slo_request.go index 242972bca..607c081c8 100644 --- a/generated/slo/model_update_slo_request.go +++ b/generated/slo/model_update_slo_request.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. @@ -28,7 +28,7 @@ type UpdateSloRequest struct { BudgetingMethod *BudgetingMethod `json:"budgetingMethod,omitempty"` Objective *Objective `json:"objective,omitempty"` Settings *Settings `json:"settings,omitempty"` - GroupBy *SloResponseGroupBy `json:"groupBy,omitempty"` + GroupBy *GroupBy `json:"groupBy,omitempty"` // List of tags Tags []string `json:"tags,omitempty"` } @@ -275,9 +275,9 @@ func (o *UpdateSloRequest) SetSettings(v Settings) { } // GetGroupBy returns the GroupBy field value if set, zero value otherwise. -func (o *UpdateSloRequest) GetGroupBy() SloResponseGroupBy { +func (o *UpdateSloRequest) GetGroupBy() GroupBy { if o == nil || IsNil(o.GroupBy) { - var ret SloResponseGroupBy + var ret GroupBy return ret } return *o.GroupBy @@ -285,7 +285,7 @@ func (o *UpdateSloRequest) GetGroupBy() SloResponseGroupBy { // GetGroupByOk returns a tuple with the GroupBy field value if set, nil otherwise // and a boolean to check if the value has been set. 
-func (o *UpdateSloRequest) GetGroupByOk() (*SloResponseGroupBy, bool) { +func (o *UpdateSloRequest) GetGroupByOk() (*GroupBy, bool) { if o == nil || IsNil(o.GroupBy) { return nil, false } @@ -301,8 +301,8 @@ func (o *UpdateSloRequest) HasGroupBy() bool { return false } -// SetGroupBy gets a reference to the given SloResponseGroupBy and assigns it to the GroupBy field. -func (o *UpdateSloRequest) SetGroupBy(v SloResponseGroupBy) { +// SetGroupBy gets a reference to the given GroupBy and assigns it to the GroupBy field. +func (o *UpdateSloRequest) SetGroupBy(v GroupBy) { o.GroupBy = &v } diff --git a/generated/slo/response.go b/generated/slo/response.go index 2875928ac..2cb1fb208 100644 --- a/generated/slo/response.go +++ b/generated/slo/response.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/generated/slo/utils.go b/generated/slo/utils.go index 7907b693f..826d18aca 100644 --- a/generated/slo/utils.go +++ b/generated/slo/utils.go @@ -3,7 +3,7 @@ SLOs OpenAPI schema for SLOs endpoints -API version: 1.0 +API version: 1.1 */ // Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT. diff --git a/internal/clients/kibana/slo.go b/internal/clients/kibana/slo.go index 58863044f..593c7e8f6 100644 --- a/internal/clients/kibana/slo.go +++ b/internal/clients/kibana/slo.go @@ -77,7 +77,7 @@ func UpdateSlo(ctx context.Context, apiClient *clients.ApiClient, s models.Slo, } req := client.UpdateSloOp(ctxWithAuth, s.SpaceID, s.SloID).KbnXsrf("true").UpdateSloRequest(reqModel) - slo, res, err := req.Execute() + _, res, err := req.Execute() if err != nil { return nil, diag.FromErr(err) @@ -88,7 +88,7 @@ func UpdateSlo(ctx context.Context, apiClient *clients.ApiClient, s models.Slo, return nil, diags } - return sloResponseToModel(s.SpaceID, slo), diag.Diagnostics{} + return &s, diag.Diagnostics{} } func CreateSlo(ctx context.Context, apiClient *clients.ApiClient, s models.Slo, supportsGroupByList bool) (*models.Slo, diag.Diagnostics) { @@ -135,7 +135,7 @@ func CreateSlo(ctx context.Context, apiClient *clients.ApiClient, s models.Slo, return &s, diag.Diagnostics{} } -func responseIndicatorToCreateSloRequestIndicator(s slo.SloResponseIndicator) (slo.CreateSloRequestIndicator, error) { +func responseIndicatorToCreateSloRequestIndicator(s slo.SloWithSummaryResponseIndicator) (slo.CreateSloRequestIndicator, error) { var ret slo.CreateSloRequestIndicator ind := s.GetActualInstance() @@ -166,7 +166,7 @@ func responseIndicatorToCreateSloRequestIndicator(s slo.SloResponseIndicator) (s return ret, nil } -func sloResponseToModel(spaceID string, res *slo.SloResponse) *models.Slo { +func sloResponseToModel(spaceID string, res *slo.SloWithSummaryResponse) *models.Slo { if res == nil { return nil } @@ -186,21 +186,21 @@ func sloResponseToModel(spaceID string, res *slo.SloResponse) *models.Slo { } } -func transformGroupBy(groupBy []string, supportsGroupByList bool) *slo.SloResponseGroupBy { +func transformGroupBy(groupBy []string, supportsGroupByList bool) *slo.GroupBy { if groupBy == nil { return nil } if !supportsGroupByList && len(groupBy) > 0 { - return &slo.SloResponseGroupBy{ + return &slo.GroupBy{ String: &groupBy[0], } } - return &slo.SloResponseGroupBy{ArrayOfString: &groupBy} + return &slo.GroupBy{ArrayOfString: &groupBy} } -func transformGroupByFromResponse(groupBy slo.SloResponseGroupBy) []string { +func transformGroupByFromResponse(groupBy 
slo.GroupBy) []string { if groupBy.String != nil { return []string{*groupBy.String} } diff --git a/internal/clients/kibana/slo_test.go b/internal/clients/kibana/slo_test.go index 26b7ac0f1..f273d55e4 100644 --- a/internal/clients/kibana/slo_test.go +++ b/internal/clients/kibana/slo_test.go @@ -15,18 +15,18 @@ func Test_sloResponseToModel(t *testing.T) { name string spaceId string sloId string - sloResponse *slo.SloResponse + sloResponse *slo.SloWithSummaryResponse expectedModel *models.Slo }{ { name: "should return a model with the correct values", spaceId: "space-id", sloId: "slo-id", - sloResponse: &slo.SloResponse{ + sloResponse: &slo.SloWithSummaryResponse{ Id: "slo-id", Name: "slo-name", Description: "slo-description", - Indicator: slo.SloResponseIndicator{ + Indicator: slo.SloWithSummaryResponseIndicator{ IndicatorPropertiesApmAvailability: &slo.IndicatorPropertiesApmAvailability{ Type: "sli.apm.transactionErrorRate", Params: slo.IndicatorPropertiesApmAvailabilityParams{ @@ -55,7 +55,7 @@ func Test_sloResponseToModel(t *testing.T) { SloID: "slo-id", Name: "slo-name", Description: "slo-description", - Indicator: slo.SloResponseIndicator{ + Indicator: slo.SloWithSummaryResponseIndicator{ IndicatorPropertiesApmAvailability: &slo.IndicatorPropertiesApmAvailability{ Type: "sli.apm.transactionErrorRate", Params: slo.IndicatorPropertiesApmAvailabilityParams{ @@ -84,11 +84,11 @@ func Test_sloResponseToModel(t *testing.T) { name: "should return tags if available", spaceId: "space-id", sloId: "slo-id", - sloResponse: &slo.SloResponse{ + sloResponse: &slo.SloWithSummaryResponse{ Id: "slo-id", Name: "slo-name", Description: "slo-description", - Indicator: slo.SloResponseIndicator{ + Indicator: slo.SloWithSummaryResponseIndicator{ IndicatorPropertiesApmAvailability: &slo.IndicatorPropertiesApmAvailability{ Type: "sli.apm.transactionErrorRate", Params: slo.IndicatorPropertiesApmAvailabilityParams{ @@ -118,7 +118,7 @@ func Test_sloResponseToModel(t *testing.T) { SloID: "slo-id", Name: "slo-name", Description: "slo-description", - Indicator: slo.SloResponseIndicator{ + Indicator: slo.SloWithSummaryResponseIndicator{ IndicatorPropertiesApmAvailability: &slo.IndicatorPropertiesApmAvailability{ Type: "sli.apm.transactionErrorRate", Params: slo.IndicatorPropertiesApmAvailabilityParams{ diff --git a/internal/kibana/alerting.go b/internal/kibana/alerting.go index b6e5fac13..9aadcbe70 100644 --- a/internal/kibana/alerting.go +++ b/internal/kibana/alerting.go @@ -252,7 +252,7 @@ func getAlertingRuleFromResourceData(d *schema.ResourceData, serverVersion *vers } // Explicitly set rule id if provided, otherwise we'll use the autogenerated ID from the Kibana API response - if ruleID := getOrNilString("rule_id", d); ruleID != nil && *ruleID != "" { + if ruleID := getOrNil[string]("rule_id", d); ruleID != nil && *ruleID != "" { rule.RuleID = *ruleID } @@ -347,7 +347,7 @@ func getActionsFromResourceData(d *schema.ResourceData, serverVersion *version.V NotifyWhen: d.Get(currentAction + ".frequency.0.notify_when").(string), } - if throttle := getOrNilString(currentAction+".frequency.0.throttle", d); throttle != nil && *throttle != "" { + if throttle := getOrNil[string](currentAction+".frequency.0.throttle", d); throttle != nil && *throttle != "" { frequency.Throttle = throttle } diff --git a/internal/kibana/slo.go b/internal/kibana/slo.go index 85ba3e27e..eaf5429e7 100644 --- a/internal/kibana/slo.go +++ b/internal/kibana/slo.go @@ -577,18 +577,16 @@ func getSchema() map[string]*schema.Schema { } } -func 
getOrNilString(path string, d *schema.ResourceData) *string { - if v, ok := d.GetOk(path); ok { - str := v.(string) - return &str - } - return nil +func getOrNil[T any](path string, d *schema.ResourceData) *T { + return transformOrNil[T](path, d, func(v interface{}) T { + return v.(T) + }) } -func getOrNilFloat(path string, d *schema.ResourceData) *float64 { +func transformOrNil[T any](path string, d *schema.ResourceData, transform func(interface{}) T) *T { if v, ok := d.GetOk(path); ok { - f := v.(float64) - return &f + val := transform(v) + return &val } return nil } @@ -596,7 +594,7 @@ func getOrNilFloat(path string, d *schema.ResourceData) *float64 { func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostics) { var diags diag.Diagnostics - var indicator slo.SloResponseIndicator + var indicator slo.SloWithSummaryResponseIndicator var indicatorType string for key := range indicatorAddressToType { _, exists := d.GetOk(key) @@ -607,21 +605,31 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic switch indicatorType { case "kql_custom_indicator": - indicator = slo.SloResponseIndicator{ + indicator = slo.SloWithSummaryResponseIndicator{ IndicatorPropertiesCustomKql: &slo.IndicatorPropertiesCustomKql{ Type: indicatorAddressToType[indicatorType], Params: slo.IndicatorPropertiesCustomKqlParams{ - Index: d.Get(indicatorType + ".0.index").(string), - Filter: getOrNilString(indicatorType+".0.filter", d), - Good: d.Get(indicatorType + ".0.good").(string), - Total: d.Get(indicatorType + ".0.total").(string), + Index: d.Get(indicatorType + ".0.index").(string), + Filter: transformOrNil[slo.KqlWithFilters]( + indicatorType+".0.filter", d, + func(v interface{}) slo.KqlWithFilters { + return slo.KqlWithFilters{ + String: utils.Pointer(v.(string)), + } + }), + Good: slo.KqlWithFiltersGood{ + String: utils.Pointer(d.Get(indicatorType + ".0.good").(string)), + }, + Total: slo.KqlWithFiltersTotal{ + String: utils.Pointer(d.Get(indicatorType + ".0.total").(string)), + }, TimestampField: d.Get(indicatorType + ".0.timestamp_field").(string), }, }, } case "apm_availability_indicator": - indicator = slo.SloResponseIndicator{ + indicator = slo.SloWithSummaryResponseIndicator{ IndicatorPropertiesApmAvailability: &slo.IndicatorPropertiesApmAvailability{ Type: indicatorAddressToType[indicatorType], Params: slo.IndicatorPropertiesApmAvailabilityParams{ @@ -629,14 +637,14 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic Environment: d.Get(indicatorType + ".0.environment").(string), TransactionType: d.Get(indicatorType + ".0.transaction_type").(string), TransactionName: d.Get(indicatorType + ".0.transaction_name").(string), - Filter: getOrNilString(indicatorType+".0.filter", d), + Filter: getOrNil[string](indicatorType+".0.filter", d), Index: d.Get(indicatorType + ".0.index").(string), }, }, } case "apm_latency_indicator": - indicator = slo.SloResponseIndicator{ + indicator = slo.SloWithSummaryResponseIndicator{ IndicatorPropertiesApmLatency: &slo.IndicatorPropertiesApmLatency{ Type: indicatorAddressToType[indicatorType], Params: slo.IndicatorPropertiesApmLatencyParams{ @@ -644,7 +652,7 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic Environment: d.Get(indicatorType + ".0.environment").(string), TransactionType: d.Get(indicatorType + ".0.transaction_type").(string), TransactionName: d.Get(indicatorType + ".0.transaction_name").(string), - Filter: getOrNilString(indicatorType+".0.filter", d), + 
Filter: getOrNil[string](indicatorType+".0.filter", d), Index: d.Get(indicatorType + ".0.index").(string), Threshold: float64(d.Get(indicatorType + ".0.threshold").(int)), }, @@ -652,26 +660,26 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic } case "histogram_custom_indicator": - indicator = slo.SloResponseIndicator{ + indicator = slo.SloWithSummaryResponseIndicator{ IndicatorPropertiesHistogram: &slo.IndicatorPropertiesHistogram{ Type: indicatorAddressToType[indicatorType], Params: slo.IndicatorPropertiesHistogramParams{ - Filter: getOrNilString(indicatorType+".0.filter", d), + Filter: getOrNil[string](indicatorType+".0.filter", d), Index: d.Get(indicatorType + ".0.index").(string), TimestampField: d.Get(indicatorType + ".0.timestamp_field").(string), Good: slo.IndicatorPropertiesHistogramParamsGood{ Field: d.Get(indicatorType + ".0.good.0.field").(string), Aggregation: d.Get(indicatorType + ".0.good.0.aggregation").(string), - Filter: getOrNilString(indicatorType+".0.good.0.filter", d), - From: getOrNilFloat(indicatorType+".0.good.0.from", d), - To: getOrNilFloat(indicatorType+".0.good.0.to", d), + Filter: getOrNil[string](indicatorType+".0.good.0.filter", d), + From: getOrNil[float64](indicatorType+".0.good.0.from", d), + To: getOrNil[float64](indicatorType+".0.good.0.to", d), }, Total: slo.IndicatorPropertiesHistogramParamsTotal{ Field: d.Get(indicatorType + ".0.total.0.field").(string), Aggregation: d.Get(indicatorType + ".0.total.0.aggregation").(string), - Filter: getOrNilString(indicatorType+".0.total.0.filter", d), - From: getOrNilFloat(indicatorType+".0.total.0.from", d), - To: getOrNilFloat(indicatorType+".0.total.0.to", d), + Filter: getOrNil[string](indicatorType+".0.total.0.filter", d), + From: getOrNil[float64](indicatorType+".0.total.0.from", d), + To: getOrNil[float64](indicatorType+".0.total.0.to", d), }, }, }, @@ -686,25 +694,25 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic Name: d.Get(indicatorType + ".0.good.0.metrics." + idx + ".name").(string), Field: d.Get(indicatorType + ".0.good.0.metrics." + idx + ".field").(string), Aggregation: d.Get(indicatorType + ".0.good.0.metrics." + idx + ".aggregation").(string), - Filter: getOrNilString(indicatorType+".0.good.0.metrics."+idx+".filter", d), + Filter: getOrNil[string](indicatorType+".0.good.0.metrics."+idx+".filter", d), }) } totalMetricsRaw := d.Get(indicatorType + ".0.total.0.metrics").([]interface{}) - var totalMetrics []slo.IndicatorPropertiesCustomMetricParamsTotalMetricsInner + var totalMetrics []slo.IndicatorPropertiesCustomMetricParamsGoodMetricsInner for n := range totalMetricsRaw { idx := fmt.Sprint(n) - totalMetrics = append(totalMetrics, slo.IndicatorPropertiesCustomMetricParamsTotalMetricsInner{ + totalMetrics = append(totalMetrics, slo.IndicatorPropertiesCustomMetricParamsGoodMetricsInner{ Name: d.Get(indicatorType + ".0.total.0.metrics." + idx + ".name").(string), Field: d.Get(indicatorType + ".0.total.0.metrics." + idx + ".field").(string), Aggregation: d.Get(indicatorType + ".0.total.0.metrics." 
+ idx + ".aggregation").(string), - Filter: getOrNilString(indicatorType+".0.total.0.metrics."+idx+".filter", d), + Filter: getOrNil[string](indicatorType+".0.total.0.metrics."+idx+".filter", d), }) } - indicator = slo.SloResponseIndicator{ + indicator = slo.SloWithSummaryResponseIndicator{ IndicatorPropertiesCustomMetric: &slo.IndicatorPropertiesCustomMetric{ Type: indicatorAddressToType[indicatorType], Params: slo.IndicatorPropertiesCustomMetricParams{ - Filter: getOrNilString(indicatorType+".0.filter", d), + Filter: getOrNil[string](indicatorType+".0.filter", d), Index: d.Get(indicatorType + ".0.index").(string), TimestampField: d.Get(indicatorType + ".0.timestamp_field").(string), Good: slo.IndicatorPropertiesCustomMetricParamsGood{ @@ -756,13 +764,13 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic return models.Slo{}, diag.Errorf("metrics[%d]: unsupported aggregation '%s'", i, agg) } } - indicator = slo.SloResponseIndicator{ + indicator = slo.SloWithSummaryResponseIndicator{ IndicatorPropertiesTimesliceMetric: &slo.IndicatorPropertiesTimesliceMetric{ Type: indicatorAddressToType[indicatorType], Params: slo.IndicatorPropertiesTimesliceMetricParams{ Index: params["index"].(string), TimestampField: params["timestamp_field"].(string), - Filter: getOrNilString("timeslice_metric_indicator.0.filter", d), + Filter: getOrNil[string]("timeslice_metric_indicator.0.filter", d), Metric: slo.IndicatorPropertiesTimesliceMetricParamsMetric{ Metrics: metrics, Equation: metricBlock["equation"].(string), @@ -784,13 +792,13 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic objective := slo.Objective{ Target: d.Get("objective.0.target").(float64), - TimesliceTarget: getOrNilFloat("objective.0.timeslice_target", d), - TimesliceWindow: getOrNilString("objective.0.timeslice_window", d), + TimesliceTarget: getOrNil[float64]("objective.0.timeslice_target", d), + TimesliceWindow: getOrNil[string]("objective.0.timeslice_window", d), } settings := slo.Settings{ - SyncDelay: getOrNilString("settings.0.sync_delay", d), - Frequency: getOrNilString("settings.0.frequency", d), + SyncDelay: getOrNil[string]("settings.0.sync_delay", d), + Frequency: getOrNil[string]("settings.0.frequency", d), } budgetingMethod := slo.BudgetingMethod(d.Get("budgeting_method").(string)) @@ -807,7 +815,7 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic } // Explicitly set SLO object id if provided, otherwise we'll use the autogenerated ID from the Kibana API response - if sloID := getOrNilString("slo_id", d); sloID != nil && *sloID != "" { + if sloID := getOrNil[string]("slo_id", d); sloID != nil && *sloID != "" { slo.SloID = *sloID } @@ -944,9 +952,9 @@ func resourceSloRead(ctx context.Context, d *schema.ResourceData, meta interface params := s.Indicator.IndicatorPropertiesCustomKql.Params indicator = append(indicator, map[string]interface{}{ "index": params.Index, - "filter": params.Filter, - "good": params.Good, - "total": params.Total, + "filter": params.Filter.String, + "good": params.Good.String, + "total": params.Total.String, "timestamp_field": params.TimestampField, }) diff --git a/internal/models/slo.go b/internal/models/slo.go index 2ab58e61e..9072ba025 100644 --- a/internal/models/slo.go +++ b/internal/models/slo.go @@ -8,7 +8,7 @@ type Slo struct { SloID string Name string Description string - Indicator slo.SloResponseIndicator + Indicator slo.SloWithSummaryResponseIndicator TimeWindow slo.TimeWindow BudgetingMethod 
slo.BudgetingMethod Objective slo.Objective From 69f885769df87a6fd021999031db386b2a2ef0c7 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Mon, 15 Sep 2025 14:55:02 +1000 Subject: [PATCH 52/66] chore(deps): update golang docker tag to v1.25.1 (#1300) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .buildkite/release.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.buildkite/release.yml b/.buildkite/release.yml index 06d518b1e..9f5e0bddd 100644 --- a/.buildkite/release.yml +++ b/.buildkite/release.yml @@ -1,7 +1,7 @@ steps: - label: Release agents: - image: "golang:1.25.0@sha256:5502b0e56fca23feba76dbc5387ba59c593c02ccc2f0f7355871ea9a0852cebe" + image: "golang:1.25.1@sha256:bb979b278ffb8d31c8b07336fd187ef8fafc8766ebeaece524304483ea137e96" cpu: "16" memory: "24G" ephemeralStorage: "20G" From 1cc6bd6737e5e0e107c534a1296529c3b4a9bc82 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Mon, 15 Sep 2025 14:55:35 +1000 Subject: [PATCH 53/66] chore(deps): update actions/setup-go action to v6 (#1301) Co-authored-by: elastic-renovate-prod[bot] <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> --- .github/workflows/copilot-setup-steps.yml | 2 +- .github/workflows/test.yml | 6 +++--- libs/go-kibana-rest/.github/workflows/workflow.yml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/copilot-setup-steps.yml b/.github/workflows/copilot-setup-steps.yml index cc08bc2b9..17d5e3290 100644 --- a/.github/workflows/copilot-setup-steps.yml +++ b/.github/workflows/copilot-setup-steps.yml @@ -24,7 +24,7 @@ jobs: steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6 with: go-version-file: 'go.mod' cache: true diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index cf08fedc0..4d2eec47b 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -19,7 +19,7 @@ jobs: timeout-minutes: 5 steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6 with: go-version-file: 'go.mod' cache: true @@ -35,7 +35,7 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6 with: go-version-file: 'go.mod' cache: true @@ -130,7 +130,7 @@ jobs: - '9.0.3' steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 + - uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6 with: go-version-file: 'go.mod' cache: true diff --git a/libs/go-kibana-rest/.github/workflows/workflow.yml b/libs/go-kibana-rest/.github/workflows/workflow.yml index 92deb0a42..5540f85fb 100644 --- a/libs/go-kibana-rest/.github/workflows/workflow.yml +++ b/libs/go-kibana-rest/.github/workflows/workflow.yml @@ -20,7 +20,7 @@ jobs: steps: - uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5 - name: 
Setup Go - uses: actions/setup-go@d35c59abb061a4a6fb18e82ac0862c26744d6ab5 # v5 + uses: actions/setup-go@44694675825211faa026b3c33043df3e48a5fa00 # v6 with: go-version: "1.22" - name: Setup Elasticsearch / Kibana From 6487901a0737936ad21ceff7c2c5ad68b08e84a5 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Tue, 16 Sep 2025 10:35:26 +1000 Subject: [PATCH 54/66] [Feature] Add unenrollment_timeout parameter to Fleet Agent Policy resource (#1306) * Initial plan * Add unenrollment_timeout attribute to Fleet Agent Policy resource Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Update documentation for unenrollment_timeout attribute * Fix floating-point precision issue in duration conversion Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Fix confusing duration conversion logic for timeout attributes Clean up duration conversion in populateFromAPI to make it more readable and consistent. Apply truncation to ensure precision consistency for both InactivityTimeout and UnenrollmentTimeout attributes. Addresses review feedback on confusing duration conversion pattern. Co-authored-by: tobio <444668+tobio@users.noreply.github.com> * Add changelog entry for unenrollment_timeout feature Co-authored-by: tobio <444668+tobio@users.noreply.github.com> --------- Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com> Co-authored-by: tobio <444668+tobio@users.noreply.github.com> --- CHANGELOG.md | 1 + docs/resources/fleet_agent_policy.md | 1 + internal/fleet/agent_policy/models.go | 89 +++++++++++++----- internal/fleet/agent_policy/resource.go | 19 ++-- internal/fleet/agent_policy/resource_test.go | 76 +++++++++++++++ internal/fleet/agent_policy/schema.go | 6 ++ internal/fleet/agent_policy/version_test.go | 97 ++++++++++++++++++++ 7 files changed, 262 insertions(+), 27 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9bfc460cd..32afa2c5c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ - Migrate `elasticstack_kibana_action_connector` to the Terraform plugin framework ([#1269](https://github.com/elastic/terraform-provider-elasticstack/pull/1269)) - Migrate `elasticstack_elasticsearch_security_role_mapping` resource and data source to Terraform Plugin Framework ([#1279](https://github.com/elastic/terraform-provider-elasticstack/pull/1279)) - Add support for `inactivity_timeout` in `elasticstack_fleet_agent_policy` ([#641](https://github.com/elastic/terraform-provider-elasticstack/issues/641)) +- Add support for `unenrollment_timeout` in `elasticstack_fleet_agent_policy` ([#1169](https://github.com/elastic/terraform-provider-elasticstack/issues/1169)) ## [0.11.17] - 2025-07-21 diff --git a/docs/resources/fleet_agent_policy.md b/docs/resources/fleet_agent_policy.md index d8252b5b2..dbf85c0fa 100644 --- a/docs/resources/fleet_agent_policy.md +++ b/docs/resources/fleet_agent_policy.md @@ -59,6 +59,7 @@ resource "elasticstack_fleet_agent_policy" "test_policy" { - `skip_destroy` (Boolean) Set to true if you do not wish the agent policy to be deleted at destroy time, and instead just remove the agent policy from the Terraform state. - `supports_agentless` (Boolean) Set to true to enable agentless data collection. - `sys_monitoring` (Boolean) Enable collection of system logs and metrics. +- `unenrollment_timeout` (String) The unenrollment timeout for the agent policy. If an agent is inactive for this period, it will be automatically unenrolled. Supports duration strings (e.g., '30s', '2m', '1h'). 
### Read-Only diff --git a/internal/fleet/agent_policy/models.go b/internal/fleet/agent_policy/models.go index 4997d08ba..6c44b35e7 100644 --- a/internal/fleet/agent_policy/models.go +++ b/internal/fleet/agent_policy/models.go @@ -17,9 +17,10 @@ import ( ) type features struct { - SupportsGlobalDataTags bool - SupportsSupportsAgentless bool - SupportsInactivityTimeout bool + SupportsGlobalDataTags bool + SupportsSupportsAgentless bool + SupportsInactivityTimeout bool + SupportsUnenrollmentTimeout bool } type globalDataTagsItemModel struct { @@ -28,22 +29,23 @@ type globalDataTagsItemModel struct { } type agentPolicyModel struct { - ID types.String `tfsdk:"id"` - PolicyID types.String `tfsdk:"policy_id"` - Name types.String `tfsdk:"name"` - Namespace types.String `tfsdk:"namespace"` - Description types.String `tfsdk:"description"` - DataOutputId types.String `tfsdk:"data_output_id"` - MonitoringOutputId types.String `tfsdk:"monitoring_output_id"` - FleetServerHostId types.String `tfsdk:"fleet_server_host_id"` - DownloadSourceId types.String `tfsdk:"download_source_id"` - MonitorLogs types.Bool `tfsdk:"monitor_logs"` - MonitorMetrics types.Bool `tfsdk:"monitor_metrics"` - SysMonitoring types.Bool `tfsdk:"sys_monitoring"` - SkipDestroy types.Bool `tfsdk:"skip_destroy"` - SupportsAgentless types.Bool `tfsdk:"supports_agentless"` - InactivityTimeout customtypes.Duration `tfsdk:"inactivity_timeout"` - GlobalDataTags types.Map `tfsdk:"global_data_tags"` //> globalDataTagsModel + ID types.String `tfsdk:"id"` + PolicyID types.String `tfsdk:"policy_id"` + Name types.String `tfsdk:"name"` + Namespace types.String `tfsdk:"namespace"` + Description types.String `tfsdk:"description"` + DataOutputId types.String `tfsdk:"data_output_id"` + MonitoringOutputId types.String `tfsdk:"monitoring_output_id"` + FleetServerHostId types.String `tfsdk:"fleet_server_host_id"` + DownloadSourceId types.String `tfsdk:"download_source_id"` + MonitorLogs types.Bool `tfsdk:"monitor_logs"` + MonitorMetrics types.Bool `tfsdk:"monitor_metrics"` + SysMonitoring types.Bool `tfsdk:"sys_monitoring"` + SkipDestroy types.Bool `tfsdk:"skip_destroy"` + SupportsAgentless types.Bool `tfsdk:"supports_agentless"` + InactivityTimeout customtypes.Duration `tfsdk:"inactivity_timeout"` + UnenrollmentTimeout customtypes.Duration `tfsdk:"unenrollment_timeout"` + GlobalDataTags types.Map `tfsdk:"global_data_tags"` //> globalDataTagsModel } func (model *agentPolicyModel) populateFromAPI(ctx context.Context, data *kbapi.AgentPolicy) diag.Diagnostics { @@ -79,11 +81,20 @@ func (model *agentPolicyModel) populateFromAPI(ctx context.Context, data *kbapi. 
model.SupportsAgentless = types.BoolPointerValue(data.SupportsAgentless) if data.InactivityTimeout != nil { // Convert seconds to duration string - d := time.Duration(*data.InactivityTimeout * float32(time.Second)).Truncate(time.Second) - model.InactivityTimeout = customtypes.NewDurationValue(d.String()) + seconds := int64(*data.InactivityTimeout) + d := time.Duration(seconds) * time.Second + model.InactivityTimeout = customtypes.NewDurationValue(d.Truncate(time.Second).String()) } else { model.InactivityTimeout = customtypes.NewDurationNull() } + if data.UnenrollTimeout != nil { + // Convert seconds to duration string + seconds := int64(*data.UnenrollTimeout) + d := time.Duration(seconds) * time.Second + model.UnenrollmentTimeout = customtypes.NewDurationValue(d.Truncate(time.Second).String()) + } else { + model.UnenrollmentTimeout = customtypes.NewDurationNull() + } if utils.Deref(data.GlobalDataTags) != nil { diags := diag.Diagnostics{} var map0 = make(map[string]globalDataTagsItemModel) @@ -216,6 +227,24 @@ func (model *agentPolicyModel) toAPICreateModel(ctx context.Context, feat featur body.InactivityTimeout = &seconds } + if utils.IsKnown(model.UnenrollmentTimeout) { + if !feat.SupportsUnenrollmentTimeout { + return kbapi.PostFleetAgentPoliciesJSONRequestBody{}, diag.Diagnostics{ + diag.NewAttributeErrorDiagnostic( + path.Root("unenrollment_timeout"), + "Unsupported Elasticsearch version", + fmt.Sprintf("Unenrollment timeout is only supported in Elastic Stack %s and above", MinVersionUnenrollmentTimeout), + ), + } + } + duration, diags := model.UnenrollmentTimeout.Parse() + if diags.HasError() { + return kbapi.PostFleetAgentPoliciesJSONRequestBody{}, diags + } + seconds := float32(duration.Seconds()) + body.UnenrollTimeout = &seconds + } + tags, diags := model.convertGlobalDataTags(ctx, feat) if diags.HasError() { return kbapi.PostFleetAgentPoliciesJSONRequestBody{}, diags @@ -276,6 +305,24 @@ func (model *agentPolicyModel) toAPIUpdateModel(ctx context.Context, feat featur body.InactivityTimeout = &seconds } + if utils.IsKnown(model.UnenrollmentTimeout) { + if !feat.SupportsUnenrollmentTimeout { + return kbapi.PutFleetAgentPoliciesAgentpolicyidJSONRequestBody{}, diag.Diagnostics{ + diag.NewAttributeErrorDiagnostic( + path.Root("unenrollment_timeout"), + "Unsupported Elasticsearch version", + fmt.Sprintf("Unenrollment timeout is only supported in Elastic Stack %s and above", MinVersionUnenrollmentTimeout), + ), + } + } + duration, diags := model.UnenrollmentTimeout.Parse() + if diags.HasError() { + return kbapi.PutFleetAgentPoliciesAgentpolicyidJSONRequestBody{}, diags + } + seconds := float32(duration.Seconds()) + body.UnenrollTimeout = &seconds + } + tags, diags := model.convertGlobalDataTags(ctx, feat) if diags.HasError() { return kbapi.PutFleetAgentPoliciesAgentpolicyidJSONRequestBody{}, diags diff --git a/internal/fleet/agent_policy/resource.go b/internal/fleet/agent_policy/resource.go index fbeda8d50..8854fde9f 100644 --- a/internal/fleet/agent_policy/resource.go +++ b/internal/fleet/agent_policy/resource.go @@ -19,9 +19,10 @@ var ( ) var ( - MinVersionGlobalDataTags = version.Must(version.NewVersion("8.15.0")) - MinSupportsAgentlessVersion = version.Must(version.NewVersion("8.15.0")) - MinVersionInactivityTimeout = version.Must(version.NewVersion("8.7.0")) + MinVersionGlobalDataTags = version.Must(version.NewVersion("8.15.0")) + MinSupportsAgentlessVersion = version.Must(version.NewVersion("8.15.0")) + MinVersionInactivityTimeout = version.Must(version.NewVersion("8.7.0")) + 
MinVersionUnenrollmentTimeout = version.Must(version.NewVersion("8.15.0")) ) // NewResource is a helper function to simplify the provider implementation. @@ -63,9 +64,15 @@ func (r *agentPolicyResource) buildFeatures(ctx context.Context) (features, diag return features{}, utils.FrameworkDiagsFromSDK(diags) } + supportsUnenrollmentTimeout, diags := r.client.EnforceMinVersion(ctx, MinVersionUnenrollmentTimeout) + if diags.HasError() { + return features{}, utils.FrameworkDiagsFromSDK(diags) + } + return features{ - SupportsGlobalDataTags: supportsGDT, - SupportsSupportsAgentless: supportsSupportsAgentless, - SupportsInactivityTimeout: supportsInactivityTimeout, + SupportsGlobalDataTags: supportsGDT, + SupportsSupportsAgentless: supportsSupportsAgentless, + SupportsInactivityTimeout: supportsInactivityTimeout, + SupportsUnenrollmentTimeout: supportsUnenrollmentTimeout, }, nil } diff --git a/internal/fleet/agent_policy/resource_test.go b/internal/fleet/agent_policy/resource_test.go index acd184210..085fa2dd2 100644 --- a/internal/fleet/agent_policy/resource_test.go +++ b/internal/fleet/agent_policy/resource_test.go @@ -156,6 +156,33 @@ func TestAccResourceAgentPolicy(t *testing.T) { resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "inactivity_timeout", "2m"), ), }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(agent_policy.MinVersionUnenrollmentTimeout), + Config: testAccResourceAgentPolicyCreateWithUnenrollmentTimeout(policyName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "name", fmt.Sprintf("Policy %s", policyName)), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "namespace", "default"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "description", "Test Agent Policy with Unenrollment Timeout"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "monitor_logs", "true"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "monitor_metrics", "false"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "skip_destroy", "false"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "unenrollment_timeout", "300s"), + ), + }, + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(agent_policy.MinVersionUnenrollmentTimeout), + Config: testAccResourceAgentPolicyUpdateWithTimeouts(policyName), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "name", fmt.Sprintf("Updated Policy %s", policyName)), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "namespace", "default"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "description", "Test Agent Policy with Both Timeouts"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "monitor_logs", "false"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "monitor_metrics", "true"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "skip_destroy", "false"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "inactivity_timeout", "120s"), + resource.TestCheckResourceAttr("elasticstack_fleet_agent_policy.test_policy", "unenrollment_timeout", "900s"), + ), + }, { SkipFunc: 
versionutils.CheckIfVersionIsUnsupported(agent_policy.MinVersionGlobalDataTags), Config: testAccResourceAgentPolicyCreateWithGlobalDataTags(policyNameGlobalDataTags, false), @@ -332,6 +359,30 @@ data "elasticstack_fleet_enrollment_tokens" "test_policy" { `, fmt.Sprintf("Policy %s", id)) } +func testAccResourceAgentPolicyCreateWithUnenrollmentTimeout(id string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +resource "elasticstack_fleet_agent_policy" "test_policy" { + name = "%s" + namespace = "default" + description = "Test Agent Policy with Unenrollment Timeout" + monitor_logs = true + monitor_metrics = false + skip_destroy = false + unenrollment_timeout = "300s" +} + +data "elasticstack_fleet_enrollment_tokens" "test_policy" { + policy_id = elasticstack_fleet_agent_policy.test_policy.policy_id +} + +`, fmt.Sprintf("Policy %s", id)) +} + func testAccResourceAgentPolicyCreateWithBadGlobalDataTags(id string, skipDestroy bool) string { return fmt.Sprintf(` provider "elasticstack" { @@ -509,3 +560,28 @@ func checkResourceAgentPolicySkipDestroy(s *terraform.State) error { } return nil } + +func testAccResourceAgentPolicyUpdateWithTimeouts(id string) string { + return fmt.Sprintf(` +provider "elasticstack" { + elasticsearch {} + kibana {} +} + +resource "elasticstack_fleet_agent_policy" "test_policy" { + name = "%s" + namespace = "default" + description = "Test Agent Policy with Both Timeouts" + monitor_logs = false + monitor_metrics = true + skip_destroy = false + inactivity_timeout = "120s" + unenrollment_timeout = "900s" +} + +data "elasticstack_fleet_enrollment_tokens" "test_policy" { + policy_id = elasticstack_fleet_agent_policy.test_policy.policy_id +} + +`, fmt.Sprintf("Updated Policy %s", id)) +} diff --git a/internal/fleet/agent_policy/schema.go b/internal/fleet/agent_policy/schema.go index be49763ef..dd98e07c0 100644 --- a/internal/fleet/agent_policy/schema.go +++ b/internal/fleet/agent_policy/schema.go @@ -104,6 +104,12 @@ func getSchema() schema.Schema { Optional: true, CustomType: customtypes.DurationType{}, }, + "unenrollment_timeout": schema.StringAttribute{ + Description: "The unenrollment timeout for the agent policy. If an agent is inactive for this period, it will be automatically unenrolled. Supports duration strings (e.g., '30s', '2m', '1h').", + Computed: true, + Optional: true, + CustomType: customtypes.DurationType{}, + }, "global_data_tags": schema.MapNestedAttribute{ Description: "User-defined data tags to apply to all inputs. Values can be strings (string_value) or numbers (number_value) but not both. 
Example -- key1 = {string_value = value1}, key2 = {number_value = 42}", NestedObject: schema.NestedAttributeObject{ diff --git a/internal/fleet/agent_policy/version_test.go b/internal/fleet/agent_policy/version_test.go index 8cdc0d133..98d81195a 100644 --- a/internal/fleet/agent_policy/version_test.go +++ b/internal/fleet/agent_policy/version_test.go @@ -29,6 +29,27 @@ func TestMinVersionInactivityTimeout(t *testing.T) { } } +func TestMinVersionUnenrollmentTimeout(t *testing.T) { + // Test that the MinVersionUnenrollmentTimeout constant is set correctly + expected := "8.15.0" + actual := MinVersionUnenrollmentTimeout.String() + if actual != expected { + t.Errorf("Expected MinVersionUnenrollmentTimeout to be '%s', got '%s'", expected, actual) + } + + // Test version comparison - should be greater than 8.14.0 + olderVersion := version.Must(version.NewVersion("8.14.0")) + if MinVersionUnenrollmentTimeout.LessThan(olderVersion) { + t.Errorf("MinVersionUnenrollmentTimeout (%s) should be greater than %s", MinVersionUnenrollmentTimeout.String(), olderVersion.String()) + } + + // Test version comparison - should be less than 8.16.0 + newerVersion := version.Must(version.NewVersion("8.16.0")) + if MinVersionUnenrollmentTimeout.GreaterThan(newerVersion) { + t.Errorf("MinVersionUnenrollmentTimeout (%s) should be less than %s", MinVersionUnenrollmentTimeout.String(), newerVersion.String()) + } +} + func TestInactivityTimeoutVersionValidation(t *testing.T) { ctx := context.Background() @@ -104,3 +125,79 @@ func TestInactivityTimeoutVersionValidation(t *testing.T) { t.Errorf("Did not expect error when inactivity_timeout is not set in update: %v", diags) } } + +func TestUnenrollmentTimeoutVersionValidation(t *testing.T) { + ctx := context.Background() + + // Test case where unenrollment_timeout is not supported (older version) + model := &agentPolicyModel{ + Name: types.StringValue("test"), + Namespace: types.StringValue("default"), + UnenrollmentTimeout: customtypes.NewDurationValue("5m"), + } + + // Create features with unenrollment timeout NOT supported + feat := features{ + SupportsUnenrollmentTimeout: false, + } + + // Test toAPICreateModel - should return error when unenrollment_timeout is used but not supported + _, diags := model.toAPICreateModel(ctx, feat) + if !diags.HasError() { + t.Error("Expected error when using unenrollment_timeout on unsupported version, but got none") + } + + // Check that the error message contains the expected text + found := false + for _, diag := range diags { + if diag.Summary() == "Unsupported Elasticsearch version" { + found = true + break + } + } + if !found { + t.Error("Expected 'Unsupported Elasticsearch version' error, but didn't find it") + } + + // Test toAPIUpdateModel - should return error when unenrollment_timeout is used but not supported + _, diags = model.toAPIUpdateModel(ctx, feat) + if !diags.HasError() { + t.Error("Expected error when using unenrollment_timeout on unsupported version in update, but got none") + } + + // Test case where unenrollment_timeout IS supported (newer version) + featSupported := features{ + SupportsUnenrollmentTimeout: true, + } + + // Test toAPICreateModel - should NOT return error when unenrollment_timeout is supported + _, diags = model.toAPICreateModel(ctx, featSupported) + if diags.HasError() { + t.Errorf("Did not expect error when using unenrollment_timeout on supported version: %v", diags) + } + + // Test toAPIUpdateModel - should NOT return error when unenrollment_timeout is supported + _, diags = 
model.toAPIUpdateModel(ctx, featSupported) + if diags.HasError() { + t.Errorf("Did not expect error when using unenrollment_timeout on supported version in update: %v", diags) + } + + // Test case where unenrollment_timeout is not set (should not cause validation errors) + modelWithoutTimeout := &agentPolicyModel{ + Name: types.StringValue("test"), + Namespace: types.StringValue("default"), + // UnenrollmentTimeout is not set (null/unknown) + } + + // Test toAPICreateModel - should NOT return error when unenrollment_timeout is not set, even on unsupported version + _, diags = modelWithoutTimeout.toAPICreateModel(ctx, feat) + if diags.HasError() { + t.Errorf("Did not expect error when unenrollment_timeout is not set: %v", diags) + } + + // Test toAPIUpdateModel - should NOT return error when unenrollment_timeout is not set, even on unsupported version + _, diags = modelWithoutTimeout.toAPIUpdateModel(ctx, feat) + if diags.HasError() { + t.Errorf("Did not expect error when unenrollment_timeout is not set in update: %v", diags) + } +} From 385595f3f2f5c0d246acfc0d0dd779ef5d4c521c Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Tue, 16 Sep 2025 10:41:12 +1000 Subject: [PATCH 55/66] Add support for dataViewId (#1305) * Add support for dataViewId * Changelog --- CHANGELOG.md | 2 + docs/resources/kibana_slo.md | 4 + internal/clients/kibana/slo.go | 16 +- internal/kibana/slo.go | 82 ++++- internal/kibana/slo_test.go | 588 +++++++++++++++++++-------------- 5 files changed, 429 insertions(+), 263 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 32afa2c5c..a98032f68 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,8 @@ - Migrate `elasticstack_kibana_action_connector` to the Terraform plugin framework ([#1269](https://github.com/elastic/terraform-provider-elasticstack/pull/1269)) - Migrate `elasticstack_elasticsearch_security_role_mapping` resource and data source to Terraform Plugin Framework ([#1279](https://github.com/elastic/terraform-provider-elasticstack/pull/1279)) - Add support for `inactivity_timeout` in `elasticstack_fleet_agent_policy` ([#641](https://github.com/elastic/terraform-provider-elasticstack/issues/641)) +- [Refactor] Regenerate the SLO client using the current OpenAPI spec ([#1303](https://github.com/elastic/terraform-provider-elasticstack/pull/1303)) +- Add support for `data_view_id` in the `elasticstack_kibana_slo` resource ([#1305](https://github.com/elastic/terraform-provider-elasticstack/pull/1305)) - Add support for `unenrollment_timeout` in `elasticstack_fleet_agent_policy` ([#1169](https://github.com/elastic/terraform-provider-elasticstack/issues/1169)) ## [0.11.17] - 2025-07-21 diff --git a/docs/resources/kibana_slo.md b/docs/resources/kibana_slo.md index 0ce7e5788..61add57c0 100644 --- a/docs/resources/kibana_slo.md +++ b/docs/resources/kibana_slo.md @@ -323,6 +323,7 @@ Required: Optional: +- `data_view_id` (String) Optional data view id to use for this indicator. - `filter` (String) - `timestamp_field` (String) @@ -366,6 +367,7 @@ Required: Optional: +- `data_view_id` (String) Optional data view id to use for this indicator. - `filter` (String) - `good` (String) - `timestamp_field` (String) @@ -383,6 +385,7 @@ Required: Optional: +- `data_view_id` (String) Optional data view id to use for this indicator. - `filter` (String) - `timestamp_field` (String) @@ -453,6 +456,7 @@ Required: Optional: +- `data_view_id` (String) Optional data view id to use for this indicator. 
- `filter` (String) diff --git a/internal/clients/kibana/slo.go b/internal/clients/kibana/slo.go index 593c7e8f6..08aed32a2 100644 --- a/internal/clients/kibana/slo.go +++ b/internal/clients/kibana/slo.go @@ -28,7 +28,9 @@ func GetSlo(ctx context.Context, apiClient *clients.ApiClient, id, spaceID strin return nil, nil } if err != nil { - return nil, diag.FromErr(err) + diags := diag.FromErr(err) + diags = append(diags, utils.CheckHttpError(res, "unable to create slo with id "+id)...) + return nil, diags } defer res.Body.Close() @@ -46,7 +48,9 @@ func DeleteSlo(ctx context.Context, apiClient *clients.ApiClient, sloId string, req := client.DeleteSloOp(ctxWithAuth, sloId, spaceId).KbnXsrf("true") res, err := req.Execute() if err != nil && res == nil { - return diag.FromErr(err) + diags := diag.FromErr(err) + diags = append(diags, utils.CheckHttpError(res, "unable to create slo with id "+sloId)...) + return diags } defer res.Body.Close() @@ -80,7 +84,9 @@ func UpdateSlo(ctx context.Context, apiClient *clients.ApiClient, s models.Slo, _, res, err := req.Execute() if err != nil { - return nil, diag.FromErr(err) + diags := diag.FromErr(err) + diags = append(diags, utils.CheckHttpError(res, "unable to create slo with id "+s.SloID)...) + return nil, diags } defer res.Body.Close() @@ -122,7 +128,9 @@ func CreateSlo(ctx context.Context, apiClient *clients.ApiClient, s models.Slo, req := client.CreateSloOp(ctxWithAuth, s.SpaceID).KbnXsrf("true").CreateSloRequest(reqModel) sloRes, res, err := req.Execute() if err != nil { - return nil, diag.FromErr(err) + diags := diag.FromErr(err) + diags = append(diags, utils.CheckHttpError(res, "unable to create slo with id "+s.SloID)...) + return nil, diags } defer res.Body.Close() diff --git a/internal/kibana/slo.go b/internal/kibana/slo.go index eaf5429e7..53dc3d19c 100644 --- a/internal/kibana/slo.go +++ b/internal/kibana/slo.go @@ -16,7 +16,10 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) -var SLOSupportsMultipleGroupByMinVersion = version.Must(version.NewVersion("8.14.0")) +var ( + SLOSupportsMultipleGroupByMinVersion = version.Must(version.NewVersion("8.14.0")) + SLOSupportsDataViewIDMinVersion = version.Must(version.NewVersion("8.15.0")) +) func ResourceSlo() *schema.Resource { return &schema.Resource{ @@ -114,6 +117,11 @@ func getSchema() map[string]*schema.Schema { Type: schema.TypeString, Required: true, }, + "data_view_id": { + Type: schema.TypeString, + Optional: true, + Description: "Optional data view id to use for this indicator.", + }, "filter": { Type: schema.TypeString, Optional: true, @@ -215,6 +223,11 @@ func getSchema() map[string]*schema.Schema { Type: schema.TypeString, Required: true, }, + "data_view_id": { + Type: schema.TypeString, + Optional: true, + Description: "Optional data view id to use for this indicator.", + }, "filter": { Type: schema.TypeString, Optional: true, @@ -375,6 +388,11 @@ func getSchema() map[string]*schema.Schema { Type: schema.TypeString, Required: true, }, + "data_view_id": { + Type: schema.TypeString, + Optional: true, + Description: "Optional data view id to use for this indicator.", + }, "filter": { Type: schema.TypeString, Optional: true, @@ -408,6 +426,11 @@ func getSchema() map[string]*schema.Schema { Type: schema.TypeString, Required: true, }, + "data_view_id": { + Type: schema.TypeString, + Optional: true, + Description: "Optional data view id to use for this indicator.", + }, "timestamp_field": { Type: schema.TypeString, Required: true, @@ -609,7 +632,8 @@ func 
getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic IndicatorPropertiesCustomKql: &slo.IndicatorPropertiesCustomKql{ Type: indicatorAddressToType[indicatorType], Params: slo.IndicatorPropertiesCustomKqlParams{ - Index: d.Get(indicatorType + ".0.index").(string), + Index: d.Get(indicatorType + ".0.index").(string), + DataViewId: getOrNil[string](indicatorType+".0.data_view_id", d), Filter: transformOrNil[slo.KqlWithFilters]( indicatorType+".0.filter", d, func(v interface{}) slo.KqlWithFilters { @@ -666,6 +690,7 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic Params: slo.IndicatorPropertiesHistogramParams{ Filter: getOrNil[string](indicatorType+".0.filter", d), Index: d.Get(indicatorType + ".0.index").(string), + DataViewId: getOrNil[string](indicatorType+".0.data_view_id", d), TimestampField: d.Get(indicatorType + ".0.timestamp_field").(string), Good: slo.IndicatorPropertiesHistogramParamsGood{ Field: d.Get(indicatorType + ".0.good.0.field").(string), @@ -714,6 +739,7 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic Params: slo.IndicatorPropertiesCustomMetricParams{ Filter: getOrNil[string](indicatorType+".0.filter", d), Index: d.Get(indicatorType + ".0.index").(string), + DataViewId: getOrNil[string](indicatorType+".0.data_view_id", d), TimestampField: d.Get(indicatorType + ".0.timestamp_field").(string), Good: slo.IndicatorPropertiesCustomMetricParamsGood{ Equation: d.Get(indicatorType + ".0.good.0.equation").(string), @@ -769,6 +795,7 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic Type: indicatorAddressToType[indicatorType], Params: slo.IndicatorPropertiesTimesliceMetricParams{ Index: params["index"].(string), + DataViewId: getOrNil[string]("timeslice_metric_indicator.0.data_view_id", d), TimestampField: params["timestamp_field"].(string), Filter: getOrNil[string]("timeslice_metric_indicator.0.filter", d), Metric: slo.IndicatorPropertiesTimesliceMetricParamsMetric{ @@ -850,6 +877,16 @@ func resourceSloCreate(ctx context.Context, d *schema.ResourceData, meta interfa return diags } + // Version check for data_view_id support + if !serverVersion.GreaterThanOrEqual(SLOSupportsDataViewIDMinVersion) { + // Check all indicator types that support data_view_id + for _, indicatorType := range []string{"metric_custom_indicator", "histogram_custom_indicator", "kql_custom_indicator", "timeslice_metric_indicator"} { + if v, ok := d.GetOk(indicatorType + ".0.data_view_id"); ok && v != "" { + return diag.Errorf("data_view_id is not supported for %s on Elastic Stack versions < %s", indicatorType, SLOSupportsDataViewIDMinVersion) + } + } + } + supportsMultipleGroupBy := serverVersion.GreaterThanOrEqual(SLOSupportsMultipleGroupByMinVersion) if len(slo.GroupBy) > 1 && !supportsMultipleGroupBy { return diag.Errorf("multiple group_by fields are not supported in this version of the Elastic Stack. 
Multiple group_by fields requires %s", SLOSupportsMultipleGroupByMinVersion) @@ -882,6 +919,15 @@ func resourceSloUpdate(ctx context.Context, d *schema.ResourceData, meta interfa return diags } + // Version check for data_view_id support + if !serverVersion.GreaterThanOrEqual(SLOSupportsDataViewIDMinVersion) { + for _, indicatorType := range []string{"metric_custom_indicator", "histogram_custom_indicator", "kql_custom_indicator", "timeslice_metric_indicator"} { + if v, ok := d.GetOk(indicatorType + ".0.data_view_id"); ok && v != "" { + return diag.Errorf("data_view_id is not supported for %s on Elastic Stack versions < %s", indicatorType, SLOSupportsDataViewIDMinVersion) + } + } + } + supportsMultipleGroupBy := serverVersion.GreaterThanOrEqual(SLOSupportsMultipleGroupByMinVersion) if len(slo.GroupBy) > 1 && !supportsMultipleGroupBy { return diag.Errorf("multiple group_by fields are not supported in this version of the Elastic Stack. Multiple group_by fields requires %s", SLOSupportsMultipleGroupByMinVersion) @@ -950,13 +996,17 @@ func resourceSloRead(ctx context.Context, d *schema.ResourceData, meta interface case s.Indicator.IndicatorPropertiesCustomKql != nil: indicatorAddress = indicatorTypeToAddress[s.Indicator.IndicatorPropertiesCustomKql.Type] params := s.Indicator.IndicatorPropertiesCustomKql.Params - indicator = append(indicator, map[string]interface{}{ + indicatorMap := map[string]interface{}{ "index": params.Index, "filter": params.Filter.String, "good": params.Good.String, "total": params.Total.String, "timestamp_field": params.TimestampField, - }) + } + if params.DataViewId != nil { + indicatorMap["data_view_id"] = *params.DataViewId + } + indicator = append(indicator, indicatorMap) case s.Indicator.IndicatorPropertiesHistogram != nil: indicatorAddress = indicatorTypeToAddress[s.Indicator.IndicatorPropertiesHistogram.Type] @@ -975,13 +1025,17 @@ func resourceSloRead(ctx context.Context, d *schema.ResourceData, meta interface "from": params.Total.From, "to": params.Total.To, }} - indicator = append(indicator, map[string]interface{}{ + indicatorMap := map[string]interface{}{ "index": params.Index, "filter": params.Filter, "timestamp_field": params.TimestampField, "good": good, "total": total, - }) + } + if params.DataViewId != nil { + indicatorMap["data_view_id"] = *params.DataViewId + } + indicator = append(indicator, indicatorMap) case s.Indicator.IndicatorPropertiesCustomMetric != nil: indicatorAddress = indicatorTypeToAddress[s.Indicator.IndicatorPropertiesCustomMetric.Type] @@ -1012,13 +1066,17 @@ func resourceSloRead(ctx context.Context, d *schema.ResourceData, meta interface "equation": params.Total.Equation, "metrics": totalMetrics, }} - indicator = append(indicator, map[string]interface{}{ + indicatorMap := map[string]interface{}{ "index": params.Index, "filter": params.Filter, "timestamp_field": params.TimestampField, "good": good, "total": total, - }) + } + if params.DataViewId != nil { + indicatorMap["data_view_id"] = *params.DataViewId + } + indicator = append(indicator, indicatorMap) case s.Indicator.IndicatorPropertiesTimesliceMetric != nil: indicatorAddress = indicatorTypeToAddress[s.Indicator.IndicatorPropertiesTimesliceMetric.Type] @@ -1049,12 +1107,16 @@ func resourceSloRead(ctx context.Context, d *schema.ResourceData, meta interface "comparator": params.Metric.Comparator, "threshold": params.Metric.Threshold, } - indicator = append(indicator, map[string]interface{}{ + indicatorMap := map[string]interface{}{ "index": params.Index, "timestamp_field": 
params.TimestampField, "filter": params.Filter, "metric": []interface{}{metricBlock}, - }) + } + if params.DataViewId != nil { + indicatorMap["data_view_id"] = *params.DataViewId + } + indicator = append(indicator, indicatorMap) default: return diag.Errorf("indicator not set") diff --git a/internal/kibana/slo_test.go b/internal/kibana/slo_test.go index 23a287c1a..2f1216bed 100644 --- a/internal/kibana/slo_test.go +++ b/internal/kibana/slo_test.go @@ -31,149 +31,224 @@ func TestAccResourceSlo(t *testing.T) { slo8_10Constraints, err := version.NewConstraint(">=8.10.0,!=8.11.0,!=8.11.1,!=8.11.2,!=8.11.3,!=8.11.4") require.NoError(t, err) - sloName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) - resource.Test(t, resource.TestCase{ - PreCheck: func() { acctest.PreCheck(t) }, - CheckDestroy: checkResourceSloDestroy, - ProtoV6ProviderFactories: acctest.Providers, - Steps: []resource.TestStep{ - { - SkipFunc: versionutils.CheckIfVersionMeetsConstraints(slo8_9Constraints), - Config: getSLOConfig(sloVars{name: sloName, indicatorType: "apm_latency_indicator"}), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "name", sloName), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "slo_id", "id-"+sloName), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "description", "fully sick SLO"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.environment", "production"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.service", "my-service"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.transaction_type", "request"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.transaction_name", "GET /sup/dawg"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.index", "my-index-"+sloName), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.threshold", "500"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "time_window.0.duration", "7d"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "time_window.0.type", "rolling"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "budgeting_method", "timeslices"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "objective.0.target", "0.999"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "objective.0.timeslice_target", "0.95"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "objective.0.timeslice_window", "5m"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "settings.0.sync_delay", "1m"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "settings.0.frequency", "1m"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "space_id", "default"), - ), - }, - { //check that name can be updated - SkipFunc: versionutils.CheckIfVersionMeetsConstraints(slo8_9Constraints), - Config: getSLOConfig(sloVars{ - name: fmt.Sprintf("updated-%s", sloName), - indicatorType: "apm_latency_indicator", - }), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "name", fmt.Sprintf("updated-%s", sloName)), - ), - }, - { //check that settings can be updated from api-computed defaults - 
SkipFunc: versionutils.CheckIfVersionMeetsConstraints(slo8_9Constraints), - Config: getSLOConfig(sloVars{name: sloName, indicatorType: "apm_latency_indicator", settingsEnabled: true}), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "settings.0.sync_delay", "5m"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "settings.0.frequency", "5m"), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionMeetsConstraints(slo8_9Constraints), - Config: getSLOConfig(sloVars{name: sloName, indicatorType: "apm_availability_indicator", settingsEnabled: true}), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_availability_indicator.0.environment", "production"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_availability_indicator.0.service", "my-service"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_availability_indicator.0.transaction_type", "request"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_availability_indicator.0.transaction_name", "GET /sup/dawg"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_availability_indicator.0.index", "my-index-"+sloName), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionMeetsConstraints(slo8_9Constraints), - Config: getSLOConfig(sloVars{name: sloName, indicatorType: "kql_custom_indicator", settingsEnabled: true}), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "kql_custom_indicator.0.index", "my-index-"+sloName), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "kql_custom_indicator.0.good", "latency < 300"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "kql_custom_indicator.0.total", "*"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "kql_custom_indicator.0.filter", "labels.groupId: group-0"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "kql_custom_indicator.0.timestamp_field", "custom_timestamp"), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionMeetsConstraints(slo8_10Constraints), - Config: getSLOConfig(sloVars{name: sloName, indicatorType: "histogram_custom_indicator", settingsEnabled: true}), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.index", "my-index-"+sloName), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.good.0.field", "test"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.good.0.aggregation", "value_count"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.good.0.filter", "latency < 300"), - resource.TestCheckResourceAttrSet("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.good.0.from"), - resource.TestCheckResourceAttrSet("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.good.0.to"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.total.0.field", "test"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.total.0.aggregation", "value_count"), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionMeetsConstraints(slo8_10Constraints), - Config: getSLOConfig(sloVars{ - 
name: sloName, - indicatorType: "metric_custom_indicator", - settingsEnabled: true, - groupBy: []string{"some.field"}, - }), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.index", "my-index-"+sloName), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.0.name", "A"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.0.aggregation", "sum"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.0.field", "processor.processed"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.1.name", "B"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.1.aggregation", "sum"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.1.field", "processor.processed"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.equation", "A + B"), + for _, testWithDataViewID := range []bool{true, false} { + t.Run("with-data-view-id="+fmt.Sprint(testWithDataViewID), func(t *testing.T) { + dataviewCheckFunc := func(indicator string) resource.TestCheckFunc { + if !testWithDataViewID { + return func(s *terraform.State) error { + return nil + } + } - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.0.name", "A"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.0.aggregation", "sum"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.0.field", "processor.accepted"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.1.name", "B"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.1.aggregation", "sum"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.1.field", "processor.accepted"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.equation", "A + B"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "group_by.#", "1"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "group_by.0", "some.field"), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionMeetsConstraints(slo8_10Constraints), - Config: getSLOConfig(sloVars{ - name: sloName, - indicatorType: "metric_custom_indicator", - settingsEnabled: true, - tags: []string{"tag-1", "another_tag"}, - }), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "tags.0", "tag-1"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "tags.1", "another_tag"), - ), - }, - { - SkipFunc: versionutils.CheckIfVersionIsUnsupported(sloTimesliceMetricsMinVersion), - Config: getSLOConfig(sloVars{ - name: sloName, - indicatorType: "timeslice_metric_indicator", - settingsEnabled: true, - tags: []string{"tag-1", "another_tag"}, - }), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "timeslice_metric_indicator.0.index", 
"my-index-"+sloName), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "timeslice_metric_indicator.0.metric.0.metrics.0.name", "A"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "timeslice_metric_indicator.0.metric.0.metrics.0.aggregation", "sum"), - resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "timeslice_metric_indicator.0.metric.0.equation", "A"), - ), - }, - }, - }) + return resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", indicator+".0.data_view_id", "my-data-view-id") + } + sloName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + CheckDestroy: checkResourceSloDestroy, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + SkipFunc: func() (bool, error) { + if !testWithDataViewID { + return versionutils.CheckIfVersionMeetsConstraints(slo8_9Constraints)() + } + + return versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsDataViewIDMinVersion)() + }, + Config: getSLOConfig(sloVars{name: sloName, indicatorType: "apm_latency_indicator", includeDataViewID: testWithDataViewID}), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "name", sloName), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "slo_id", "id-"+sloName), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "description", "fully sick SLO"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.environment", "production"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.service", "my-service"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.transaction_type", "request"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.transaction_name", "GET /sup/dawg"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.index", "my-index-"+sloName), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_latency_indicator.0.threshold", "500"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "time_window.0.duration", "7d"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "time_window.0.type", "rolling"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "budgeting_method", "timeslices"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "objective.0.target", "0.999"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "objective.0.timeslice_target", "0.95"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "objective.0.timeslice_window", "5m"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "settings.0.sync_delay", "1m"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "settings.0.frequency", "1m"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "space_id", "default"), + ), + }, + { //check that name can be updated + SkipFunc: func() (bool, error) { + if !testWithDataViewID { + return versionutils.CheckIfVersionMeetsConstraints(slo8_9Constraints)() + } + + return versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsDataViewIDMinVersion)() + }, + Config: getSLOConfig(sloVars{ + name: 
fmt.Sprintf("updated-%s", sloName), + indicatorType: "apm_latency_indicator", + includeDataViewID: testWithDataViewID, + }), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "name", fmt.Sprintf("updated-%s", sloName)), + ), + }, + { //check that settings can be updated from api-computed defaults + SkipFunc: func() (bool, error) { + if !testWithDataViewID { + return versionutils.CheckIfVersionMeetsConstraints(slo8_9Constraints)() + } + + return versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsDataViewIDMinVersion)() + }, + Config: getSLOConfig(sloVars{name: sloName, indicatorType: "apm_latency_indicator", settingsEnabled: true, includeDataViewID: testWithDataViewID}), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "settings.0.sync_delay", "5m"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "settings.0.frequency", "5m"), + ), + }, + { + SkipFunc: func() (bool, error) { + if !testWithDataViewID { + return versionutils.CheckIfVersionMeetsConstraints(slo8_9Constraints)() + } + + return versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsDataViewIDMinVersion)() + }, + Config: getSLOConfig(sloVars{name: sloName, indicatorType: "apm_availability_indicator", settingsEnabled: true, includeDataViewID: testWithDataViewID}), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_availability_indicator.0.environment", "production"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_availability_indicator.0.service", "my-service"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_availability_indicator.0.transaction_type", "request"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_availability_indicator.0.transaction_name", "GET /sup/dawg"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "apm_availability_indicator.0.index", "my-index-"+sloName), + ), + }, + { + SkipFunc: func() (bool, error) { + if !testWithDataViewID { + return versionutils.CheckIfVersionMeetsConstraints(slo8_9Constraints)() + } + + return versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsDataViewIDMinVersion)() + }, + Config: getSLOConfig(sloVars{name: sloName, indicatorType: "kql_custom_indicator", settingsEnabled: true, includeDataViewID: testWithDataViewID}), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "kql_custom_indicator.0.index", "my-index-"+sloName), + dataviewCheckFunc("kql_custom_indicator"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "kql_custom_indicator.0.good", "latency < 300"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "kql_custom_indicator.0.total", "*"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "kql_custom_indicator.0.filter", "labels.groupId: group-0"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "kql_custom_indicator.0.timestamp_field", "custom_timestamp"), + ), + }, + { + SkipFunc: func() (bool, error) { + if !testWithDataViewID { + return versionutils.CheckIfVersionMeetsConstraints(slo8_10Constraints)() + } + + return versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsDataViewIDMinVersion)() + }, + Config: getSLOConfig(sloVars{name: sloName, indicatorType: 
"histogram_custom_indicator", settingsEnabled: true, includeDataViewID: testWithDataViewID}), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.index", "my-index-"+sloName), + dataviewCheckFunc("histogram_custom_indicator"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.good.0.field", "test"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.good.0.aggregation", "value_count"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.good.0.filter", "latency < 300"), + resource.TestCheckResourceAttrSet("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.good.0.from"), + resource.TestCheckResourceAttrSet("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.good.0.to"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.total.0.field", "test"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "histogram_custom_indicator.0.total.0.aggregation", "value_count"), + ), + }, + { + SkipFunc: func() (bool, error) { + if !testWithDataViewID { + return versionutils.CheckIfVersionMeetsConstraints(slo8_10Constraints)() + } + + return versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsDataViewIDMinVersion)() + }, + Config: getSLOConfig(sloVars{ + name: sloName, + indicatorType: "metric_custom_indicator", + settingsEnabled: true, + groupBy: []string{"some.field"}, + includeDataViewID: testWithDataViewID, + }), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.index", "my-index-"+sloName), + dataviewCheckFunc("metric_custom_indicator"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.0.name", "A"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.0.aggregation", "sum"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.0.field", "processor.processed"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.1.name", "B"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.1.aggregation", "sum"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.metrics.1.field", "processor.processed"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.good.0.equation", "A + B"), + + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.0.name", "A"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.0.aggregation", "sum"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.0.field", "processor.accepted"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.1.name", "B"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.metrics.1.aggregation", "sum"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", 
"metric_custom_indicator.0.total.0.metrics.1.field", "processor.accepted"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.total.0.equation", "A + B"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "group_by.#", "1"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "group_by.0", "some.field"), + ), + }, + { + SkipFunc: func() (bool, error) { + if !testWithDataViewID { + return versionutils.CheckIfVersionMeetsConstraints(slo8_10Constraints)() + } + + return versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsDataViewIDMinVersion)() + }, + Config: getSLOConfig(sloVars{ + name: sloName, + indicatorType: "metric_custom_indicator", + settingsEnabled: true, + tags: []string{"tag-1", "another_tag"}, + includeDataViewID: testWithDataViewID, + }), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "tags.0", "tag-1"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "tags.1", "another_tag"), + ), + }, + { + SkipFunc: func() (bool, error) { + if !testWithDataViewID { + return versionutils.CheckIfVersionIsUnsupported(sloTimesliceMetricsMinVersion)() + } + + return versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsDataViewIDMinVersion)() + }, + Config: getSLOConfig(sloVars{ + name: sloName, + indicatorType: "timeslice_metric_indicator", + settingsEnabled: true, + tags: []string{"tag-1", "another_tag"}, + includeDataViewID: testWithDataViewID, + }), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "timeslice_metric_indicator.0.index", "my-index-"+sloName), + dataviewCheckFunc("timeslice_metric_indicator"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "timeslice_metric_indicator.0.metric.0.metrics.0.name", "A"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "timeslice_metric_indicator.0.metric.0.metrics.0.aggregation", "sum"), + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "timeslice_metric_indicator.0.metric.0.equation", "A"), + ), + }, + }, + }) + }) + } } func TestAccResourceSloGroupBy(t *testing.T) { @@ -197,6 +272,8 @@ func TestAccResourceSloGroupBy(t *testing.T) { settingsEnabled: true, groupBy: []string{"some.field"}, useSingleElementGroupBy: true, + includeDataViewID: false, + // Do not set data_view_id for this test, as it is not supported in this provider version }), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.index", "my-index-"+sloName), @@ -222,10 +299,12 @@ func TestAccResourceSloGroupBy(t *testing.T) { ProtoV6ProviderFactories: acctest.Providers, SkipFunc: versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsMultipleGroupByMinVersion), Config: getSLOConfig(sloVars{ - name: sloName, - indicatorType: "metric_custom_indicator", - settingsEnabled: true, - groupBy: []string{"some.field", "some.other.field"}, + name: sloName, + indicatorType: "metric_custom_indicator", + settingsEnabled: true, + groupBy: []string{"some.field", "some.other.field"}, + includeDataViewID: false, + // Do not set data_view_id for this test, as it is not supported in this provider version }), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.index", "my-index-"+sloName), @@ -720,6 +799,7 @@ type sloVars struct { 
tags []string groupBy []string useSingleElementGroupBy bool + includeDataViewID bool } func getSLOConfig(vars sloVars) string { @@ -757,6 +837,11 @@ func getSLOConfig(vars sloVars) string { groupByOption = "" } + dataViewID := "" + if vars.includeDataViewID { + dataViewID = "data_view_id = \"my-data-view-id\"" + } + configTemplate := ` provider "elasticstack" { elasticsearch {} @@ -804,126 +889,131 @@ func getSLOConfig(vars sloVars) string { switch indicatorType { case "apm_latency_indicator": indicator = fmt.Sprintf(` - apm_latency_indicator { - environment = "production" - service = "my-service" - transaction_type = "request" - transaction_name = "GET /sup/dawg" - index = "my-index-%s" - threshold = 500 - } - `, vars.name) + apm_latency_indicator { + environment = "production" + service = "my-service" + transaction_type = "request" + transaction_name = "GET /sup/dawg" + index = "my-index-%s" + threshold = 500 + } + `, vars.name) case "apm_availability_indicator": indicator = fmt.Sprintf(` - apm_availability_indicator { - environment = "production" - service = "my-service" - transaction_type = "request" - transaction_name = "GET /sup/dawg" - index = "my-index-%s" - } - `, vars.name) + apm_availability_indicator { + environment = "production" + service = "my-service" + transaction_type = "request" + transaction_name = "GET /sup/dawg" + index = "my-index-%s" + } + `, vars.name) case "kql_custom_indicator": indicator = fmt.Sprintf(` - kql_custom_indicator { - index = "my-index-%s" - good = "latency < 300" - total = "*" - filter = "labels.groupId: group-0" - timestamp_field = "custom_timestamp" - } - `, vars.name) + kql_custom_indicator { + index = "my-index-%s" + %s + good = "latency < 300" + total = "*" + filter = "labels.groupId: group-0" + timestamp_field = "custom_timestamp" + } + `, vars.name, dataViewID) case "histogram_custom_indicator": indicator = fmt.Sprintf(` - histogram_custom_indicator { - index = "my-index-%s" - good { - field = "test" - aggregation = "value_count" - filter = "latency < 300" - } - total { - field = "test" - aggregation = "value_count" - } - filter = "labels.groupId: group-0" - timestamp_field = "custom_timestamp" - } - `, vars.name) + histogram_custom_indicator { + index = "my-index-%s" + %s + good { + field = "test" + aggregation = "value_count" + filter = "latency < 300" + } + total { + field = "test" + aggregation = "value_count" + } + filter = "labels.groupId: group-0" + timestamp_field = "custom_timestamp" + } + `, vars.name, dataViewID) case "histogram_custom_indicator_agg_fail": indicator = fmt.Sprintf(` - histogram_custom_indicator { - index = "my-index-%s" - good { - field = "test" - aggregation = "supdawg" - filter = "latency < 300" - from = 0 - to = 10 - } - total { - field = "test" - aggregation = "supdawg" - } - filter = "labels.groupId: group-0" - timestamp_field = "custom_timestamp" - } - `, vars.name) + histogram_custom_indicator { + index = "my-index-%s" + %s + good { + field = "test" + aggregation = "supdawg" + filter = "latency < 300" + from = 0 + to = 10 + } + total { + field = "test" + aggregation = "supdawg" + } + filter = "labels.groupId: group-0" + timestamp_field = "custom_timestamp" + } + `, vars.name, dataViewID) case "metric_custom_indicator": indicator = fmt.Sprintf(` - metric_custom_indicator { - index = "my-index-%s" - good { - metrics { - name = "A" - aggregation = "sum" - field = "processor.processed" - } - metrics { - name = "B" - aggregation = "sum" - field = "processor.processed" - } - equation = "A + B" - } - - total { - 
metrics { - name = "A" - aggregation = "sum" - field = "processor.accepted" - } - metrics { - name = "B" - aggregation = "sum" - field = "processor.accepted" - } - equation = "A + B" - } - } - `, vars.name) + metric_custom_indicator { + index = "my-index-%s" + %s + good { + metrics { + name = "A" + aggregation = "sum" + field = "processor.processed" + } + metrics { + name = "B" + aggregation = "sum" + field = "processor.processed" + } + equation = "A + B" + } + + total { + metrics { + name = "A" + aggregation = "sum" + field = "processor.accepted" + } + metrics { + name = "B" + aggregation = "sum" + field = "processor.accepted" + } + equation = "A + B" + } + } + `, vars.name, dataViewID) case "timeslice_metric_indicator": indicator = fmt.Sprintf(` - timeslice_metric_indicator { - index = "my-index-%s" - timestamp_field = "@timestamp" - metric { - metrics { - name = "A" - aggregation = "sum" - field = "latency" - } - equation = "A" - comparator = "GT" - threshold = 100 - } - } - `, vars.name) + timeslice_metric_indicator { + index = "my-index-%s" + %s + timestamp_field = "@timestamp" + metric { + metrics { + name = "A" + aggregation = "sum" + field = "latency" + } + equation = "A" + comparator = "GT" + threshold = 100 + } + } + `, vars.name, dataViewID) } return indicator } From 697a7f4aaea7effab390236be2fcf0add0293680 Mon Sep 17 00:00:00 2001 From: Jason Rhodes Date: Mon, 15 Sep 2025 23:40:14 -0400 Subject: [PATCH 56/66] Add optional preventInitialBackfill for SLO API (#1071) * Add optional preventInitialBackfill for SLO API Docs for this option are here: https://www.elastic.co/docs/api/doc/serverless/operation/operation-createsloop#operation-createsloop-body-application-json-settings-preventinitialbackfill * Correct new field to use snake_case * Adds further settings management I think the generated settings model found at generated/slo/model_settings.go is still missing a final component here, but that file is generated by OpenAPI so we probably have a change to make in our open API docs, or we need to re-run the generation script there? 
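As a rough illustration of what the two SLO additions in this part of the series look like from the practitioner side, here is a minimal sketch of an `elasticstack_kibana_slo` configuration using the new `data_view_id` and `prevent_initial_backfill` attributes. It is assembled from the acceptance-test configs in these patches rather than copied from the provider docs, so the attribute values are placeholders and the overall resource layout is an assumption.

```terraform
resource "elasticstack_kibana_slo" "example" {
  name             = "my-slo"
  description      = "example SLO"
  budgeting_method = "timeslices"

  kql_custom_indicator {
    index           = "my-index"
    data_view_id    = "my-data-view-id" # added in the previous patch; requires Elastic Stack >= 8.15.0
    good            = "latency < 300"
    total           = "*"
    filter          = "labels.groupId: group-0"
    timestamp_field = "custom_timestamp"
  }

  time_window {
    duration = "7d"
    type     = "rolling"
  }

  objective {
    target           = 0.999
    timeslice_target = 0.95
    timeslice_window = "5m"
  }

  settings {
    sync_delay               = "5m"
    frequency                = "5m"
    prevent_initial_backfill = true # added in this patch; requires Elastic Stack >= 8.15.0
  }
}
```

On older stack versions both attributes are rejected up front by the version checks added in `resourceSloCreate` and `resourceSloUpdate`, so a plan that sets them against, say, 8.14 fails with a descriptive error instead of a confusing API response.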
* Remove unused function * Changelog * make docs-generate * Acceptance test * Tidy up the version check * make lint --------- Co-authored-by: Toby Brain --- CHANGELOG.md | 1 + docs/resources/kibana_slo.md | 1 + internal/kibana/slo.go | 34 +++++++++++++++++++----- internal/kibana/slo_test.go | 50 +++++++++++++++++++++++++++++------- 4 files changed, 71 insertions(+), 15 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a98032f68..3f27ecf86 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,6 +13,7 @@ - Migrate `elasticstack_kibana_action_connector` to the Terraform plugin framework ([#1269](https://github.com/elastic/terraform-provider-elasticstack/pull/1269)) - Migrate `elasticstack_elasticsearch_security_role_mapping` resource and data source to Terraform Plugin Framework ([#1279](https://github.com/elastic/terraform-provider-elasticstack/pull/1279)) - Add support for `inactivity_timeout` in `elasticstack_fleet_agent_policy` ([#641](https://github.com/elastic/terraform-provider-elasticstack/issues/641)) +- Add support for `prevent_initial_backfill` to `elasticstack_kibana_slo` ([#1071](https://github.com/elastic/terraform-provider-elasticstack/pull/1071)) - [Refactor] Regenerate the SLO client using the current OpenAPI spec ([#1303](https://github.com/elastic/terraform-provider-elasticstack/pull/1303)) - Add support for `data_view_id` in the `elasticstack_kibana_slo` resource ([#1305](https://github.com/elastic/terraform-provider-elasticstack/pull/1305)) - Add support for `unenrollment_timeout` in `elasticstack_fleet_agent_policy` ([#1169](https://github.com/elastic/terraform-provider-elasticstack/issues/1169)) diff --git a/docs/resources/kibana_slo.md b/docs/resources/kibana_slo.md index 61add57c0..788bdf740 100644 --- a/docs/resources/kibana_slo.md +++ b/docs/resources/kibana_slo.md @@ -442,6 +442,7 @@ Optional: Optional: - `frequency` (String) +- `prevent_initial_backfill` (Boolean) Prevents the underlying ES transform from attempting to backfill data on start, which can sometimes be resource-intensive or time-consuming and unnecessary - `sync_delay` (String) diff --git a/internal/kibana/slo.go b/internal/kibana/slo.go index 53dc3d19c..fb83c4a72 100644 --- a/internal/kibana/slo.go +++ b/internal/kibana/slo.go @@ -17,8 +17,9 @@ import ( ) var ( - SLOSupportsMultipleGroupByMinVersion = version.Must(version.NewVersion("8.14.0")) - SLOSupportsDataViewIDMinVersion = version.Must(version.NewVersion("8.15.0")) + SLOSupportsMultipleGroupByMinVersion = version.Must(version.NewVersion("8.14.0")) + SLOSupportsPreventInitialBackfillMinVersion = version.Must(version.NewVersion("8.15.0")) + SLOSupportsDataViewIDMinVersion = version.Must(version.NewVersion("8.15.0")) ) func ResourceSlo() *schema.Resource { @@ -565,6 +566,11 @@ func getSchema() map[string]*schema.Schema { Optional: true, Computed: true, }, + "prevent_initial_backfill": { + Description: "Prevents the underlying ES transform from attempting to backfill data on start, which can sometimes be resource-intensive or time-consuming and unnecessary", + Type: schema.TypeBool, + Optional: true, + }, }, }, }, @@ -824,8 +830,9 @@ func getSloFromResourceData(d *schema.ResourceData) (models.Slo, diag.Diagnostic } settings := slo.Settings{ - SyncDelay: getOrNil[string]("settings.0.sync_delay", d), - Frequency: getOrNil[string]("settings.0.frequency", d), + SyncDelay: getOrNil[string]("settings.0.sync_delay", d), + Frequency: getOrNil[string]("settings.0.frequency", d), + PreventInitialBackfill: 
getOrNil[bool]("settings.0.prevent_initial_backfill", d), } budgetingMethod := slo.BudgetingMethod(d.Get("budgeting_method").(string)) @@ -877,6 +884,13 @@ func resourceSloCreate(ctx context.Context, d *schema.ResourceData, meta interfa return diags } + // Version check for prevent_initial_backfill + if slo.Settings.PreventInitialBackfill != nil { + if !serverVersion.GreaterThanOrEqual(SLOSupportsPreventInitialBackfillMinVersion) { + return diag.Errorf("The 'prevent_initial_backfill' setting requires Elastic Stack version %s or higher.", SLOSupportsPreventInitialBackfillMinVersion) + } + } + // Version check for data_view_id support if !serverVersion.GreaterThanOrEqual(SLOSupportsDataViewIDMinVersion) { // Check all indicator types that support data_view_id @@ -919,6 +933,13 @@ func resourceSloUpdate(ctx context.Context, d *schema.ResourceData, meta interfa return diags } + // Version check for prevent_initial_backfill + if slo.Settings.PreventInitialBackfill != nil { + if !serverVersion.GreaterThanOrEqual(SLOSupportsPreventInitialBackfillMinVersion) { + return diag.Errorf("The 'prevent_initial_backfill' setting requires Elastic Stack version %s or higher.", SLOSupportsPreventInitialBackfillMinVersion) + } + } + // Version check for data_view_id support if !serverVersion.GreaterThanOrEqual(SLOSupportsDataViewIDMinVersion) { for _, indicatorType := range []string{"metric_custom_indicator", "histogram_custom_indicator", "kql_custom_indicator", "timeslice_metric_indicator"} { @@ -1149,8 +1170,9 @@ func resourceSloRead(ctx context.Context, d *schema.ResourceData, meta interface if err := d.Set("settings", []interface{}{ map[string]interface{}{ - "sync_delay": s.Settings.SyncDelay, - "frequency": s.Settings.Frequency, + "sync_delay": s.Settings.SyncDelay, + "frequency": s.Settings.Frequency, + "prevent_initial_backfill": s.Settings.PreventInitialBackfill, }, }); err != nil { return diag.FromErr(err) diff --git a/internal/kibana/slo_test.go b/internal/kibana/slo_test.go index 2f1216bed..d2c0a5b0c 100644 --- a/internal/kibana/slo_test.go +++ b/internal/kibana/slo_test.go @@ -332,6 +332,32 @@ func TestAccResourceSloGroupBy(t *testing.T) { }) } +func TestAccResourceSloPreventInitialBackfill(t *testing.T) { + sloName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) + resource.Test(t, resource.TestCase{ + PreCheck: func() { acctest.PreCheck(t) }, + CheckDestroy: checkResourceSloDestroy, + ProtoV6ProviderFactories: acctest.Providers, + Steps: []resource.TestStep{ + { + SkipFunc: versionutils.CheckIfVersionIsUnsupported(kibanaresource.SLOSupportsPreventInitialBackfillMinVersion), + Config: getSLOConfig(sloVars{ + name: sloName, + indicatorType: "metric_custom_indicator", + settingsEnabled: true, + groupBy: []string{"some.field", "some.other.field"}, + includePreventInitialBackfill: true, + }), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "metric_custom_indicator.0.index", "my-index-"+sloName), + + resource.TestCheckResourceAttr("elasticstack_kibana_slo.test_slo", "settings.0.prevent_initial_backfill", "true"), + ), + }, + }, + }) +} + func TestAccResourceSlo_timeslice_metric_indicator_basic(t *testing.T) { sloName := sdkacctest.RandStringFromCharSet(22, sdkacctest.CharSetAlphaNum) resource.Test(t, resource.TestCase{ @@ -793,24 +819,30 @@ func checkResourceSloDestroy(s *terraform.State) error { } type sloVars struct { - name string - indicatorType string - settingsEnabled bool - tags []string - groupBy []string - 
useSingleElementGroupBy bool - includeDataViewID bool + name string + indicatorType string + settingsEnabled bool + tags []string + groupBy []string + useSingleElementGroupBy bool + includeDataViewID bool + includePreventInitialBackfill bool } func getSLOConfig(vars sloVars) string { var settings string if vars.settingsEnabled { - settings = ` + preventInitialBackfill := "" + if vars.includePreventInitialBackfill { + preventInitialBackfill = "prevent_initial_backfill = true" + } + settings = fmt.Sprintf(` settings { sync_delay = "5m" frequency = "5m" + %s } - ` + `, preventInitialBackfill) } else { settings = "" } From 5b3127265a095164d02d8ed7b15afb916204f209 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Wed, 17 Sep 2025 15:42:17 +1000 Subject: [PATCH 57/66] Improve docs generation (#1313) * Add generic data source template * Update code level descriptions from the existing templates * Remove redundant templates * make docs-generate * Make links in docs a little nicer * make docs-generate --- Makefile | 2 +- .../elasticsearch_enrich_policy.md | 10 +-- .../elasticsearch_index_template.md | 11 ++-- docs/data-sources/elasticsearch_indices.md | 11 ++-- docs/data-sources/elasticsearch_info.md | 13 ++-- .../elasticsearch_ingest_processor_append.md | 15 ++--- .../elasticsearch_ingest_processor_bytes.md | 18 ++--- .../elasticsearch_ingest_processor_circle.md | 14 ++-- ...ticsearch_ingest_processor_community_id.md | 16 ++--- .../elasticsearch_ingest_processor_convert.md | 31 ++++++--- .../elasticsearch_ingest_processor_csv.md | 17 +++-- .../elasticsearch_ingest_processor_date.md | 17 +++-- ...search_ingest_processor_date_index_name.md | 15 +++-- .../elasticsearch_ingest_processor_dissect.md | 16 +++-- ...ticsearch_ingest_processor_dot_expander.md | 14 ++-- .../elasticsearch_ingest_processor_drop.md | 14 ++-- .../elasticsearch_ingest_processor_enrich.md | 13 ++-- .../elasticsearch_ingest_processor_fail.md | 14 ++-- ...sticsearch_ingest_processor_fingerprint.md | 13 ++-- .../elasticsearch_ingest_processor_foreach.md | 22 ++++--- .../elasticsearch_ingest_processor_geoip.md | 20 +++--- .../elasticsearch_ingest_processor_grok.md | 17 +++-- .../elasticsearch_ingest_processor_gsub.md | 14 ++-- ...asticsearch_ingest_processor_html_strip.md | 14 ++-- .../elasticsearch_ingest_processor_join.md | 14 ++-- .../elasticsearch_ingest_processor_json.md | 13 ++-- .../elasticsearch_ingest_processor_kv.md | 14 ++-- ...lasticsearch_ingest_processor_lowercase.md | 14 ++-- ...arch_ingest_processor_network_direction.md | 23 +++---- ...elasticsearch_ingest_processor_pipeline.md | 16 ++--- ...arch_ingest_processor_registered_domain.md | 14 ++-- .../elasticsearch_ingest_processor_remove.md | 14 ++-- .../elasticsearch_ingest_processor_rename.md | 14 ++-- .../elasticsearch_ingest_processor_reroute.md | 16 ++--- .../elasticsearch_ingest_processor_script.md | 24 ++----- .../elasticsearch_ingest_processor_set.md | 14 ++-- ...arch_ingest_processor_set_security_user.md | 14 ++-- .../elasticsearch_ingest_processor_sort.md | 14 ++-- .../elasticsearch_ingest_processor_split.md | 14 ++-- .../elasticsearch_ingest_processor_trim.md | 16 ++--- ...lasticsearch_ingest_processor_uppercase.md | 14 ++-- ...lasticsearch_ingest_processor_uri_parts.md | 14 ++-- ...lasticsearch_ingest_processor_urldecode.md | 14 ++-- ...asticsearch_ingest_processor_user_agent.md | 17 ++--- .../elasticsearch_security_role.md | 11 ++-- .../elasticsearch_security_role_mapping.md | 9 +-- .../elasticsearch_security_user.md | 11 ++-- 
.../elasticsearch_snapshot_repository.md | 9 +-- docs/data-sources/fleet_enrollment_tokens.md | 11 ++-- docs/data-sources/fleet_integration.md | 17 +++-- docs/data-sources/kibana_action_connector.md | 11 ++-- docs/data-sources/kibana_security_role.md | 11 ++-- docs/data-sources/kibana_spaces.md | 11 ++-- docs/resources/apm_agent_configuration.md | 15 +++-- .../elasticsearch_cluster_settings.md | 11 ++-- .../elasticsearch_component_template.md | 17 +++-- docs/resources/elasticsearch_data_stream.md | 13 ++-- .../elasticsearch_data_stream_lifecycle.md | 11 ++-- docs/resources/elasticsearch_enrich_policy.md | 15 +++-- docs/resources/elasticsearch_index.md | 18 +++-- .../elasticsearch_index_lifecycle.md | 11 ++-- .../resources/elasticsearch_index_template.md | 11 ++-- .../elasticsearch_ingest_pipeline.md | 23 +++---- .../elasticsearch_logstash_pipeline.md | 13 ++-- docs/resources/elasticsearch_script.md | 15 +++-- .../elasticsearch_security_api_key.md | 13 ++-- docs/resources/elasticsearch_security_role.md | 13 ++-- .../elasticsearch_security_role_mapping.md | 13 ++-- .../elasticsearch_security_system_user.md | 17 +++-- docs/resources/elasticsearch_security_user.md | 15 +++-- .../elasticsearch_snapshot_lifecycle.md | 13 ++-- .../elasticsearch_snapshot_repository.md | 23 ++++--- docs/resources/elasticsearch_transform.md | 16 +++-- docs/resources/elasticsearch_watch.md | 15 +++-- docs/resources/fleet_agent_policy.md | 13 ++-- docs/resources/fleet_integration.md | 13 ++-- docs/resources/fleet_integration_policy.md | 16 ++++- docs/resources/fleet_output.md | 13 ++-- docs/resources/fleet_server_host.md | 13 ++-- docs/resources/kibana_action_connector.md | 13 ++-- docs/resources/kibana_alerting_rule.md | 29 +++++---- docs/resources/kibana_data_view.md | 13 ++-- docs/resources/kibana_import_saved_objects.md | 13 ++-- docs/resources/kibana_maintenance_window.md | 11 ++-- docs/resources/kibana_security_role.md | 18 +++-- docs/resources/kibana_slo.md | 11 ++-- docs/resources/kibana_space.md | 13 ++-- docs/resources/kibana_synthetics_monitor.md | 24 ++++--- docs/resources/kibana_synthetics_parameter.md | 14 ++-- .../kibana_synthetics_private_location.md | 14 ++-- .../resource.tf | 21 ++++++ .../resource2.tf | 18 ----- internal/apm/agent_configuration/schema.go | 2 +- internal/elasticsearch/cluster/script.go | 2 +- internal/elasticsearch/cluster/settings.go | 2 +- internal/elasticsearch/cluster/slm.go | 2 +- .../cluster/snapshot_repository.go | 12 ++-- .../snapshot_repository_data_source.go | 2 +- internal/elasticsearch/enrich/data_source.go | 2 +- internal/elasticsearch/enrich/resource.go | 2 +- .../elasticsearch/index/component_template.go | 6 +- .../elasticsearch/index/indices/schema.go | 2 +- .../index/template_data_source.go | 2 +- .../ingest/processor_append_data_source.go | 2 +- .../ingest/processor_bytes_data_source.go | 6 +- .../ingest/processor_bytes_data_source.md | 5 ++ .../ingest/processor_circle_data_source.go | 2 +- .../processor_community_id_data_source.go | 7 +- .../processor_community_id_data_source.md | 4 ++ .../ingest/processor_convert_data_source.go | 8 ++- .../ingest/processor_convert_data_source.md | 17 +++++ .../ingest/processor_csv_data_source.go | 7 +- .../ingest/processor_csv_data_source.md | 3 + .../ingest/processor_date_data_source.go | 7 +- .../ingest/processor_date_data_source.md | 3 + .../processor_date_index_name_data_source.go | 6 +- .../processor_date_index_name_data_source.md | 5 ++ .../ingest/processor_dissect_data_source.go | 6 +- 
.../ingest/processor_dissect_data_source.md | 5 ++ .../processor_dissect_dot_expander_source.go | 2 +- .../ingest/processor_drop_data_source.go | 2 +- .../ingest/processor_enrich_data_source.go | 2 +- .../ingest/processor_fail_data_source.go | 2 +- .../processor_fingerprint_data_source.go | 2 +- .../ingest/processor_foreach_data_source.go | 6 +- .../ingest/processor_foreach_data_source.md | 25 +------ .../ingest/processor_geoip_data_source.go | 6 +- .../ingest/processor_geoip_data_source.md | 7 ++ .../ingest/processor_grok_data_source.go | 6 +- .../ingest/processor_grok_data_source.md | 5 ++ .../ingest/processor_gsub_data_source.go | 2 +- .../processor_html_strip_data_source.go | 2 +- .../ingest/processor_join_data_source.go | 2 +- .../ingest/processor_json_data_source.go | 2 +- .../ingest/processor_kv_data_source.go | 2 +- .../ingest/processor_lowercase_data_source.go | 2 +- ...processor_network_direction_data_source.go | 8 ++- ...processor_network_direction_data_source.md | 27 +------- .../ingest/processor_pipeline_data_source.go | 2 +- ...processor_registered_domain_data_source.go | 2 +- .../ingest/processor_remove_data_source.go | 2 +- .../ingest/processor_rename_data_source.go | 2 +- .../ingest/processor_reroute_data_source.go | 2 +- .../ingest/processor_script_data_source.go | 2 +- .../ingest/processor_script_data_source.md | 11 ++++ .../ingest/processor_set_data_source.go | 2 +- ...processor_set_security_user_data_source.go | 2 +- ...processor_set_security_user_data_source.md | 8 +++ .../ingest/processor_sort_data_source.go | 2 +- .../ingest/processor_split_data_source.go | 2 +- .../ingest/processor_trim_data_source.go | 2 +- .../ingest/processor_uppercase_data_source.go | 2 +- .../ingest/processor_uri_parts_data_source.go | 2 +- .../ingest/processor_urldecode_data_source.go | 2 +- .../processor_user_agent_data_source.go | 2 +- .../elasticsearch/security/api_key/schema.go | 2 +- internal/elasticsearch/security/role.go | 2 +- .../security/role_mapping/schema.go | 2 +- .../system_user/resource-description.md | 3 + .../security/system_user/schema.go | 10 ++- internal/elasticsearch/security/user.go | 4 +- .../security/user_data_source.go | 2 +- internal/elasticsearch/transform/transform.go | 8 ++- internal/elasticsearch/transform/transform.md | 3 + internal/fleet/enrollment_tokens/schema.go | 2 +- internal/fleet/integration/schema.go | 7 +- internal/fleet/integration_ds/schema.go | 11 +++- .../resource-description.md | 22 ------- internal/fleet/integration_policy/schema.go | 6 +- internal/kibana/alerting.go | 7 +- internal/kibana/alerting.md | 7 ++ internal/kibana/data_view/schema.go | 2 +- .../kibana/import_saved_objects/schema.go | 2 +- internal/kibana/maintenance_window/schema.go | 2 +- internal/kibana/role.go | 7 +- internal/kibana/role.md | 5 ++ internal/kibana/role_data_source.go | 2 +- internal/kibana/slo.go | 2 +- internal/kibana/space.go | 2 +- internal/kibana/spaces/schema.go | 2 +- .../parameter/resource-description.md | 4 ++ .../kibana/synthetics/parameter/schema.go | 6 +- .../private_location/resource-description.md | 4 ++ .../synthetics/private_location/schema.go | 6 +- .../kibana/synthetics/resource-description.md | 12 ++++ internal/kibana/synthetics/schema.go | 6 +- internal/utils/validation.go | 2 +- templates/data-sources.md.tmpl | 57 ++++++++++++++++ .../elasticsearch_enrich_policy.md.tmpl | 16 ----- .../elasticsearch_index_template.md.tmpl | 17 ----- .../elasticsearch_indices.md.tmpl | 17 ----- .../data-sources/elasticsearch_info.md.tmpl | 17 ----- 
...sticsearch_ingest_processor_append.md.tmpl | 21 ------ ...asticsearch_ingest_processor_bytes.md.tmpl | 22 ------- ...sticsearch_ingest_processor_circle.md.tmpl | 20 ------ ...arch_ingest_processor_community_id.md.tmpl | 23 ------- ...ticsearch_ingest_processor_convert.md.tmpl | 28 -------- ...elasticsearch_ingest_processor_csv.md.tmpl | 22 ------- ...lasticsearch_ingest_processor_date.md.tmpl | 22 ------- ...h_ingest_processor_date_index_name.md.tmpl | 23 ------- ...ticsearch_ingest_processor_dissect.md.tmpl | 22 ------- ...arch_ingest_processor_dot_expander.md.tmpl | 20 ------ ...lasticsearch_ingest_processor_drop.md.tmpl | 20 ------ ...sticsearch_ingest_processor_enrich.md.tmpl | 19 ------ ...lasticsearch_ingest_processor_fail.md.tmpl | 20 ------ ...earch_ingest_processor_fingerprint.md.tmpl | 19 ------ ...asticsearch_ingest_processor_geoip.md.tmpl | 27 -------- ...lasticsearch_ingest_processor_grok.md.tmpl | 25 ------- ...lasticsearch_ingest_processor_gsub.md.tmpl | 20 ------ ...search_ingest_processor_html_strip.md.tmpl | 20 ------ ...lasticsearch_ingest_processor_join.md.tmpl | 20 ------ ...lasticsearch_ingest_processor_json.md.tmpl | 19 ------ .../elasticsearch_ingest_processor_kv.md.tmpl | 20 ------ ...csearch_ingest_processor_lowercase.md.tmpl | 20 ------ ...icsearch_ingest_processor_pipeline.md.tmpl | 22 ------- ...ingest_processor_registered_domain.md.tmpl | 20 ------ ...sticsearch_ingest_processor_remove.md.tmpl | 20 ------ ...sticsearch_ingest_processor_rename.md.tmpl | 20 ------ ...ticsearch_ingest_processor_reroute.md.tmpl | 20 ------ ...sticsearch_ingest_processor_script.md.tmpl | 30 --------- ...elasticsearch_ingest_processor_set.md.tmpl | 20 ------ ...ingest_processor_set_security_user.md.tmpl | 20 ------ ...lasticsearch_ingest_processor_sort.md.tmpl | 20 ------ ...asticsearch_ingest_processor_split.md.tmpl | 20 ------ ...lasticsearch_ingest_processor_trim.md.tmpl | 22 ------- ...csearch_ingest_processor_uppercase.md.tmpl | 20 ------ ...csearch_ingest_processor_uri_parts.md.tmpl | 20 ------ ...csearch_ingest_processor_urldecode.md.tmpl | 20 ------ ...search_ingest_processor_user_agent.md.tmpl | 23 ------- .../elasticsearch_security_role.md.tmpl | 17 ----- ...lasticsearch_security_role_mapping.md.tmpl | 17 ----- .../elasticsearch_security_user.md.tmpl | 17 ----- .../elasticsearch_snapshot_repository.md.tmpl | 17 ----- .../fleet_enrollment_tokens.md.tmpl | 17 ----- .../data-sources/fleet_integration.md.tmpl | 26 -------- .../kibana_action_connector.md.tmpl | 17 ----- .../data-sources/kibana_security_role.md.tmpl | 17 ----- templates/data-sources/kibana_spaces.md.tmpl | 17 ----- templates/resources.md.tmpl | 65 +++++++++++++++++++ .../resources/apm_agent_configuration.md.tmpl | 23 ------- .../elasticsearch_cluster_settings.md.tmpl | 17 ----- .../elasticsearch_component_template.md.tmpl | 23 ------- .../elasticsearch_data_stream.md.tmpl | 23 ------- ...lasticsearch_data_stream_lifecycle.md.tmpl | 23 ------- .../elasticsearch_enrich_policy.md.tmpl | 23 ------- .../resources/elasticsearch_index.md.tmpl | 28 -------- .../elasticsearch_index_lifecycle.md.tmpl | 23 ------- .../elasticsearch_index_template.md.tmpl | 23 ------- .../elasticsearch_ingest_pipeline.md.tmpl | 31 --------- .../elasticsearch_logstash_pipeline.md.tmpl | 23 ------- .../resources/elasticsearch_script.md.tmpl | 23 ------- .../elasticsearch_security_api_key.md.tmpl | 21 ------ .../elasticsearch_security_role.md.tmpl | 23 ------- ...lasticsearch_security_role_mapping.md.tmpl | 23 ------- 
...elasticsearch_security_system_user.md.tmpl | 18 ----- .../elasticsearch_security_user.md.tmpl | 23 ------- .../elasticsearch_snapshot_lifecycle.md.tmpl | 23 ------- .../elasticsearch_snapshot_repository.md.tmpl | 23 ------- .../resources/elasticsearch_transform.md.tmpl | 25 ------- .../resources/elasticsearch_watch.md.tmpl | 23 ------- .../resources/fleet_agent_policy.md.tmpl | 23 ------- templates/resources/fleet_integration.md.tmpl | 22 ------- templates/resources/fleet_output.md.tmpl | 23 ------- templates/resources/fleet_server_host.md.tmpl | 23 ------- .../resources/kibana_action_connector.md.tmpl | 23 ------- .../resources/kibana_alerting_rule.md.tmpl | 30 --------- templates/resources/kibana_data_view.md.tmpl | 23 ------- .../kibana_import_saved_objects.md.tmpl | 21 ------ .../kibana_maintenance_window.md.tmpl | 23 ------- .../resources/kibana_security_role.md.tmpl | 36 +++++++--- templates/resources/kibana_slo.md.tmpl | 23 ------- templates/resources/kibana_space.md.tmpl | 23 ------- .../kibana_synthetics_monitor.md.tmpl | 34 ---------- .../kibana_synthetics_parameter.md.tmpl | 25 ------- ...kibana_synthetics_private_location.md.tmpl | 25 ------- 275 files changed, 1164 insertions(+), 2652 deletions(-) delete mode 100644 examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource2.tf create mode 100644 internal/elasticsearch/ingest/processor_bytes_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_community_id_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_convert_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_csv_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_date_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_date_index_name_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_dissect_data_source.md rename templates/data-sources/elasticsearch_ingest_processor_foreach.md.tmpl => internal/elasticsearch/ingest/processor_foreach_data_source.md (55%) create mode 100644 internal/elasticsearch/ingest/processor_geoip_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_grok_data_source.md rename templates/data-sources/elasticsearch_ingest_processor_network_direction.md.tmpl => internal/elasticsearch/ingest/processor_network_direction_data_source.md (59%) create mode 100644 internal/elasticsearch/ingest/processor_script_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_set_security_user_data_source.md create mode 100644 internal/elasticsearch/security/system_user/resource-description.md create mode 100644 internal/elasticsearch/transform/transform.md rename templates/resources/fleet_integration_policy.md.tmpl => internal/fleet/integration_policy/resource-description.md (52%) create mode 100644 internal/kibana/alerting.md create mode 100644 internal/kibana/role.md create mode 100644 internal/kibana/synthetics/parameter/resource-description.md create mode 100644 internal/kibana/synthetics/private_location/resource-description.md create mode 100644 internal/kibana/synthetics/resource-description.md create mode 100644 templates/data-sources.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_enrich_policy.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_index_template.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_indices.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_info.md.tmpl delete mode 100644 
templates/data-sources/elasticsearch_ingest_processor_append.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_bytes.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_circle.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_community_id.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_convert.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_csv.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_date.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_date_index_name.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_dissect.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_dot_expander.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_drop.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_enrich.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_fail.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_fingerprint.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_geoip.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_grok.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_gsub.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_html_strip.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_join.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_json.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_kv.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_lowercase.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_pipeline.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_registered_domain.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_remove.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_rename.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_reroute.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_script.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_set.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_set_security_user.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_sort.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_split.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_trim.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_uppercase.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_uri_parts.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_urldecode.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_user_agent.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_security_role.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_security_role_mapping.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_security_user.md.tmpl delete mode 100644 
templates/data-sources/elasticsearch_snapshot_repository.md.tmpl delete mode 100644 templates/data-sources/fleet_enrollment_tokens.md.tmpl delete mode 100644 templates/data-sources/fleet_integration.md.tmpl delete mode 100644 templates/data-sources/kibana_action_connector.md.tmpl delete mode 100644 templates/data-sources/kibana_security_role.md.tmpl delete mode 100644 templates/data-sources/kibana_spaces.md.tmpl create mode 100644 templates/resources.md.tmpl delete mode 100644 templates/resources/apm_agent_configuration.md.tmpl delete mode 100644 templates/resources/elasticsearch_cluster_settings.md.tmpl delete mode 100644 templates/resources/elasticsearch_component_template.md.tmpl delete mode 100644 templates/resources/elasticsearch_data_stream.md.tmpl delete mode 100644 templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl delete mode 100644 templates/resources/elasticsearch_enrich_policy.md.tmpl delete mode 100644 templates/resources/elasticsearch_index.md.tmpl delete mode 100644 templates/resources/elasticsearch_index_lifecycle.md.tmpl delete mode 100644 templates/resources/elasticsearch_index_template.md.tmpl delete mode 100644 templates/resources/elasticsearch_ingest_pipeline.md.tmpl delete mode 100644 templates/resources/elasticsearch_logstash_pipeline.md.tmpl delete mode 100644 templates/resources/elasticsearch_script.md.tmpl delete mode 100644 templates/resources/elasticsearch_security_api_key.md.tmpl delete mode 100644 templates/resources/elasticsearch_security_role.md.tmpl delete mode 100644 templates/resources/elasticsearch_security_role_mapping.md.tmpl delete mode 100644 templates/resources/elasticsearch_security_system_user.md.tmpl delete mode 100644 templates/resources/elasticsearch_security_user.md.tmpl delete mode 100644 templates/resources/elasticsearch_snapshot_lifecycle.md.tmpl delete mode 100644 templates/resources/elasticsearch_snapshot_repository.md.tmpl delete mode 100644 templates/resources/elasticsearch_transform.md.tmpl delete mode 100644 templates/resources/elasticsearch_watch.md.tmpl delete mode 100644 templates/resources/fleet_agent_policy.md.tmpl delete mode 100644 templates/resources/fleet_integration.md.tmpl delete mode 100644 templates/resources/fleet_output.md.tmpl delete mode 100644 templates/resources/fleet_server_host.md.tmpl delete mode 100644 templates/resources/kibana_action_connector.md.tmpl delete mode 100644 templates/resources/kibana_alerting_rule.md.tmpl delete mode 100644 templates/resources/kibana_data_view.md.tmpl delete mode 100644 templates/resources/kibana_import_saved_objects.md.tmpl delete mode 100644 templates/resources/kibana_maintenance_window.md.tmpl delete mode 100644 templates/resources/kibana_slo.md.tmpl delete mode 100644 templates/resources/kibana_space.md.tmpl delete mode 100644 templates/resources/kibana_synthetics_monitor.md.tmpl delete mode 100644 templates/resources/kibana_synthetics_parameter.md.tmpl delete mode 100644 templates/resources/kibana_synthetics_private_location.md.tmpl diff --git a/Makefile b/Makefile index bac041a4c..d94bcf925 100644 --- a/Makefile +++ b/Makefile @@ -225,7 +225,7 @@ docker-clean: ## Try to remove provisioned nodes and assigned network .PHONY: docs-generate docs-generate: tools ## Generate documentation for the provider - @ go tool github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-name elasticstack + @ go tool github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-name terraform-provider-elasticstack .PHONY: gen diff --git 
a/docs/data-sources/elasticsearch_enrich_policy.md b/docs/data-sources/elasticsearch_enrich_policy.md index 9c286ae92..b28cc69ad 100644 --- a/docs/data-sources/elasticsearch_enrich_policy.md +++ b/docs/data-sources/elasticsearch_enrich_policy.md @@ -1,13 +1,15 @@ + --- -subcategory: "Enrich" +# generated by https://github.com/hashicorp/terraform-plugin-docs page_title: "elasticstack_elasticsearch_enrich_policy Data Source - terraform-provider-elasticstack" +subcategory: "Enrich" description: |- - Returns information about an enrich policy. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html + Returns information about an enrich policy. See the enrich policy API documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html for more details. --- -# Data Source: elasticstack_elasticsearch_enrich_policy +# elasticstack_elasticsearch_enrich_policy (Data Source) -Returns information about an enrich policy. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html +Returns information about an enrich policy. See the [enrich policy API documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html) for more details. ## Example Usage diff --git a/docs/data-sources/elasticsearch_index_template.md b/docs/data-sources/elasticsearch_index_template.md index 9beebbb90..da0da4488 100644 --- a/docs/data-sources/elasticsearch_index_template.md +++ b/docs/data-sources/elasticsearch_index_template.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_index_template Data Source - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index_template Data Source" description: |- - Retrieves index template. + Retrieves information about an existing index template definition. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html --- -# Data Source: elasticstack_elasticsearch_index_template +# elasticstack_elasticsearch_index_template (Data Source) -Use this data source to retrieve information about existing Elasticsearch index templates. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html +Retrieves information about an existing index template definition. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_indices.md b/docs/data-sources/elasticsearch_indices.md index 36ad08c9d..1a0fdb5f4 100644 --- a/docs/data-sources/elasticsearch_indices.md +++ b/docs/data-sources/elasticsearch_indices.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_indices Data Source - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_indices Data Source" description: |- - Retrieves indices. + Retrieves information about existing Elasticsearch indices. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html --- -# Data Source: elasticstack_elasticsearch_indices +# elasticstack_elasticsearch_indices (Data Source) -Use this data source to retrieve and get information about existing Elasticsearch indices. 
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html +Retrieves information about existing Elasticsearch indices. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_info.md b/docs/data-sources/elasticsearch_info.md index 2ea14d1d8..1545d9c1b 100644 --- a/docs/data-sources/elasticsearch_info.md +++ b/docs/data-sources/elasticsearch_info.md @@ -1,14 +1,15 @@ + --- -subcategory: "Cluster" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_info Data Source" +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_info Data Source - terraform-provider-elasticstack" +subcategory: "Elasticsearch" description: |- - Gets information about the Elasticsearch cluster. + Gets information about the Elastic cluster. --- -# Data Source: elasticstack_elasticsearch_info +# elasticstack_elasticsearch_info (Data Source) -This data source provides the information about the configured Elasticsearch cluster +Gets information about the Elastic cluster. ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_append.md b/docs/data-sources/elasticsearch_ingest_processor_append.md index f33f66eb2..da6750d1e 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_append.md +++ b/docs/data-sources/elasticsearch_ingest_processor_append.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_append Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_append Data Source" description: |- - Helper data source to create a processor which appends one or more values to an existing array if the field already exists and it is an array. + Helper data source which can be used to create the configuration for an append processor. This processor appends one or more values to an existing array if the field already exists and it is an array. Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. Creates an array containing the provided values if the field doesn’t exist. See the append processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/append-processor.html for more details. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_append - -Helper data source to which can be used to create a processor to append one or more values to an existing array if the field already exists and it is an array. -Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. Creates an array containing the provided values if the field doesn’t exist. +# elasticstack_elasticsearch_ingest_processor_append (Data Source) -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/append-processor.html +Helper data source which can be used to create the configuration for an append processor. This processor appends one or more values to an existing array if the field already exists and it is an array. Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. Creates an array containing the provided values if the field doesn’t exist. 
See the [append processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/append-processor.html) for more details. ## Example Usage @@ -56,4 +54,3 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. - diff --git a/docs/data-sources/elasticsearch_ingest_processor_bytes.md b/docs/data-sources/elasticsearch_ingest_processor_bytes.md index 692f8b225..4f7b5782e 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_bytes.md +++ b/docs/data-sources/elasticsearch_ingest_processor_bytes.md @@ -1,18 +1,21 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_bytes Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_bytes Data Source" description: |- - Helper data source to create a processor which converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). + Helper data source which can be used to create the configuration for a bytes processor. The processor converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). See the bytes processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/bytes-processor.html for more details. + If the field is an array of strings, all members of the array will be converted. + Supported human readable units are "b", "kb", "mb", "gb", "tb", "pb" case insensitive. An error will occur if the field is not a supported format or resultant value exceeds 2^63. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_bytes +# elasticstack_elasticsearch_ingest_processor_bytes (Data Source) -Helper data source to which can be used to create a processor to convert a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). If the field is an array of strings, all members of the array will be converted. +Helper data source which can be used to create the configuration for a bytes processor. The processor converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). See the [bytes processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/bytes-processor.html) for more details. -Supported human readable units are "b", "kb", "mb", "gb", "tb", "pb" case insensitive. An error will occur if the field is not a supported format or resultant value exceeds 2^63. +If the field is an array of strings, all members of the array will be converted. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/bytes-processor.html +Supported human readable units are "b", "kb", "mb", "gb", "tb", "pb" case insensitive. An error will occur if the field is not a supported format or resultant value exceeds 2^63. ## Example Usage @@ -55,4 +58,3 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. 
- diff --git a/docs/data-sources/elasticsearch_ingest_processor_circle.md b/docs/data-sources/elasticsearch_ingest_processor_circle.md index a526a56f5..b5784a6d1 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_circle.md +++ b/docs/data-sources/elasticsearch_ingest_processor_circle.md @@ -1,16 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_circle Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_circle Data Source" description: |- - Helper data source to create a processor which converts circle definitions of shapes to regular polygons which approximate them. + Helper data source which can be used to create the configuration for an circle processor. This processor converts circle definitions of shapes to regular polygons which approximate them. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-circle-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_circle - -Helper data source to which can be used to create a processor to convert circle definitions of shapes to regular polygons which approximate them. +# elasticstack_elasticsearch_ingest_processor_circle (Data Source) -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-circle-processor.html +Helper data source which can be used to create the configuration for an circle processor. This processor converts circle definitions of shapes to regular polygons which approximate them. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-circle-processor.html ## Example Usage @@ -57,4 +56,3 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. - diff --git a/docs/data-sources/elasticsearch_ingest_processor_community_id.md b/docs/data-sources/elasticsearch_ingest_processor_community_id.md index bb9bda3d1..c7376d59c 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_community_id.md +++ b/docs/data-sources/elasticsearch_ingest_processor_community_id.md @@ -1,20 +1,21 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_community_id Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_community_id Data Source" description: |- - Helper data source to create a processor which computes the Community ID for network flow data as defined in the Community ID Specification. + Helper data source which can be used to create the configuration for a community ID processor. This processor computes the Community ID for network flow data as defined in the Community ID Specification. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/community-id-processor.html + You can use a community ID to correlate network events related to a single flow. + The community ID processor reads network flow data from related Elastic Common Schema (ECS) https://www.elastic.co/guide/en/ecs/1.12 fields by default. If you use the ECS, no configuration is required. 
--- -# Data Source: elasticstack_elasticsearch_ingest_processor_community_id +# elasticstack_elasticsearch_ingest_processor_community_id (Data Source) -Helper data source to which can be used to create a processor to compute the Community ID for network flow data as defined in the [Community ID Specification](https://github.com/corelight/community-id-spec). +Helper data source which can be used to create the configuration for a community ID processor. This processor computes the Community ID for network flow data as defined in the Community ID Specification. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/community-id-processor.html You can use a community ID to correlate network events related to a single flow. The community ID processor reads network flow data from related [Elastic Common Schema (ECS)](https://www.elastic.co/guide/en/ecs/1.12) fields by default. If you use the ECS, no configuration is required. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/community-id-processor.html - ## Example Usage ```terraform @@ -59,4 +60,3 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. - diff --git a/docs/data-sources/elasticsearch_ingest_processor_convert.md b/docs/data-sources/elasticsearch_ingest_processor_convert.md index 25f6d94be..8e120aa6f 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_convert.md +++ b/docs/data-sources/elasticsearch_ingest_processor_convert.md @@ -1,25 +1,37 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_convert Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_convert Data Source" description: |- - Helper data source to create a processor which converts a field in the currently ingested document to a different type, such as converting a string to an integer. + Helper data source which can be used to create the configuration for a convert processor. This processor converts a field in the currently ingested document to a different type, such as converting a string to an integer. See the convert processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/convert-processor.html for more details. + The supported types include: + integerlongfloatdoublestringbooleanipauto + Specifying boolean will set the field to true if its string value is equal to true (ignoring case), to false if its string value is equal to false (ignoring case), or it will throw an exception otherwise. + Specifying ip will set the target field to the value of field if it contains a valid IPv4 or IPv6 address that can be indexed into an IP field type. + Specifying auto will attempt to convert the string-valued field into the closest non-string, non-IP type. For example, a field whose value is "true" will be converted to its respective boolean type: true. Do note that float takes precedence of double in auto. A value of "242.15" will "automatically" be converted to 242.15 of type float. If a provided field cannot be appropriately converted, the processor will still process successfully and leave the field value as-is. In such a case, target_field will be updated with the unconverted field value. 
--- -# Data Source: elasticstack_elasticsearch_ingest_processor_convert +# elasticstack_elasticsearch_ingest_processor_convert (Data Source) -Helper data source to which can be used to convert a field in the currently ingested document to a different type, such as converting a string to an integer. If the field value is an array, all members will be converted. +Helper data source which can be used to create the configuration for a convert processor. This processor converts a field in the currently ingested document to a different type, such as converting a string to an integer. See the [convert processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/convert-processor.html) for more details. -The supported types include: `integer`, `long`, `float`, `double`, `string`, `boolean`, `ip`, and `auto`. +The supported types include: +- `integer` +- `long` +- `float` +- `double` +- `string` +- `boolean` +- `ip` +- `auto` -Specifying `boolean` will set the field to true if its string value is equal to true (ignore case), to false if its string value is equal to false (ignore case), or it will throw an exception otherwise. +Specifying `boolean` will set the field to true if its string value is equal to true (ignoring case), to false if its string value is equal to false (ignoring case), or it will throw an exception otherwise. Specifying `ip` will set the target field to the value of `field` if it contains a valid IPv4 or IPv6 address that can be indexed into an IP field type. Specifying `auto` will attempt to convert the string-valued `field` into the closest non-string, non-IP type. For example, a field whose value is "true" will be converted to its respective boolean type: true. Do note that float takes precedence of double in auto. A value of "242.15" will "automatically" be converted to 242.15 of type `float`. If a provided field cannot be appropriately converted, the processor will still process successfully and leave the field value as-is. In such a case, `target_field` will be updated with the unconverted field value. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/convert-processor.html - ## Example Usage ```terraform @@ -64,4 +76,3 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. - diff --git a/docs/data-sources/elasticsearch_ingest_processor_csv.md b/docs/data-sources/elasticsearch_ingest_processor_csv.md index b1b0aa307..be515d791 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_csv.md +++ b/docs/data-sources/elasticsearch_ingest_processor_csv.md @@ -1,16 +1,18 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_csv Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_csv Data Source" description: |- - Helper data source to create a processor which extracts fields from CSV line out of a single text field within a document. + Helper data source which can be used to create the configuration for a CSV processor. This processor extracts fields from CSV line out of a single text field within a document. Any empty field in CSV will be skipped. See the CSV processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/csv-processor.html for more details. 
+ If the trim option is enabled then any whitespace in the beginning and in the end of each unquoted field will be trimmed. For example with configuration above, a value of A, B will result in field field2 having value {nbsp}B (with space at the beginning). If trim is enabled A, B will result in field field2 having value B (no whitespace). Quoted fields will be left untouched. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_csv +# elasticstack_elasticsearch_ingest_processor_csv (Data Source) -Helper data source to which can be used to extract fields from CSV line out of a single text field within a document. Any empty field in CSV will be skipped. +Helper data source which can be used to create the configuration for a CSV processor. This processor extracts fields from CSV line out of a single text field within a document. Any empty field in CSV will be skipped. See the [CSV processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/csv-processor.html) for more details. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/csv-processor.html +If the `trim` option is enabled then any whitespace in the beginning and in the end of each unquoted field will be trimmed. For example with configuration above, a value of A, B will result in field field2 having value {nbsp}B (with space at the beginning). If trim is enabled A, B will result in field field2 having value B (no whitespace). Quoted fields will be left untouched. ## Example Usage @@ -33,8 +35,6 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { } ``` -If the `trim` option is enabled then any whitespace in the beginning and in the end of each unquoted field will be trimmed. For example with configuration above, a value of A, B will result in field field2 having value {nbsp}B (with space at the beginning). If trim is enabled A, B will result in field field2 having value B (no whitespace). Quoted fields will be left untouched. - ## Schema @@ -60,4 +60,3 @@ If the `trim` option is enabled then any whitespace in the beginning and in the - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. - diff --git a/docs/data-sources/elasticsearch_ingest_processor_date.md b/docs/data-sources/elasticsearch_ingest_processor_date.md index 69d002d8a..14e7a8a31 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_date.md +++ b/docs/data-sources/elasticsearch_ingest_processor_date.md @@ -1,22 +1,21 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_date Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_date Data Source" description: |- - Helper data source to create a processor which parses dates from fields, and then uses the date or timestamp as the timestamp for the document. + Helper data source which can be used to create the configuration for a date processor. This processor parses dates from fields, and then uses the date or timestamp as the timestamp for the document. See the date processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/date-processor.html for more details. + By default, the date processor adds the parsed date as a new field called @timestamp. You can specify a different field by setting the target_field configuration parameter. 
Multiple date formats are supported as part of the same date processor definition. They will be used sequentially to attempt parsing the date field, in the same order they were defined as part of the processor definition. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_date +# elasticstack_elasticsearch_ingest_processor_date (Data Source) -Helper data source to which can be used to parse dates from fields, and then uses the date or timestamp as the timestamp for the document. -By default, the date processor adds the parsed date as a new field called `@timestamp`. You can specify a different field by setting the `target_field` configuration parameter. Multiple date formats are supported as part of the same date processor definition. They will be used sequentially to attempt parsing the date field, in the same order they were defined as part of the processor definition. +Helper data source which can be used to create the configuration for a date processor. This processor parses dates from fields, and then uses the date or timestamp as the timestamp for the document. See the [date processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/date-processor.html) for more details. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-processor.html +By default, the date processor adds the parsed date as a new field called `@timestamp`. You can specify a different field by setting the `target_field` configuration parameter. Multiple date formats are supported as part of the same date processor definition. They will be used sequentially to attempt parsing the date field, in the same order they were defined as part of the processor definition. ## Example Usage -Here is an example that adds the parsed date to the `timestamp` field based on the `initial_date` field: - ```terraform provider "elasticstack" { elasticsearch {} diff --git a/docs/data-sources/elasticsearch_ingest_processor_date_index_name.md b/docs/data-sources/elasticsearch_ingest_processor_date_index_name.md index 64580f566..409c639b8 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_date_index_name.md +++ b/docs/data-sources/elasticsearch_ingest_processor_date_index_name.md @@ -1,21 +1,22 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_date_index_name Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_date_index_name Data Source" description: |- - Helper data source to create a processor which helps to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. + Helper data source which can be used to create the configuration for a date index name processor. The purpose of this processor is to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. See the date index name processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/date-index-name-processor.html for more details. + The processor sets the _index metadata field with a date math index name expression based on the provided index name prefix, a date or timestamp field in the documents being processed and the provided date rounding. + First, this processor fetches the date or timestamp from a field in the document being processed. 
Optionally, date formatting can be configured on how the field’s value should be parsed into a date. Then this date, the provided index name prefix and the provided date rounding get formatted into a date math index name expression. Also here optionally date formatting can be specified on how the date should be formatted into a date math index name expression. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_date_index_name +# elasticstack_elasticsearch_ingest_processor_date_index_name (Data Source) -The purpose of this processor is to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. +Helper data source which can be used to create the configuration for a date index name processor. The purpose of this processor is to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. See the [date index name processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/date-index-name-processor.html) for more details. The processor sets the _index metadata field with a date math index name expression based on the provided index name prefix, a date or timestamp field in the documents being processed and the provided date rounding. First, this processor fetches the date or timestamp from a field in the document being processed. Optionally, date formatting can be configured on how the field’s value should be parsed into a date. Then this date, the provided index name prefix and the provided date rounding get formatted into a date math index name expression. Also here optionally date formatting can be specified on how the date should be formatted into a date math index name expression. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-index-name-processor.html - ## Example Usage ```terraform diff --git a/docs/data-sources/elasticsearch_ingest_processor_dissect.md b/docs/data-sources/elasticsearch_ingest_processor_dissect.md index 4f8bf46eb..5754da6df 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_dissect.md +++ b/docs/data-sources/elasticsearch_ingest_processor_dissect.md @@ -1,20 +1,22 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_dissect Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_dissect Data Source" description: |- - Helper data source to create a processor which extracts structured fields out of a single text field within a document. + Helper data source which can be used to create the configuration for a dissect processor. This processor extracts structured fields out of a single text field within a document. See the dissect processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/dissect-processor.html#dissect-processor for more details. + Similar to the Grok Processor, dissect also extracts structured fields out of a single text field within a document. However unlike the Grok Processor, dissect does not use Regular Expressions. This allows dissect’s syntax to be simple and for some cases faster than the Grok Processor. + Dissect matches a single text field against a defined pattern. 
--- -# Data Source: elasticstack_elasticsearch_ingest_processor_dissect +# elasticstack_elasticsearch_ingest_processor_dissect (Data Source) + +Helper data source which can be used to create the configuration for a dissect processor. This processor extracts structured fields out of a single text field within a document. See the [dissect processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/dissect-processor.html#dissect-processor) for more details. Similar to the Grok Processor, dissect also extracts structured fields out of a single text field within a document. However unlike the Grok Processor, dissect does not use Regular Expressions. This allows dissect’s syntax to be simple and for some cases faster than the Grok Processor. Dissect matches a single text field against a defined pattern. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dissect-processor.html - ## Example Usage ```terraform diff --git a/docs/data-sources/elasticsearch_ingest_processor_dot_expander.md b/docs/data-sources/elasticsearch_ingest_processor_dot_expander.md index 6c0743fae..0f5d11fc0 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_dot_expander.md +++ b/docs/data-sources/elasticsearch_ingest_processor_dot_expander.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_dot_expander Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_dot_expander Data Source" description: |- - Helper data source to create a processor which expands a field with dots into an object field. + Helper data source which can be used to create the configuration for a dot expander processor. This processor expands a field with dots into an object field. See the dot expand processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/dot-expand-processor.html for more details. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_dot_expander - -Expands a field with dots into an object field. This processor allows fields with dots in the name to be accessible by other processors in the pipeline. Otherwise these fields can’t be accessed by any processor. - -See: elastic.co/guide/en/elasticsearch/reference/current/dot-expand-processor.html +# elasticstack_elasticsearch_ingest_processor_dot_expander (Data Source) +Helper data source which can be used to create the configuration for a dot expander processor. This processor expands a field with dots into an object field. See the [dot expand processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/dot-expand-processor.html) for more details. 
## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_drop.md b/docs/data-sources/elasticsearch_ingest_processor_drop.md index cb7ebd9f8..c677749b7 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_drop.md +++ b/docs/data-sources/elasticsearch_ingest_processor_drop.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_drop Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_drop Data Source" description: |- - Helper data source to create a processor which drops the document without raising any errors. + Helper data source which can be used to create the configuration for a drop processor. This processor drops the document without raising any errors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/drop-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_drop - -Drops the document without raising any errors. This is useful to prevent the document from getting indexed based on some condition. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/drop-processor.html +# elasticstack_elasticsearch_ingest_processor_drop (Data Source) +Helper data source which can be used to create the configuration for a drop processor. This processor drops the document without raising any errors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/drop-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_enrich.md b/docs/data-sources/elasticsearch_ingest_processor_enrich.md index b1f66e565..4e4c61d14 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_enrich.md +++ b/docs/data-sources/elasticsearch_ingest_processor_enrich.md @@ -1,16 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_enrich Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_enrich Data Source" description: |- - Helper data source to create a processor which enriches documents with data from another index. + Helper data source which can be used to create the configuration for an enrich processor. The enrich processor can enrich documents with data from another index. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_enrich - -The enrich processor can enrich documents with data from another index. See enrich data section for more information about how to set this up. +# elasticstack_elasticsearch_ingest_processor_enrich (Data Source) -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-enriching-data.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-processor.html +Helper data source which can be used to create the configuration for an enrich processor. The enrich processor can enrich documents with data from another index. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_fail.md b/docs/data-sources/elasticsearch_ingest_processor_fail.md index 3ae3b778b..3ebc14892 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_fail.md +++ b/docs/data-sources/elasticsearch_ingest_processor_fail.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_fail Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_fail Data Source" description: |- - Helper data source to create a processor which raises an exception. + Helper data source which can be used to create the configuration for a fail processor. This processor raises an exception. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fail-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_fail - -Raises an exception. This is useful for when you expect a pipeline to fail and want to relay a specific message to the requester. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fail-processor.html +# elasticstack_elasticsearch_ingest_processor_fail (Data Source) +Helper data source which can be used to create the configuration for a fail processor. This processor raises an exception. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fail-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_fingerprint.md b/docs/data-sources/elasticsearch_ingest_processor_fingerprint.md index f852e051f..264b60787 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_fingerprint.md +++ b/docs/data-sources/elasticsearch_ingest_processor_fingerprint.md @@ -1,16 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_fingerprint Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_fingerprint Data Source" description: |- - Helper data source to create a processor which computes a hash of the document’s content. + Helper data source which can be used to create the configuration for a fingerprint processor. This processor computes a hash of the document’s content. See the fingerprint processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/fingerprint-processor.html for more details. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_fingerprint - -Computes a hash of the document’s content. You can use this hash for content fingerprinting. +# elasticstack_elasticsearch_ingest_processor_fingerprint (Data Source) -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fingerprint-processor.html +Helper data source which can be used to create the configuration for a fingerprint processor. This processor computes a hash of the document’s content. See the [fingerprint processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/fingerprint-processor.html) for more details. 
## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_foreach.md b/docs/data-sources/elasticsearch_ingest_processor_foreach.md index a448a7234..32aea4bdd 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_foreach.md +++ b/docs/data-sources/elasticsearch_ingest_processor_foreach.md @@ -1,22 +1,26 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_foreach Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_foreach Data Source" description: |- - Helper data source to create a processor which runs an ingest processor on each element of an array or object. + Helper data source which can be used to create the configuration for a foreach processor. This processor runs an ingest processor on each element of an array or object. See the foreach processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/foreach-processor.html for more details. + All ingest processors can run on array or object elements. However, if the number of elements is unknown, it can be cumbersome to process each one in the same way. + The foreach processor lets you specify a field containing array or object values and a processor to run on each element in the field. + Access keys and values + When iterating through an array or object, the foreach processor stores the current element’s value in the _ingest._value ingest metadata field. _ingest._value contains the entire element value, including any child fields. You can access child field values using dot notation on the _ingest._value field. + When iterating through an object, the foreach processor also stores the current element’s key as a string in _ingest._key. + You can access and change _ingest._key and _ingest._value in the processor. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_foreach +# elasticstack_elasticsearch_ingest_processor_foreach (Data Source) -Runs an ingest processor on each element of an array or object. +Helper data source which can be used to create the configuration for a foreach processor. This processor runs an ingest processor on each element of an array or object. See the [foreach processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/foreach-processor.html) for more details. All ingest processors can run on array or object elements. However, if the number of elements is unknown, it can be cumbersome to process each one in the same way. The `foreach` processor lets you specify a `field` containing array or object values and a `processor` to run on each element in the field. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/foreach-processor.html - - ### Access keys and values When iterating through an array or object, the foreach processor stores the current element’s value in the `_ingest._value` ingest metadata field. `_ingest._value` contains the entire element value, including any child fields. You can access child field values using dot notation on the `_ingest._value` field. @@ -25,8 +29,6 @@ When iterating through an object, the foreach processor also stores the current You can access and change `_ingest._key` and `_ingest._value` in the processor. 
- - ## Example Usage ```terraform diff --git a/docs/data-sources/elasticsearch_ingest_processor_geoip.md b/docs/data-sources/elasticsearch_ingest_processor_geoip.md index efec6890d..56cbd111c 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_geoip.md +++ b/docs/data-sources/elasticsearch_ingest_processor_geoip.md @@ -1,24 +1,24 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_geoip Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_geoip Data Source" description: |- - Helper data source to create a processor which adds information about the geographical location of an IPv4 or IPv6 address. + Helper data source which can be used to create the configuration for a geoip processor. The geoip processor adds information about the geographical location of an IPv4 or IPv6 address. See the geoip processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html for more details. + By default, the processor uses the GeoLite2 City, GeoLite2 Country, and GeoLite2 ASN GeoIP2 databases from MaxMind, shared under the CC BY-SA 4.0 license. Elasticsearch automatically downloads updates for these databases from the Elastic GeoIP endpoint: https://geoip.elastic.co/v1/database. To get download statistics for these updates, use the GeoIP stats API. + If your cluster can’t connect to the Elastic GeoIP endpoint or you want to manage your own updates, see Manage your own GeoIP2 database updates https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html#manage-geoip-database-updates. + If Elasticsearch can’t connect to the endpoint for 30 days all updated databases will become invalid. Elasticsearch will stop enriching documents with geoip data and will add tags: ["_geoip_expired_database"] field instead. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_geoip +# elasticstack_elasticsearch_ingest_processor_geoip (Data Source) -The geoip processor adds information about the geographical location of an IPv4 or IPv6 address. +Helper data source which can be used to create the configuration for a geoip processor. The geoip processor adds information about the geographical location of an IPv4 or IPv6 address. See the [geoip processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html) for more details. By default, the processor uses the GeoLite2 City, GeoLite2 Country, and GeoLite2 ASN GeoIP2 databases from MaxMind, shared under the CC BY-SA 4.0 license. Elasticsearch automatically downloads updates for these databases from the Elastic GeoIP endpoint: https://geoip.elastic.co/v1/database. To get download statistics for these updates, use the GeoIP stats API. If your cluster can’t connect to the Elastic GeoIP endpoint or you want to manage your own updates, [see Manage your own GeoIP2 database updates](https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html#manage-geoip-database-updates). -If Elasticsearch can’t connect to the endpoint for 30 days all updated databases will become invalid. Elasticsearch will stop enriching documents with geoip data and will add tags: ["_geoip_expired_database"] field instead. 
- - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html - +If Elasticsearch can’t connect to the endpoint for 30 days all updated databases will become invalid. Elasticsearch will stop enriching documents with geoip data and will add `tags: ["_geoip_expired_database"]` field instead. ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_grok.md b/docs/data-sources/elasticsearch_ingest_processor_grok.md index 9a078fc26..4a002a33c 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_grok.md +++ b/docs/data-sources/elasticsearch_ingest_processor_grok.md @@ -1,23 +1,22 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_grok Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_grok Data Source" description: |- - Helper data source to create a processor which extracts structured fields out of a single text field within a document. + Helper data source which can be used to create the configuration for a grok processor. This processor extracts structured fields out of a single text field within a document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html + This processor comes packaged with many reusable patterns https://github.com/elastic/elasticsearch/blob/master/libs/grok/src/main/resources/patterns. + If you need help building patterns to match your logs, you will find the Grok Debugger https://www.elastic.co/guide/en/kibana/master/xpack-grokdebugger.html tool quite useful! The Grok Constructor https://grokconstructor.appspot.com/ is also a useful tool. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_grok +# elasticstack_elasticsearch_ingest_processor_grok (Data Source) -Extracts structured fields out of a single text field within a document. You choose which field to extract matched fields from, as well as the grok pattern you expect will match. A grok pattern is like a regular expression that supports aliased expressions that can be reused. +Helper data source which can be used to create the configuration for a grok processor. This processor extracts structured fields out of a single text field within a document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html This processor comes packaged with many [reusable patterns](https://github.com/elastic/elasticsearch/blob/master/libs/grok/src/main/resources/patterns). If you need help building patterns to match your logs, you will find the [Grok Debugger](https://www.elastic.co/guide/en/kibana/master/xpack-grokdebugger.html) tool quite useful! [The Grok Constructor](https://grokconstructor.appspot.com/) is also a useful tool. 
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html - - ## Example Usage ```terraform diff --git a/docs/data-sources/elasticsearch_ingest_processor_gsub.md b/docs/data-sources/elasticsearch_ingest_processor_gsub.md index 3798599c7..e75bb2f4a 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_gsub.md +++ b/docs/data-sources/elasticsearch_ingest_processor_gsub.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_gsub Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_gsub Data Source" description: |- - Helper data source to create a processor which converts a string field by applying a regular expression and a replacement. + Helper data source which can be used to create the configuration for a gsub processor. This processor converts a string field by applying a regular expression and a replacement. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/gsub-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_gsub - -Converts a string field by applying a regular expression and a replacement. If the field is an array of string, all members of the array will be converted. If any non-string values are encountered, the processor will throw an exception. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/gsub-processor.html +# elasticstack_elasticsearch_ingest_processor_gsub (Data Source) +Helper data source which can be used to create the configuration for a gsub processor. This processor converts a string field by applying a regular expression and a replacement. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/gsub-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_html_strip.md b/docs/data-sources/elasticsearch_ingest_processor_html_strip.md index ba34acda0..171b76ed8 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_html_strip.md +++ b/docs/data-sources/elasticsearch_ingest_processor_html_strip.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_html_strip Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_html_strip Data Source" description: |- - Helper data source to create a processor which removes HTML tags from the field. + Helper data source which can be used to create the configuration for an HTML strip processor. This processor removes HTML tags from the field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/htmlstrip-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_html_strip - -Removes HTML tags from the field. If the field is an array of strings, HTML tags will be removed from all members of the array. - -See: templates/data-sources/elasticsearch_ingest_processor_html_strip.md.tmpl +# elasticstack_elasticsearch_ingest_processor_html_strip (Data Source) +Helper data source which can be used to create the configuration for an HTML strip processor. This processor removes HTML tags from the field. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/htmlstrip-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_join.md b/docs/data-sources/elasticsearch_ingest_processor_join.md index 866178a67..a46139936 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_join.md +++ b/docs/data-sources/elasticsearch_ingest_processor_join.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_join Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_join Data Source" description: |- - Helper data source to create a processor which joins each element of an array into a single string using a separator character between each element. + Helper data source which can be used to create the configuration for a join processor. This processor joins each element of an array into a single string using a separator character between each element. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/join-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_join - -Joins each element of an array into a single string using a separator character between each element. Throws an error when the field is not an array. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/join-processor.html +# elasticstack_elasticsearch_ingest_processor_join (Data Source) +Helper data source which can be used to create the configuration for a join processor. This processor joins each element of an array into a single string using a separator character between each element. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/join-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_json.md b/docs/data-sources/elasticsearch_ingest_processor_json.md index f7b3d3c5a..e2a1e8989 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_json.md +++ b/docs/data-sources/elasticsearch_ingest_processor_json.md @@ -1,16 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_json Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_json Data Source" description: |- - Helper data source to create a processor which converts a JSON string into a structured JSON object. + Helper data source which can be used to create the configuration for a JSON processor. This processor converts a JSON string into a structured JSON object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/json-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_json - -Converts a JSON string into a structured JSON object. +# elasticstack_elasticsearch_ingest_processor_json (Data Source) -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/json-processor.html +Helper data source which can be used to create the configuration for a JSON processor. This processor converts a JSON string into a structured JSON object. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/json-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_kv.md b/docs/data-sources/elasticsearch_ingest_processor_kv.md index 7dc000a0a..c3c901e98 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_kv.md +++ b/docs/data-sources/elasticsearch_ingest_processor_kv.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_kv Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_kv Data Source" description: |- - Helper data source to create a processor which helps automatically parse messages (or specific event fields) which are of the `foo=bar` variety. + Helper data source which can be used to create the configuration for a KV processor. This processor helps automatically parse messages (or specific event fields) which are of the foo=bar variety. See the KV processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/kv-processor.html for more details. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_kv - -This processor helps automatically parse messages (or specific event fields) which are of the `foo=bar` variety. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/kv-processor.html +# elasticstack_elasticsearch_ingest_processor_kv (Data Source) +Helper data source which can be used to create the configuration for a KV processor. This processor helps automatically parse messages (or specific event fields) which are of the foo=bar variety. See the [KV processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/kv-processor.html) for more details. ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_lowercase.md b/docs/data-sources/elasticsearch_ingest_processor_lowercase.md index b8f6c903b..0e030f1cd 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_lowercase.md +++ b/docs/data-sources/elasticsearch_ingest_processor_lowercase.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_lowercase Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_lowercase Data Source" description: |- - Helper data source to create a processor which converts a string to its lowercase equivalent. + Helper data source which can be used to create the configuration for a lowercase processor. This processor converts a string to its lowercase equivalent. If the field is an array of strings, all members of the array will be converted. See the lowercase processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/lowercase-processor.html for more details. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_lowercase - -Converts a string to its lowercase equivalent. If the field is an array of strings, all members of the array will be converted. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/lowercase-processor.html +# elasticstack_elasticsearch_ingest_processor_lowercase (Data Source) +Helper data source which can be used to create the configuration for a lowercase processor. 
This processor converts a string to its lowercase equivalent. If the field is an array of strings, all members of the array will be converted. See the [lowercase processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/lowercase-processor.html) for more details. ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_network_direction.md b/docs/data-sources/elasticsearch_ingest_processor_network_direction.md index 7ab772a41..956158833 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_network_direction.md +++ b/docs/data-sources/elasticsearch_ingest_processor_network_direction.md @@ -1,21 +1,26 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_network_direction Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_network_direction Data Source" description: |- - Helper data source to create a processor which calculates the network direction given a source IP address, destination IP address, and a list of internal networks. + Helper data source which can be used to create the configuration for a network direction processor. This processor calculates the network direction given a source IP address, destination IP address, and a list of internal networks. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/network-direction-processor.html + The network direction processor reads IP addresses from Elastic Common Schema (ECS) fields by default. If you use the ECS, only the internal_networks option must be specified. + One of either internal_networks or internal_networks_field must be specified. If internal_networks_field is specified, it follows the behavior specified by ignore_missing. + Supported named network ranges + The named ranges supported for the internal_networks option are: + loopback - Matches loopback addresses in the range of 127.0.0.0/8 or ::1/128.unicast or global_unicast - Matches global unicast addresses defined in RFC 1122, RFC 4632, and RFC 4291 with the exception of the IPv4 broadcast address (255.255.255.255). This includes private address ranges.multicast - Matches multicast addresses.interface_local_multicast - Matches IPv6 interface-local multicast addresses.link_local_unicast - Matches link-local unicast addresses.link_local_multicast - Matches link-local multicast addresses.private - Matches private address ranges defined in RFC 1918 (IPv4) and RFC 4193 (IPv6).public - Matches addresses that are not loopback, unspecified, IPv4 broadcast, link local unicast, link local multicast, interface local multicast, or private.unspecified - Matches unspecified addresses (either the IPv4 address "0.0.0.0" or the IPv6 address "::"). --- -# Data Source: elasticstack_elasticsearch_ingest_processor_network_direction +# elasticstack_elasticsearch_ingest_processor_network_direction (Data Source) -Calculates the network direction given a source IP address, destination IP address, and a list of internal networks. +Helper data source which can be used to create the configuration for a network direction processor. This processor calculates the network direction given a source IP address, destination IP address, and a list of internal networks. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/network-direction-processor.html The network direction processor reads IP addresses from Elastic Common Schema (ECS) fields by default. If you use the ECS, only the `internal_networks` option must be specified. - One of either `internal_networks` or `internal_networks_field` must be specified. If `internal_networks_field` is specified, it follows the behavior specified by `ignore_missing`. -### Supported named network rangese +### Supported named network ranges The named ranges supported for the internal_networks option are: @@ -29,10 +34,6 @@ The named ranges supported for the internal_networks option are: * `public` - Matches addresses that are not loopback, unspecified, IPv4 broadcast, link local unicast, link local multicast, interface local multicast, or private. * `unspecified` - Matches unspecified addresses (either the IPv4 address "0.0.0.0" or the IPv6 address "::"). - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/network-direction-processor.html - - ## Example Usage ```terraform diff --git a/docs/data-sources/elasticsearch_ingest_processor_pipeline.md b/docs/data-sources/elasticsearch_ingest_processor_pipeline.md index 4374db6ce..8cf90edd5 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_pipeline.md +++ b/docs/data-sources/elasticsearch_ingest_processor_pipeline.md @@ -1,19 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_pipeline Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_pipeline Data Source" description: |- - Helper data source to create a processor which executes another pipeline. + Helper data source which can be used to create the configuration for a pipeline processor. This processor executes another pipeline. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/pipeline-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_pipeline - -Executes another pipeline. - -The name of the current pipeline can be accessed from the `_ingest.pipeline` ingest metadata key. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/pipeline-processor.html +# elasticstack_elasticsearch_ingest_processor_pipeline (Data Source) +Helper data source which can be used to create the configuration for a pipeline processor. This processor executes another pipeline. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/pipeline-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_registered_domain.md b/docs/data-sources/elasticsearch_ingest_processor_registered_domain.md index 215a0be3a..0641c9983 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_registered_domain.md +++ b/docs/data-sources/elasticsearch_ingest_processor_registered_domain.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_registered_domain Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_registered_domain Data Source" description: |- - Helper data source to create a processor which Extracts the registered domain, sub-domain, and top-level domain from a fully qualified domain name. 
+ Helper data source which can be used to create the configuration for a registered domain processor. This processor extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). See: https://www.elastic.co/guide/en/elasticsearch/reference/current/registered-domain-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_registered_domain - -Extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). Uses the registered domains defined in the Mozilla Public Suffix List. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/registered-domain-processor.html +# elasticstack_elasticsearch_ingest_processor_registered_domain (Data Source) +Helper data source which can be used to create the configuration for a registered domain processor. This processor extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). See: https://www.elastic.co/guide/en/elasticsearch/reference/current/registered-domain-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_remove.md b/docs/data-sources/elasticsearch_ingest_processor_remove.md index 5a5a1984c..d1db4f7e3 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_remove.md +++ b/docs/data-sources/elasticsearch_ingest_processor_remove.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_remove Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_remove Data Source" description: |- - Helper data source to create a processor which removes existing fields. + Helper data source which can be used to create the configuration for a remove processor. This processor removes existing fields. See the remove processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/remove-processor.html for more details. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_remove - -Removes existing fields. If one field doesn’t exist, an exception will be thrown. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/remove-processor.html +# elasticstack_elasticsearch_ingest_processor_remove (Data Source) +Helper data source which can be used to create the configuration for a remove processor. This processor removes existing fields. See the [remove processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/remove-processor.html) for more details. 
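As a quick illustration of the remove helper described above, the fragment below sketches its configuration; the `field` argument, shown here as a list of fields to drop, is an assumption mirroring the Elasticsearch remove processor, and the provider connection is assumed to be configured elsewhere.

```terraform
# Sketch only: `field` is assumed to take the field(s) the processor should drop.
data "elasticstack_elasticsearch_ingest_processor_remove" "example" {
  field = ["user_agent", "url"]
}
```

Its read-only `json` attribute can then be placed in a pipeline's processor list, as in the grok sketch earlier.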
## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_rename.md b/docs/data-sources/elasticsearch_ingest_processor_rename.md index f1268b4f5..57effd437 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_rename.md +++ b/docs/data-sources/elasticsearch_ingest_processor_rename.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_rename Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_rename Data Source" description: |- - Helper data source to create a processor which renames an existing field. + Helper data source which can be used to create the configuration for a rename processor. This processor renames an existing field. See the rename processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/rename-processor.html for more details. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_rename - -Renames an existing field. If the field doesn’t exist or the new name is already used, an exception will be thrown. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/rename-processor.html +# elasticstack_elasticsearch_ingest_processor_rename (Data Source) +Helper data source which can be used to create the configuration for a rename processor. This processor renames an existing field. See the [rename processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/rename-processor.html) for more details. ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_reroute.md b/docs/data-sources/elasticsearch_ingest_processor_reroute.md index bcb5a9bb0..32b763744 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_reroute.md +++ b/docs/data-sources/elasticsearch_ingest_processor_reroute.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_reroute Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_reroute Data Source" description: |- - Helper data source to create a processor which reroutes a document to a different data stream, index, or index alias. + Helper data source which can be used to create the configuration for a reroute processor. This processor reroutes a document to a different data stream, index, or index alias. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/reroute-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_reroute - -Reroutes a document to a different data stream, index, or index alias. This processor is useful for routing documents based on data stream routing rules. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/reroute-processor.html +# elasticstack_elasticsearch_ingest_processor_reroute (Data Source) +Helper data source which can be used to create the configuration for a reroute processor. This processor reroutes a document to a different data stream, index, or index alias. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/reroute-processor.html ## Example Usage @@ -52,4 +50,4 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { ### Read-Only - `id` (String) Internal identifier of the resource. 
-- `json` (String) JSON representation of this data source. \ No newline at end of file +- `json` (String) JSON representation of this data source. diff --git a/docs/data-sources/elasticsearch_ingest_processor_script.md b/docs/data-sources/elasticsearch_ingest_processor_script.md index b52d3cb41..fda8e951d 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_script.md +++ b/docs/data-sources/elasticsearch_ingest_processor_script.md @@ -1,27 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_script Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_script Data Source" description: |- - Helper data source to create a processor which runs an inline or stored script on incoming documents. + Helper data source which can be used to create the configuration for a script processor. This processor runs an inline or stored script on incoming documents. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/script-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_script - -Runs an inline or stored script on incoming documents. The script runs in the ingest context. - -The script processor uses the script cache to avoid recompiling the script for each incoming document. To improve performance, ensure the script cache is properly sized before using a script processor in production. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/script-processor.html - -### Access source fields - -The script processor parses each incoming document’s JSON source fields into a set of maps, lists, and primitives. To access these fields with a Painless script, use the map access operator: `ctx['my-field']`. You can also use the shorthand `ctx.` syntax. - -### Access metadata fields - -You can also use a script processor to access metadata fields. +# elasticstack_elasticsearch_ingest_processor_script (Data Source) +Helper data source which can be used to create the configuration for a script processor. This processor runs an inline or stored script on incoming documents. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/script-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_set.md b/docs/data-sources/elasticsearch_ingest_processor_set.md index 6eeab00e1..bf6b65838 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_set.md +++ b/docs/data-sources/elasticsearch_ingest_processor_set.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_set Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_set Data Source" description: |- - Helper data source to create a processor which sets one field and associates it with the specified value. + Helper data source which can be used to create the configuration for a set processor. This processor sets one field and associates it with the specified value. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/set-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_set - -Sets one field and associates it with the specified value. If the field already exists, its value will be replaced with the provided one. 
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/set-processor.html +# elasticstack_elasticsearch_ingest_processor_set (Data Source) +Helper data source which can be used to create the configuration for a set processor. This processor sets one field and associates it with the specified value. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/set-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_set_security_user.md b/docs/data-sources/elasticsearch_ingest_processor_set_security_user.md index 62249a9f0..2586e6470 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_set_security_user.md +++ b/docs/data-sources/elasticsearch_ingest_processor_set_security_user.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_set_security_user Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_set_security_user Data Source" description: |- - Helper data source to create a processor which sets user-related details from the current authenticated user to the current document by pre-processing the ingest. + Helper data source which can be used to create the configuration for a set security user processor. This processor sets user-related details (such as username, roles, email, full_name, metadata, api_key, realm and authentication_type) from the current authenticated user to the current document by pre-processing the ingest. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-node-set-security-user-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_set_security_user - -Sets user-related details (such as `username`, `roles`, `email`, `full_name`, `metadata`, `api_key`, `realm` and `authentication_typ`e) from the current authenticated user to the current document by pre-processing the ingest. The `api_key` property exists only if the user authenticates with an API key. It is an object containing the id, name and metadata (if it exists and is non-empty) fields of the API key. The realm property is also an object with two fields, name and type. When using API key authentication, the realm property refers to the realm from which the API key is created. The `authentication_type property` is a string that can take value from `REALM`, `API_KEY`, `TOKEN` and `ANONYMOUS`. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-node-set-security-user-processor.html +# elasticstack_elasticsearch_ingest_processor_set_security_user (Data Source) +Helper data source which can be used to create the configuration for a set security user processor. This processor sets user-related details (such as username, roles, email, full_name, metadata, api_key, realm and authentication_type) from the current authenticated user to the current document by pre-processing the ingest. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-node-set-security-user-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_sort.md b/docs/data-sources/elasticsearch_ingest_processor_sort.md index c4c240503..a565104b4 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_sort.md +++ b/docs/data-sources/elasticsearch_ingest_processor_sort.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_sort Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_sort Data Source" description: |- - Helper data source to create a processor which sorts the elements of an array ascending or descending. + Helper data source which can be used to create the configuration for a sort processor. This processor sorts the elements of an array ascending or descending. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_sort - -Sorts the elements of an array ascending or descending. Homogeneous arrays of numbers will be sorted numerically, while arrays of strings or heterogeneous arrays of strings + numbers will be sorted lexicographically. Throws an error when the field is not an array. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-processor.html +# elasticstack_elasticsearch_ingest_processor_sort (Data Source) +Helper data source which can be used to create the configuration for a sort processor. This processor sorts the elements of an array ascending or descending. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_split.md b/docs/data-sources/elasticsearch_ingest_processor_split.md index d8f318509..c95f3b878 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_split.md +++ b/docs/data-sources/elasticsearch_ingest_processor_split.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_split Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_split Data Source" description: |- - Helper data source to create a processor which splits a field into an array using a separator character. + Helper data source which can be used to create the configuration for a split processor. This processor splits a field into an array using a separator character. See the split processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/split-processor.html for more details. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_split - -Splits a field into an array using a separator character. Only works on string fields. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/split-processor.html +# elasticstack_elasticsearch_ingest_processor_split (Data Source) +Helper data source which can be used to create the configuration for a split processor. This processor splits a field into an array using a separator character. See the [split processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/split-processor.html) for more details. 
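A small fragment for the split helper documented above; `field` and `separator` are assumed argument names mirroring the Elasticsearch split processor, with the separator written as an escaped regular expression.

```terraform
# Sketch only: split `my_field` on runs of whitespace ("\\s+" is the literal regex \s+).
data "elasticstack_elasticsearch_ingest_processor_split" "example" {
  field     = "my_field"
  separator = "\\s+"
}
```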
## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_trim.md b/docs/data-sources/elasticsearch_ingest_processor_trim.md index 4f230cff9..224fe5b99 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_trim.md +++ b/docs/data-sources/elasticsearch_ingest_processor_trim.md @@ -1,19 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_trim Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_trim Data Source" description: |- - Helper data source to create a processor which trims whitespace from field. + Helper data source which can be used to create the configuration for a trim processor. This processor trims whitespace from field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/trim-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_trim - -Trims whitespace from field. If the field is an array of strings, all members of the array will be trimmed. - -**NOTE:** This only works on leading and trailing whitespace. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/trim-processor.html +# elasticstack_elasticsearch_ingest_processor_trim (Data Source) +Helper data source which can be used to create the configuration for a trim processor. This processor trims whitespace from field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/trim-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_uppercase.md b/docs/data-sources/elasticsearch_ingest_processor_uppercase.md index 6954ed14c..21b3c7fb9 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_uppercase.md +++ b/docs/data-sources/elasticsearch_ingest_processor_uppercase.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_uppercase Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_uppercase Data Source" description: |- - Helper data source to create a processor which converts a string to its uppercase equivalent. + Helper data source which can be used to create the configuration for an uppercase processor. This processor converts a string to its uppercase equivalent. If the field is an array of strings, all members of the array will be converted. See the uppercase processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/uppercase-processor.html for more details. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_uppercase - -Converts a string to its uppercase equivalent. If the field is an array of strings, all members of the array will be converted. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uppercase-processor.html +# elasticstack_elasticsearch_ingest_processor_uppercase (Data Source) +Helper data source which can be used to create the configuration for an uppercase processor. This processor converts a string to its uppercase equivalent. If the field is an array of strings, all members of the array will be converted. See the [uppercase processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/uppercase-processor.html) for more details. 
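The trim and uppercase helpers above are natural to chain, so here is a hedged sketch composing both into one pipeline. The `field` arguments and the pipeline's `name` and `processors` arguments are assumptions mirroring the Elasticsearch processors and the provider's usual wiring; processors run in list order.

```terraform
# Sketch only: `field` on both data sources is an assumed argument name.
data "elasticstack_elasticsearch_ingest_processor_trim" "message" {
  field = "message"
}

data "elasticstack_elasticsearch_ingest_processor_uppercase" "level" {
  field = "level"
}

# Processors run in list order: trim first, then uppercase.
resource "elasticstack_elasticsearch_ingest_pipeline" "normalize" {
  name = "normalize-example"
  processors = [
    data.elasticstack_elasticsearch_ingest_processor_trim.message.json,
    data.elasticstack_elasticsearch_ingest_processor_uppercase.level.json,
  ]
}
```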
## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_uri_parts.md b/docs/data-sources/elasticsearch_ingest_processor_uri_parts.md index 5867f8baf..2e6db8a25 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_uri_parts.md +++ b/docs/data-sources/elasticsearch_ingest_processor_uri_parts.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_uri_parts Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_uri_parts Data Source" description: |- - Helper data source to create a processor which parses a Uniform Resource Identifier (URI) string and extracts its components as an object. + Helper data source which can be used to create the configuration for a URI parts processor. This processor parses a Uniform Resource Identifier (URI) string and extracts its components as an object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uri-parts-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_uri_parts - -Parses a Uniform Resource Identifier (URI) string and extracts its components as an object. This URI object includes properties for the URI’s domain, path, fragment, port, query, scheme, user info, username, and password. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uri-parts-processor.html +# elasticstack_elasticsearch_ingest_processor_uri_parts (Data Source) +Helper data source which can be used to create the configuration for a URI parts processor. This processor parses a Uniform Resource Identifier (URI) string and extracts its components as an object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uri-parts-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_urldecode.md b/docs/data-sources/elasticsearch_ingest_processor_urldecode.md index e8dae0d43..1e32ea90f 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_urldecode.md +++ b/docs/data-sources/elasticsearch_ingest_processor_urldecode.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_urldecode Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_urldecode Data Source" description: |- - Helper data source to create a processor which URL-decodes a string. + Helper data source which can be used to create the configuration for a URL-decode processor. This processor URL-decodes a string. See the URL decode processor documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/urldecode-processor.html for more details. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_urldecode - -URL-decodes a string. If the field is an array of strings, all members of the array will be decoded. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/urldecode-processor.html +# elasticstack_elasticsearch_ingest_processor_urldecode (Data Source) +Helper data source which can be used to create the configuration for a URL-decode processor. This processor URL-decodes a string. See the [URL decode processor documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/urldecode-processor.html) for more details. 
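The URL-decode and URI parts helpers above pair naturally: decode an encoded URL first, then extract its components. In this sketch both `field` arguments are assumed names mirroring the corresponding Elasticsearch processors.

```terraform
# Sketch only: both `field` arguments are assumed names.
data "elasticstack_elasticsearch_ingest_processor_urldecode" "example" {
  field = "my_url"
}

data "elasticstack_elasticsearch_ingest_processor_uri_parts" "example" {
  field = "my_url"
}
```

Their `json` outputs would then be listed in a pipeline's processors, URL-decode first.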
## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_user_agent.md b/docs/data-sources/elasticsearch_ingest_processor_user_agent.md index 1c728515b..3d07503f0 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_user_agent.md +++ b/docs/data-sources/elasticsearch_ingest_processor_user_agent.md @@ -1,20 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_user_agent Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_user_agent Data Source" description: |- - Helper data source to create a processor which extracts details from the user agent string a browser sends with its web requests. + Helper data source which can be used to create the configuration for a user agent processor. This processor extracts details from the user agent string a browser sends with its web requests. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/user-agent-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_user_agent - -The `user_agent` processor extracts details from the user agent string a browser sends with its web requests. This processor adds this information by default under the `user_agent` field. - -The ingest-user-agent module ships by default with the regexes.yaml made available by uap-java with an Apache 2.0 license. For more details see https://github.com/ua-parser/uap-core. - - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/user-agent-processor.html +# elasticstack_elasticsearch_ingest_processor_user_agent (Data Source) +Helper data source which can be used to create the configuration for a user agent processor. This processor extracts details from the user agent string a browser sends with its web requests. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/user-agent-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_security_role.md b/docs/data-sources/elasticsearch_security_role.md index f41c364c0..4f50c343b 100644 --- a/docs/data-sources/elasticsearch_security_role.md +++ b/docs/data-sources/elasticsearch_security_role.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_security_role Data Source - terraform-provider-elasticstack" subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_role Data Source" description: |- - Retrieves roles in the native realm. + Retrieves roles in the native realm. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html --- -# Data Source: elasticstack_elasticsearch_security_role +# elasticstack_elasticsearch_security_role (Data Source) -Use this data source to get information about an existing Elasticsearch role. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html +Retrieves roles in the native realm. 
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_security_role_mapping.md b/docs/data-sources/elasticsearch_security_role_mapping.md index 01c6ca5ab..83eef6d35 100644 --- a/docs/data-sources/elasticsearch_security_role_mapping.md +++ b/docs/data-sources/elasticsearch_security_role_mapping.md @@ -1,12 +1,13 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_security_role_mapping Data Source - terraform-provider-elasticstack" subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_role_mapping Data Source" description: |- - Retrieves role mappings. + Retrieves role mappings. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html --- -# Data Source: elasticstack_elasticsearch_security_role_mapping +# elasticstack_elasticsearch_security_role_mapping (Data Source) Retrieves role mappings. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html diff --git a/docs/data-sources/elasticsearch_security_user.md b/docs/data-sources/elasticsearch_security_user.md index 6fc2c049c..5d180e67a 100644 --- a/docs/data-sources/elasticsearch_security_user.md +++ b/docs/data-sources/elasticsearch_security_user.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_security_user Data Source - terraform-provider-elasticstack" subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_user Data Source" description: |- - Gets information about Elasticsearch user. + Get the information about the user in the ES cluster. See the security API get user documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html for more details. --- -# Data Source: elasticstack_elasticsearch_security_user +# elasticstack_elasticsearch_security_user (Data Source) -Use this data source to get information about existing Elasticsearch user. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html". +Get the information about the user in the ES cluster. See the [security API get user documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html) for more details. ## Example Usage diff --git a/docs/data-sources/elasticsearch_snapshot_repository.md b/docs/data-sources/elasticsearch_snapshot_repository.md index 7004d0ef4..9987b6e1e 100644 --- a/docs/data-sources/elasticsearch_snapshot_repository.md +++ b/docs/data-sources/elasticsearch_snapshot_repository.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_snapshot_repository Data Source - terraform-provider-elasticstack" subcategory: "Snapshot" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_snapshot_repository Data Source" description: |- Gets information about the registered snapshot repositories. --- -# Data Source: elasticstack_elasticsearch_snapshot_repository +# elasticstack_elasticsearch_snapshot_repository (Data Source) -This data source provides the information about the registered snaphosts repositories +Gets information about the registered snapshot repositories. 
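For the snapshot repository data source described above, a minimal hedged read could look like the following; the `name` argument is an assumption, and the attributes returned depend on the repository type.

```terraform
# Sketch only: look up an existing repository by name (assumed argument).
data "elasticstack_elasticsearch_snapshot_repository" "example" {
  name = "my-backup-repo"
}

output "repository" {
  value = data.elasticstack_elasticsearch_snapshot_repository.example
}
```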
## Example Usage diff --git a/docs/data-sources/fleet_enrollment_tokens.md b/docs/data-sources/fleet_enrollment_tokens.md index 215ba3621..bfe6753e9 100644 --- a/docs/data-sources/fleet_enrollment_tokens.md +++ b/docs/data-sources/fleet_enrollment_tokens.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_fleet_enrollment_tokens Data Source - terraform-provider-elasticstack" subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_enrollment_tokens Data Source" description: |- - Gets information about Fleet Enrollment Tokens. See https://www.elastic.co/guide/en/fleet/current/fleet-enrollment-tokens.html + Retrieves Elasticsearch API keys used to enroll Elastic Agents in Fleet. See the Fleet enrollment tokens documentation https://www.elastic.co/guide/en/fleet/current/fleet-enrollment-tokens.html for more details. --- -# Data Source: elasticstack_fleet_enrollment_tokens +# elasticstack_fleet_enrollment_tokens (Data Source) -This data source provides information about Fleet Enrollment Tokens. +Retrieves Elasticsearch API keys used to enroll Elastic Agents in Fleet. See the [Fleet enrollment tokens documentation](https://www.elastic.co/guide/en/fleet/current/fleet-enrollment-tokens.html) for more details. ## Example Usage diff --git a/docs/data-sources/fleet_integration.md b/docs/data-sources/fleet_integration.md index 4b329781c..9a9493cf1 100644 --- a/docs/data-sources/fleet_integration.md +++ b/docs/data-sources/fleet_integration.md @@ -1,12 +1,21 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_fleet_integration Data Source - terraform-provider-elasticstack" subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_integration Data Source" description: |- - Gets information about a Fleet integration package. + This data source provides information about a Fleet integration package. Currently, + the data source will retrieve the latest available version of the package. Version + selection is determined by the Fleet API, which is currently based on semantic + versioning. + By default, the highest GA release version will be selected. If a + package is not GA (the version is below 1.0.0) or if a new non-GA version of the + package is to be selected (i.e., the GA version of the package is 1.5.0, but there's + a new 1.5.1-beta version available), then the prerelease parameter in the plan + should be set to true. --- -# Data Source: elasticstack_fleet_integration +# elasticstack_fleet_integration (Data Source) This data source provides information about a Fleet integration package. Currently, the data source will retrieve the latest available version of the package. Version diff --git a/docs/data-sources/kibana_action_connector.md b/docs/data-sources/kibana_action_connector.md index 5513b5d70..ed5b3b505 100644 --- a/docs/data-sources/kibana_action_connector.md +++ b/docs/data-sources/kibana_action_connector.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_kibana_action_connector Data Source - terraform-provider-elasticstack" subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_action_connector Data Source" description: |- - Retrieve a specific action connector role. See https://www.elastic.co/guide/en/kibana/current/get-all-connectors-api.html. + Search for a connector by name, space id, and type. 
Note, that this data source will fail if more than one connector shares the same name. --- -# Data Source: elasticstack_kibana_action_connector +# elasticstack_kibana_action_connector (Data Source) -Use this data source to get information about an existing action connector. +Search for a connector by name, space id, and type. Note, that this data source will fail if more than one connector shares the same name. ## Example Usage diff --git a/docs/data-sources/kibana_security_role.md b/docs/data-sources/kibana_security_role.md index f2db9f711..8f145f918 100644 --- a/docs/data-sources/kibana_security_role.md +++ b/docs/data-sources/kibana_security_role.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_kibana_security_role Data Source - terraform-provider-elasticstack" subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_security_role Data Source" description: |- - Retrieve a specific Kibana role. See https://www.elastic.co/guide/en/kibana/master/role-management-specific-api-get.html + Retrieve a specific role. See the role management API documentation https://www.elastic.co/guide/en/kibana/current/role-management-specific-api-get.html for more details. --- -# Data Source: elasticstack_kibana_security_role +# elasticstack_kibana_security_role (Data Source) -Use this data source to get information about an existing Kibana role. +Retrieve a specific role. See the [role management API documentation](https://www.elastic.co/guide/en/kibana/current/role-management-specific-api-get.html) for more details. ## Example Usage diff --git a/docs/data-sources/kibana_spaces.md b/docs/data-sources/kibana_spaces.md index efcfdc282..df1d7fb28 100644 --- a/docs/data-sources/kibana_spaces.md +++ b/docs/data-sources/kibana_spaces.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_kibana_spaces Data Source - terraform-provider-elasticstack" subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_spaces Data Source" description: |- - Retrieve all Kibana spaces. See https://www.elastic.co/guide/en/kibana/master/spaces-api-get-all.html + Use this data source to retrieve and get information about all existing Kibana spaces. See https://www.elastic.co/guide/en/kibana/master/spaces-api-get-all.html --- -# Data Source: elasticstack_kibana_spaces +# elasticstack_kibana_spaces (Data Source) -Use this data source to retrieve and get information about all existing Kibana spaces. +Use this data source to retrieve and get information about all existing Kibana spaces. See https://www.elastic.co/guide/en/kibana/master/spaces-api-get-all.html ## Example Usage diff --git a/docs/resources/apm_agent_configuration.md b/docs/resources/apm_agent_configuration.md index 8df89ceae..df99f512e 100644 --- a/docs/resources/apm_agent_configuration.md +++ b/docs/resources/apm_agent_configuration.md @@ -1,14 +1,15 @@ + --- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_apm_agent_configuration Resource" +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_apm_agent_configuration Resource - terraform-provider-elasticstack" +subcategory: "APM" description: |- - Creates or updates an APM agent configuration + Creates or updates an APM agent configuration. See https://www.elastic.co/docs/solutions/observability/apm/apm-agent-central-configuration. 
--- -# Resource: elasticstack_apm_agent_configuration +# elasticstack_apm_agent_configuration (Resource) -Creates or updates an APM agent configuration. See https://www.elastic.co/docs/solutions/observability/apm/apm-agent-central-configuration +Creates or updates an APM agent configuration. See https://www.elastic.co/docs/solutions/observability/apm/apm-agent-central-configuration. ## Example Usage @@ -49,6 +50,8 @@ resource "elasticstack_apm_agent_configuration" "test_config" { Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_apm_agent_configuration.test_configuration my-service:production ``` diff --git a/docs/resources/elasticsearch_cluster_settings.md b/docs/resources/elasticsearch_cluster_settings.md index bd591363e..8c675c24c 100644 --- a/docs/resources/elasticsearch_cluster_settings.md +++ b/docs/resources/elasticsearch_cluster_settings.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_cluster_settings Resource - terraform-provider-elasticstack" subcategory: "Cluster" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_cluster_settings Resource" description: |- - Updates cluster-wide settings. + Updates cluster-wide settings. If the Elasticsearch security features are enabled, you must have the manage cluster privilege to use this API. See the cluster settings documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html for more details. --- -# Resource: elasticstack_elasticsearch_cluster_settings +# elasticstack_elasticsearch_cluster_settings (Resource) -Updates cluster-wide settings. If the Elasticsearch security features are enabled, you must have the manage cluster privilege to use this API. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html +Updates cluster-wide settings. If the Elasticsearch security features are enabled, you must have the manage cluster privilege to use this API. See the [cluster settings documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html) for more details. ## Example Usage diff --git a/docs/resources/elasticsearch_component_template.md b/docs/resources/elasticsearch_component_template.md index 441232654..c835fa40d 100644 --- a/docs/resources/elasticsearch_component_template.md +++ b/docs/resources/elasticsearch_component_template.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_component_template Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_component_template Resource" description: |- - Creates or updates a component template. + Creates or updates a component template. Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. See the component template documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html for more details. --- -# Resource: elasticstack_elasticsearch_component_template +# elasticstack_elasticsearch_component_template (Resource) -Creates or updates a component template. 
Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html +Creates or updates a component template. Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. See the [component template documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html) for more details. ## Example Usage @@ -64,14 +65,14 @@ Optional: - `alias` (Block Set) Alias to add. (see [below for nested schema](#nestedblock--template--alias)) - `mappings` (String) Mapping for fields in the index. Should be specified as a JSON object of field mappings. See the documentation (https://www.elastic.co/guide/en/elasticsearch/reference/current/explicit-mapping.html) for more details -- `settings` (String) Configuration options for the index. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/index-modules.html#index-modules-settings +- `settings` (String) Configuration options for the index. See the [index modules settings documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/index-modules.html#index-modules-settings) for more details. ### Nested Schema for `template.alias` Required: -- `name` (String) The alias name. Index alias names support date math. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/date-math-index-names.html +- `name` (String) The alias name. Index alias names support date math. See the [date math index names documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/date-math-index-names.html) for more details. Optional: @@ -108,6 +109,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_component_template.my_template / ``` diff --git a/docs/resources/elasticsearch_data_stream.md b/docs/resources/elasticsearch_data_stream.md index 6cfb9e88d..2402ab784 100644 --- a/docs/resources/elasticsearch_data_stream.md +++ b/docs/resources/elasticsearch_data_stream.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_data_stream Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_data_stream Resource" description: |- - Manages Elasticsearch Data Streams + Managing Elasticsearch data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html --- -# Resource: elasticstack_elasticsearch_data_stream +# elasticstack_elasticsearch_data_stream (Resource) -Manages data streams. This resource can create, delete and show the information about the created data stream. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html +Managing Elasticsearch data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html ## Example Usage @@ -123,6 +124,8 @@ Read-Only: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_data_stream.my_data_stream / ``` diff --git a/docs/resources/elasticsearch_data_stream_lifecycle.md b/docs/resources/elasticsearch_data_stream_lifecycle.md index c06eb8096..cbb14db59 100644 --- a/docs/resources/elasticsearch_data_stream_lifecycle.md +++ b/docs/resources/elasticsearch_data_stream_lifecycle.md @@ -1,12 +1,13 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_data_stream_lifecycle Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_data_stream_lifecycle Resource" description: |- - Manages Lifecycle for Elasticsearch Data Streams + Configures the data stream lifecycle for the targeted data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html --- -# Resource: elasticstack_elasticsearch_data_stream_lifecycle +# elasticstack_elasticsearch_data_stream_lifecycle (Resource) Configures the data stream lifecycle for the targeted data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html @@ -105,6 +106,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_data_stream_lifecycle.my_data_stream_lifecycle / ``` diff --git a/docs/resources/elasticsearch_enrich_policy.md b/docs/resources/elasticsearch_enrich_policy.md index 1845fc54e..9e0a7ba52 100644 --- a/docs/resources/elasticsearch_enrich_policy.md +++ b/docs/resources/elasticsearch_enrich_policy.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_enrich_policy Resource - terraform-provider-elasticstack" subcategory: "Enrich" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_enrich_policy" description: |- - Managing Elasticsearch enrich policies, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html + Managing Elasticsearch enrich policies. See the enrich API documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html for more details. --- -# Resource: elasticstack_elasticsearch_enrich_policy +# elasticstack_elasticsearch_enrich_policy (Resource) -Creates or updates enrich policies, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html +Managing Elasticsearch enrich policies. See the [enrich API documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html) for more details. 
## Example Usage @@ -90,8 +91,10 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell # NOTE: while importing index resource, keep in mind, that some of the default index settings will be imported into the TF state too # You can later adjust the index configuration to account for those imported settings terraform import elasticstack_elasticsearch_enrich_policy.policy1 / -``` \ No newline at end of file +``` diff --git a/docs/resources/elasticsearch_index.md b/docs/resources/elasticsearch_index.md index da59dd302..15e5f3377 100644 --- a/docs/resources/elasticsearch_index.md +++ b/docs/resources/elasticsearch_index.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_index Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index Resource" description: |- - Creates or updates an index. + Creates Elasticsearch indices. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html --- -# Resource: elasticstack_elasticsearch_index +# elasticstack_elasticsearch_index (Resource) -Creates or updates an index. This resource can define settings, mappings and aliases. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html +Creates Elasticsearch indices. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html ## Example Usage @@ -190,13 +191,10 @@ Required: ## Import -**NOTE:** While importing index resource, keep in mind, that some of the default index settings will be imported into the TF state too. -You can later adjust the index configuration to account for those imported settings. - -Some of the default settings, which could be imported are: `index.number_of_replicas`, `index.number_of_shards` and `index.routing.allocation.include._tier_preference`. - Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell # NOTE: while importing index resource, keep in mind, that some of the default index settings will be imported into the TF state too # You can later adjust the index configuration to account for those imported settings diff --git a/docs/resources/elasticsearch_index_lifecycle.md b/docs/resources/elasticsearch_index_lifecycle.md index 99c241035..efbffa2ff 100644 --- a/docs/resources/elasticsearch_index_lifecycle.md +++ b/docs/resources/elasticsearch_index_lifecycle.md @@ -1,12 +1,13 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_index_lifecycle Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index_lifecycle Resource" description: |- - Creates or updates lifecycle policy. + Creates or updates lifecycle policy. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-put-lifecycle.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-index-lifecycle.html --- -# Resource: elasticstack_elasticsearch_index_lifecycle +# elasticstack_elasticsearch_index_lifecycle (Resource) Creates or updates lifecycle policy. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-put-lifecycle.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-index-lifecycle.html @@ -434,6 +435,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_index_lifecycle.my_ilm / ``` diff --git a/docs/resources/elasticsearch_index_template.md b/docs/resources/elasticsearch_index_template.md index 08715c32e..e6cc51aa6 100644 --- a/docs/resources/elasticsearch_index_template.md +++ b/docs/resources/elasticsearch_index_template.md @@ -1,12 +1,13 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_index_template Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index_template Resource" description: |- - Creates or updates an index template. + Creates or updates an index template. Index templates define settings, mappings, and aliases that can be applied automatically to new indices. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-template.html --- -# Resource: elasticstack_elasticsearch_index_template +# elasticstack_elasticsearch_index_template (Resource) Creates or updates an index template. Index templates define settings, mappings, and aliases that can be applied automatically to new indices. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-template.html @@ -136,6 +137,8 @@ Required: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_index_template.my_template / ``` diff --git a/docs/resources/elasticsearch_ingest_pipeline.md b/docs/resources/elasticsearch_ingest_pipeline.md index 53f4c94dd..22d371057 100644 --- a/docs/resources/elasticsearch_ingest_pipeline.md +++ b/docs/resources/elasticsearch_ingest_pipeline.md @@ -1,24 +1,24 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_pipeline Resource - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_pipeline Resource" description: |- - Manages Ingest Pipelines + Manages tasks and resources related to ingest pipelines and processors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-apis.html --- -# Resource: elasticstack_elasticsearch_ingest_pipeline +# elasticstack_elasticsearch_ingest_pipeline (Resource) -Use ingest APIs to manage tasks and resources related to ingest pipelines and processors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-apis.html +Manages tasks and resources related to ingest pipelines and processors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-apis.html ## Example Usage -You can provide your custom JSON definitions for the ingest processors: - ```terraform provider "elasticstack" { elasticsearch {} } +// You can provide the ingest pipeline processors as plain JSON objects. 
resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { name = "my_ingest_pipeline" description = "My first ingest pipeline managed by Terraform" @@ -43,12 +43,8 @@ EOF , ] } -``` - -Or you can use data sources and Terraform declarative way of defining the ingest processors: - -```terraform +// Or you can use the provided data sources to create the processor data sources. data "elasticstack_elasticsearch_ingest_processor_set" "set_count" { field = "count" value = 1 @@ -69,7 +65,6 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "ingest" { } ``` - ## Schema @@ -113,6 +108,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_ingest_pipeline.my_ingest_pipeline / ``` diff --git a/docs/resources/elasticsearch_logstash_pipeline.md b/docs/resources/elasticsearch_logstash_pipeline.md index bf7f49394..e89379a50 100644 --- a/docs/resources/elasticsearch_logstash_pipeline.md +++ b/docs/resources/elasticsearch_logstash_pipeline.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_logstash_pipeline Resource - terraform-provider-elasticstack" subcategory: "Logstash" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_logstash_pipeline Resource" description: |- - Creates or updates centrally managed logstash pipelines. + Manage Logstash Pipelines via Centralized Pipeline Management. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-apis.html --- -# Resource: elasticstack_elasticsearch_logstash_pipeline +# elasticstack_elasticsearch_logstash_pipeline (Resource) -Creates or updates centrally managed logstash pipelines. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-apis.html +Manage Logstash Pipelines via Centralized Pipeline Management. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-apis.html ## Example Usage @@ -112,6 +113,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_logstash_pipeline.example / ``` diff --git a/docs/resources/elasticsearch_script.md b/docs/resources/elasticsearch_script.md index f551360af..d3996234b 100644 --- a/docs/resources/elasticsearch_script.md +++ b/docs/resources/elasticsearch_script.md @@ -1,14 +1,15 @@ + --- -subcategory: "Cluster" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_script Resource" +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_script Resource - terraform-provider-elasticstack" +subcategory: "Elasticsearch" description: |- - Creates or updates a stored script or search template. + Creates or updates a stored script or search template. See the create stored script API documentation https://www.elastic.co/guide/en/elasticsearch/reference/current/create-stored-script-api.html for more details. --- -# Resource: elasticstack_elasticsearch_script +# elasticstack_elasticsearch_script (Resource) -Creates or updates a stored script or search template. See https://www.elastic.co/guide/en/elasticsearch/reference/current/create-stored-script-api.html +Creates or updates a stored script or search template. 
See the [create stored script API documentation](https://www.elastic.co/guide/en/elasticsearch/reference/current/create-stored-script-api.html) for more details. ## Example Usage @@ -85,6 +86,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_script.my_script /
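# A minimal illustrative sketch, not part of the original patch: assuming the import ID
# is the cluster UUID and the script ID joined by a slash (the placeholder after
# "my_script" above appears truncated), an invocation might look like this.
# Both values below are hypothetical.
terraform import elasticstack_elasticsearch_script.my_script 4c3dEXAMPLEclusteruuid/my_script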